Compare commits
30 Commits
feat/oidc-
...
feat/user-

| Author | SHA1 | Date |
|---|---|---|
| | c696b935b8 | |
| | 09abce063f | |
| | 77884c14aa | |
| | 57c3a217c6 | |
| | 8f68e8be81 | |
| | 19446cb864 | |
| | efb616d600 | |
| | d6a17784dc | |
| | bc690cc320 | |
| | efed1c461d | |
| | cf03731cc8 | |
| | cbd5bd2405 | |
| | ded3cd8876 | |
| | b1147b6409 | |
| | 4448c13684 | |
| | 2fd04b6d65 | |
| | a8955e51a3 | |
| | d2559ba977 | |
| | b50406ab9e | |
| | acafdb54c2 | |
| | ca3237c7be | |
| | d3764fd9fe | |
| | 24fc57fd01 | |
| | e0ab2c666a | |
| | e8702e104d | |
| | dd762f7223 | |
| | 4166eac99d | |
| | 82a1f554b5 | |
| | 0cdccb617c | |
| | 39649ce523 | |
@@ -473,6 +473,15 @@ FIREBASE_STORAGE_BUCKET=
FIREBASE_MESSAGING_SENDER_ID=
FIREBASE_APP_ID=

#========================#
# S3 AWS Bucket #
#========================#

AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
AWS_REGION=
AWS_BUCKET_NAME=

#========================#
# Shared Links #
#========================#

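These S3 variables pair with the `@aws-sdk/client-s3` dependency and the `initializeS3()` call added further down in this compare view. A minimal sketch of how such variables are typically wired up (illustrative only; the actual `~/server/services/Files/S3/initialize` module is not shown in this diff):

```js
// Illustrative sketch only; not the code added by this PR.
const { S3Client, HeadBucketCommand } = require('@aws-sdk/client-s3');

function createS3Client() {
  // All values come from the environment variables introduced above.
  const { AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_REGION, AWS_BUCKET_NAME } = process.env;
  const client = new S3Client({
    region: AWS_REGION,
    credentials: { accessKeyId: AWS_ACCESS_KEY_ID, secretAccessKey: AWS_SECRET_ACCESS_KEY },
  });
  // Optional sanity check that the configured bucket is reachable.
  return client.send(new HeadBucketCommand({ Bucket: AWS_BUCKET_NAME })).then(() => client);
}
```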
@@ -5,6 +5,7 @@ const {
isAgentsEndpoint,
isParamEndpoint,
EModelEndpoint,
ContentTypes,
excludedKeys,
ErrorTypes,
Constants,
@@ -365,17 +366,14 @@ class BaseClient {
* context: TMessage[],
* remainingContextTokens: number,
* messagesToRefine: TMessage[],
* summaryIndex: number,
* }>} An object with four properties: `context`, `summaryIndex`, `remainingContextTokens`, and `messagesToRefine`.
* }>} An object with three properties: `context`, `remainingContextTokens`, and `messagesToRefine`.
* `context` is an array of messages that fit within the token limit.
* `summaryIndex` is the index of the first message in the `messagesToRefine` array.
* `remainingContextTokens` is the number of tokens remaining within the limit after adding the messages to the context.
* `messagesToRefine` is an array of messages that were not added to the context because they would have exceeded the token limit.
*/
async getMessagesWithinTokenLimit({ messages: _messages, maxContextTokens, instructions }) {
// Every reply is primed with <|start|>assistant<|message|>, so we
// start with 3 tokens for the label after all messages have been counted.
let summaryIndex = -1;
let currentTokenCount = 3;
const instructionsTokenCount = instructions?.tokenCount ?? 0;
let remainingContextTokens =
@@ -408,14 +406,12 @@ class BaseClient {
}

const prunedMemory = messages;
summaryIndex = prunedMemory.length - 1;
remainingContextTokens -= currentTokenCount;

return {
context: context.reverse(),
remainingContextTokens,
messagesToRefine: prunedMemory,
summaryIndex,
};
}

@@ -458,7 +454,7 @@ class BaseClient {

let orderedWithInstructions = this.addInstructions(orderedMessages, instructions);

let { context, remainingContextTokens, messagesToRefine, summaryIndex } =
let { context, remainingContextTokens, messagesToRefine } =
await this.getMessagesWithinTokenLimit({
messages: orderedWithInstructions,
instructions,
@@ -528,7 +524,7 @@ class BaseClient {
}

// Make sure to only continue summarization logic if the summary message was generated
shouldSummarize = summaryMessage && shouldSummarize;
shouldSummarize = summaryMessage != null && shouldSummarize === true;

logger.debug('[BaseClient] Context Count (2/2)', {
remainingContextTokens,
@@ -538,17 +534,18 @@ class BaseClient {
/** @type {Record<string, number> | undefined} */
let tokenCountMap;
if (buildTokenMap) {
tokenCountMap = orderedWithInstructions.reduce((map, message, index) => {
const currentPayload = shouldSummarize ? orderedWithInstructions : context;
tokenCountMap = currentPayload.reduce((map, message, index) => {
const { messageId } = message;
if (!messageId) {
return map;
}

if (shouldSummarize && index === summaryIndex && !usePrevSummary) {
if (shouldSummarize && index === messagesToRefine.length - 1 && !usePrevSummary) {
map.summaryMessage = { ...summaryMessage, messageId, tokenCount: summaryTokenCount };
}

map[messageId] = orderedWithInstructions[index].tokenCount;
map[messageId] = currentPayload[index].tokenCount;
return map;
}, {});
}
@@ -1021,11 +1018,17 @@ class BaseClient {
const processValue = (value) => {
if (Array.isArray(value)) {
for (let item of value) {
if (!item || !item.type || item.type === 'image_url') {
if (
!item ||
!item.type ||
item.type === ContentTypes.THINK ||
item.type === ContentTypes.ERROR ||
item.type === ContentTypes.IMAGE_URL
) {
continue;
}

if (item.type === 'tool_call' && item.tool_call != null) {
if (item.type === ContentTypes.TOOL_CALL && item.tool_call != null) {
const toolName = item.tool_call?.name || '';
if (toolName != null && toolName && typeof toolName === 'string') {
numTokens += this.getTokenCount(toolName);
@@ -1121,9 +1124,13 @@ class BaseClient {
return message;
}

const files = await getFiles({
file_id: { $in: fileIds },
});
const files = await getFiles(
{
file_id: { $in: fileIds },
},
{},
{},
);

await this.addImageURLs(message, files, this.visionMode);

@@ -1272,6 +1272,29 @@ ${convo}
});
}

/** Note: OpenAI Web Search models do not support any known parameters besdies `max_tokens` */
if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) {
const searchExcludeParams = [
'frequency_penalty',
'presence_penalty',
'temperature',
'top_p',
'top_k',
'stop',
'logit_bias',
'seed',
'response_format',
'n',
'logprobs',
'user',
];

this.options.dropParams = this.options.dropParams || [];
this.options.dropParams = [
...new Set([...this.options.dropParams, ...searchExcludeParams]),
];
}

if (this.options.dropParams && Array.isArray(this.options.dropParams)) {
this.options.dropParams.forEach((param) => {
delete modelOptions[param];

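The net effect of the new web-search guard together with the existing `dropParams` pass is to strip sampling parameters that the `gpt-4o` search models reject before the request payload is built. A condensed, standalone sketch of that behavior (sample option values are assumed):

```js
// Condensed sketch of the dropParams behavior shown above; sample values are assumed.
const modelOptions = { model: 'gpt-4o-search-preview', temperature: 0.7, top_p: 0.9, max_tokens: 512 };
const options = { dropParams: ['stop'] };

const searchExcludeParams = [
  'frequency_penalty', 'presence_penalty', 'temperature', 'top_p', 'top_k', 'stop',
  'logit_bias', 'seed', 'response_format', 'n', 'logprobs', 'user',
];

if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) {
  // Merge while de-duplicating, exactly as the diff does with a Set.
  options.dropParams = [...new Set([...(options.dropParams ?? []), ...searchExcludeParams])];
}

options.dropParams.forEach((param) => delete modelOptions[param]);

console.log(modelOptions); // { model: 'gpt-4o-search-preview', max_tokens: 512 }
```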
@@ -211,7 +211,7 @@ const formatAgentMessages = (payload) => {
} else if (part.type === ContentTypes.THINK) {
hasReasoning = true;
continue;
} else if (part.type === ContentTypes.ERROR) {
} else if (part.type === ContentTypes.ERROR || part.type === ContentTypes.AGENT_UPDATE) {
continue;
} else {
currentContent.push(part);

@@ -164,7 +164,7 @@ describe('BaseClient', () => {
const result = await TestClient.getMessagesWithinTokenLimit({ messages });

expect(result.context).toEqual(expectedContext);
expect(result.summaryIndex).toEqual(expectedIndex);
expect(result.messagesToRefine.length - 1).toEqual(expectedIndex);
expect(result.remainingContextTokens).toBe(expectedRemainingContextTokens);
expect(result.messagesToRefine).toEqual(expectedMessagesToRefine);
});
@@ -200,7 +200,7 @@ describe('BaseClient', () => {
const result = await TestClient.getMessagesWithinTokenLimit({ messages });

expect(result.context).toEqual(expectedContext);
expect(result.summaryIndex).toEqual(expectedIndex);
expect(result.messagesToRefine.length - 1).toEqual(expectedIndex);
expect(result.remainingContextTokens).toBe(expectedRemainingContextTokens);
expect(result.messagesToRefine).toEqual(expectedMessagesToRefine);
});

@@ -172,7 +172,7 @@ Error Message: ${error.message}`);
{
type: ContentTypes.IMAGE_URL,
image_url: {
url: `data:image/jpeg;base64,${base64}`,
url: `data:image/png;base64,${base64}`,
},
},
];

@@ -21,6 +21,7 @@ const {
} = require('../');
const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { createMCPTool } = require('~/server/services/MCP');
const { loadSpecs } = require('./loadSpecs');
const { logger } = require('~/config');
@@ -90,45 +91,6 @@ const validateTools = async (user, tools = []) => {
}
};

const loadAuthValues = async ({ userId, authFields, throwError = true }) => {
let authValues = {};

/**
* Finds the first non-empty value for the given authentication field, supporting alternate fields.
* @param {string[]} fields Array of strings representing the authentication fields. Supports alternate fields delimited by "||".
* @returns {Promise<{ authField: string, authValue: string} | null>} An object containing the authentication field and value, or null if not found.
*/
const findAuthValue = async (fields) => {
for (const field of fields) {
let value = process.env[field];
if (value) {
return { authField: field, authValue: value };
}
try {
value = await getUserPluginAuthValue(userId, field, throwError);
} catch (err) {
if (field === fields[fields.length - 1] && !value) {
throw err;
}
}
if (value) {
return { authField: field, authValue: value };
}
}
return null;
};

for (let authField of authFields) {
const fields = authField.split('||');
const result = await findAuthValue(fields);
if (result) {
authValues[result.authField] = result.authValue;
}
}

return authValues;
};

/** @typedef {typeof import('@langchain/core/tools').Tool} ToolConstructor */
/** @typedef {import('@langchain/core/tools').Tool} Tool */

@@ -348,7 +310,6 @@ const loadTools = async ({

module.exports = {
loadToolWithAuth,
loadAuthValues,
validateTools,
loadTools,
};

@@ -1,9 +1,8 @@
const { validateTools, loadTools, loadAuthValues } = require('./handleTools');
const { validateTools, loadTools } = require('./handleTools');
const handleOpenAIErrors = require('./handleOpenAIErrors');

module.exports = {
handleOpenAIErrors,
loadAuthValues,
validateTools,
loadTools,
};

|
||||
const axios = require('axios');
|
||||
const { EventSource } = require('eventsource');
|
||||
const { Time, CacheKeys } = require('librechat-data-provider');
|
||||
const logger = require('./winston');
|
||||
@@ -47,9 +48,46 @@ const sendEvent = (res, event) => {
|
||||
res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`);
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates and configures an Axios instance with optional proxy settings.
|
||||
*
|
||||
* @typedef {import('axios').AxiosInstance} AxiosInstance
|
||||
* @typedef {import('axios').AxiosProxyConfig} AxiosProxyConfig
|
||||
*
|
||||
* @returns {AxiosInstance} A configured Axios instance
|
||||
* @throws {Error} If there's an issue creating the Axios instance or parsing the proxy URL
|
||||
*/
|
||||
function createAxiosInstance() {
|
||||
const instance = axios.create();
|
||||
|
||||
if (process.env.proxy) {
|
||||
try {
|
||||
const url = new URL(process.env.proxy);
|
||||
|
||||
/** @type {AxiosProxyConfig} */
|
||||
const proxyConfig = {
|
||||
host: url.hostname.replace(/^\[|\]$/g, ''),
|
||||
protocol: url.protocol.replace(':', ''),
|
||||
};
|
||||
|
||||
if (url.port) {
|
||||
proxyConfig.port = parseInt(url.port, 10);
|
||||
}
|
||||
|
||||
instance.defaults.proxy = proxyConfig;
|
||||
} catch (error) {
|
||||
console.error('Error parsing proxy URL:', error);
|
||||
throw new Error(`Invalid proxy URL: ${process.env.proxy}`);
|
||||
}
|
||||
}
|
||||
|
||||
return instance;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
logger,
|
||||
sendEvent,
|
||||
getMCPManager,
|
||||
createAxiosInstance,
|
||||
getFlowStateManager,
|
||||
};
|
||||
|
||||
api/config/index.spec.js (new file, 126 lines)

@@ -0,0 +1,126 @@
const axios = require('axios');
const { createAxiosInstance } = require('./index');

// Mock axios
jest.mock('axios', () => ({
interceptors: {
request: { use: jest.fn(), eject: jest.fn() },
response: { use: jest.fn(), eject: jest.fn() },
},
create: jest.fn().mockReturnValue({
defaults: {
proxy: null,
},
get: jest.fn().mockResolvedValue({ data: {} }),
post: jest.fn().mockResolvedValue({ data: {} }),
put: jest.fn().mockResolvedValue({ data: {} }),
delete: jest.fn().mockResolvedValue({ data: {} }),
}),
get: jest.fn().mockResolvedValue({ data: {} }),
post: jest.fn().mockResolvedValue({ data: {} }),
put: jest.fn().mockResolvedValue({ data: {} }),
delete: jest.fn().mockResolvedValue({ data: {} }),
reset: jest.fn().mockImplementation(function () {
this.get.mockClear();
this.post.mockClear();
this.put.mockClear();
this.delete.mockClear();
this.create.mockClear();
}),
}));

describe('createAxiosInstance', () => {
const originalEnv = process.env;

beforeEach(() => {
// Reset mocks
jest.clearAllMocks();
// Create a clean copy of process.env
process.env = { ...originalEnv };
// Default: no proxy
delete process.env.proxy;
});

afterAll(() => {
// Restore original process.env
process.env = originalEnv;
});

test('creates an axios instance without proxy when no proxy env is set', () => {
const instance = createAxiosInstance();

expect(axios.create).toHaveBeenCalledTimes(1);
expect(instance.defaults.proxy).toBeNull();
});

test('configures proxy correctly with hostname and protocol', () => {
process.env.proxy = 'http://example.com';

const instance = createAxiosInstance();

expect(axios.create).toHaveBeenCalledTimes(1);
expect(instance.defaults.proxy).toEqual({
host: 'example.com',
protocol: 'http',
});
});

test('configures proxy correctly with hostname, protocol and port', () => {
process.env.proxy = 'https://proxy.example.com:8080';

const instance = createAxiosInstance();

expect(axios.create).toHaveBeenCalledTimes(1);
expect(instance.defaults.proxy).toEqual({
host: 'proxy.example.com',
protocol: 'https',
port: 8080,
});
});

test('handles proxy URLs with authentication', () => {
process.env.proxy = 'http://user:pass@proxy.example.com:3128';

const instance = createAxiosInstance();

expect(axios.create).toHaveBeenCalledTimes(1);
expect(instance.defaults.proxy).toEqual({
host: 'proxy.example.com',
protocol: 'http',
port: 3128,
// Note: The current implementation doesn't handle auth - if needed, add this functionality
});
});

test('throws error when proxy URL is invalid', () => {
process.env.proxy = 'invalid-url';

expect(() => createAxiosInstance()).toThrow('Invalid proxy URL');
expect(axios.create).toHaveBeenCalledTimes(1);
});

// If you want to test the actual URL parsing more thoroughly
test('handles edge case proxy URLs correctly', () => {
// IPv6 address
process.env.proxy = 'http://[::1]:8080';

let instance = createAxiosInstance();

expect(instance.defaults.proxy).toEqual({
host: '::1',
protocol: 'http',
port: 8080,
});

// URL with path (which should be ignored for proxy config)
process.env.proxy = 'http://proxy.example.com:8080/some/path';

instance = createAxiosInstance();

expect(instance.defaults.proxy).toEqual({
host: 'proxy.example.com',
protocol: 'http',
port: 8080,
});
});
});
@@ -28,4 +28,4 @@ const getBanner = async (user) => {
}
};

module.exports = { getBanner };
module.exports = { Banner, getBanner };

@@ -15,19 +15,6 @@ const searchConversation = async (conversationId) => {
throw new Error('Error searching conversation');
}
};
/**
* Searches for a conversation by conversationId and returns associated file ids.
* @param {string} conversationId - The conversation's ID.
* @returns {Promise<string[] | null>}
*/
const getConvoFiles = async (conversationId) => {
try {
return (await Conversation.findOne({ conversationId }, 'files').lean())?.files ?? [];
} catch (error) {
logger.error('[getConvoFiles] Error getting conversation files', error);
throw new Error('Error getting conversation files');
}
};

/**
* Retrieves a single conversation for a given user and conversation ID.
@@ -73,6 +60,20 @@ const deleteNullOrEmptyConversations = async () => {
}
};

/**
* Searches for a conversation by conversationId and returns associated file ids.
* @param {string} conversationId - The conversation's ID.
* @returns {Promise<string[] | null>}
*/
const getConvoFiles = async (conversationId) => {
try {
return (await Conversation.findOne({ conversationId }, 'files').lean())?.files ?? [];
} catch (error) {
logger.error('[getConvoFiles] Error getting conversation files', error);
throw new Error('Error getting conversation files');
}
};

module.exports = {
Conversation,
getConvoFiles,

@@ -1,5 +1,6 @@
const mongoose = require('mongoose');
const { fileSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');

const File = mongoose.model('File', fileSchema);

@@ -17,11 +18,39 @@ const findFileById = async (file_id, options = {}) => {
* Retrieves files matching a given filter, sorted by the most recently updated.
* @param {Object} filter - The filter criteria to apply.
* @param {Object} [_sortOptions] - Optional sort parameters.
* @param {Object|String} [selectFields={ text: 0 }] - Fields to include/exclude in the query results.
* Default excludes the 'text' field.
* @returns {Promise<Array<IMongoFile>>} A promise that resolves to an array of file documents.
*/
const getFiles = async (filter, _sortOptions) => {
const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => {
const sortOptions = { updatedAt: -1, ..._sortOptions };
return await File.find(filter).sort(sortOptions).lean();
return await File.find(filter).select(selectFields).sort(sortOptions).lean();
};

/**
* Retrieves tool files (files that are embedded or have a fileIdentifier) from an array of file IDs
* @param {string[]} fileIds - Array of file_id strings to search for
* @returns {Promise<Array<IMongoFile>>} Files that match the criteria
*/
const getToolFilesByIds = async (fileIds) => {
if (!fileIds || !fileIds.length) {
return [];
}

try {
const filter = {
file_id: { $in: fileIds },
$or: [{ embedded: true }, { 'metadata.fileIdentifier': { $exists: true } }],
};

const selectFields = { text: 0 };
const sortOptions = { updatedAt: -1 };

return await getFiles(filter, sortOptions, selectFields);
} catch (error) {
logger.error('[getToolFilesByIds] Error retrieving tool files:', error);
throw new Error('Error retrieving tool files');
}
};

/**
@@ -109,6 +138,7 @@ module.exports = {
File,
findFileById,
getFiles,
getToolFilesByIds,
createFile,
updateFile,
updateFileUsage,

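The extra `selectFields` argument on `getFiles` lets callers control the Mongo projection while keeping the old default of excluding the heavy `text` field. A short usage sketch (filter and sort values are assumed):

```js
// Usage sketch for the updated getFiles signature; filter values are assumed.
const { getFiles, getToolFilesByIds } = require('~/models/File');

async function loadConversationFiles(fileIds) {
  // Default projection: the `text` field is excluded.
  const slim = await getFiles({ file_id: { $in: fileIds } });

  // Explicit empty projection: return full documents, newest first.
  const full = await getFiles({ file_id: { $in: fileIds } }, { updatedAt: -1 }, {});

  // New helper from this diff: only embedded files or files with a fileIdentifier.
  const toolFiles = await getToolFilesByIds(fileIds);

  return { slim, full, toolFiles };
}
```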
api/models/Group.js (new file, 6 lines)

@@ -0,0 +1,6 @@
const mongoose = require('mongoose');
const { groupSchema } = require('@librechat/data-schemas');

const Group = mongoose.model('Group', groupSchema);

module.exports = Group;
@@ -71,7 +71,42 @@ async function saveMessage(req, params, metadata) {
} catch (err) {
logger.error('Error saving message:', err);
logger.info(`---\`saveMessage\` context: ${metadata?.context}`);
throw err;

// Check if this is a duplicate key error (MongoDB error code 11000)
if (err.code === 11000 && err.message.includes('duplicate key error')) {
// Log the duplicate key error but don't crash the application
logger.warn(`Duplicate messageId detected: ${params.messageId}. Continuing execution.`);

try {
// Try to find the existing message with this ID
const existingMessage = await Message.findOne({
messageId: params.messageId,
user: req.user.id,
});

// If we found it, return it
if (existingMessage) {
return existingMessage.toObject();
}

// If we can't find it (unlikely but possible in race conditions)
return {
...params,
messageId: params.messageId,
user: req.user.id,
};
} catch (findError) {
// If the findOne also fails, log it but don't crash
logger.warn(`Could not retrieve existing message with ID ${params.messageId}: ${findError.message}`);
return {
...params,
messageId: params.messageId,
user: req.user.id,
};
}
}

throw err; // Re-throw other errors
}
}

api/models/groupMethods.js (new file, 127 lines)

@@ -0,0 +1,127 @@
const User = require('./User');
const Group = require('~/models/Group');

/**
* Retrieve a group by ID and convert the found group document to a plain object.
*
* @param {string} groupId - The ID of the group to find and return as a plain object.
* @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
* @returns {Promise<Object|null>} A plain object representing the group document, or `null` if no group is found.
*/
const getGroupById = (groupId, fieldsToSelect = null) => {
const query = Group.findById(groupId);
if (fieldsToSelect) {
query.select(fieldsToSelect);
}
return query.lean();
};

/**
* Search for a single group or multiple groups based on partial data and return them as plain objects.
*
* @param {Partial<Object>} searchCriteria - The partial data to use for searching groups.
* @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned documents.
* @returns {Promise<Object[]>} An array of plain objects representing the group documents.
*/
const findGroup = (searchCriteria, fieldsToSelect = null) => {
const query = Group.find(searchCriteria);
if (fieldsToSelect) {
query.select(fieldsToSelect);
}
return query.lean();
};

/**
* Update a group with new data without overwriting existing properties.
*
* @param {string} groupId - The ID of the group to update.
* @param {Object} updateData - An object containing the properties to update.
* @returns {Promise<Object|null>} The updated group document as a plain object, or `null` if no group is found.
*/
const updateGroup = (groupId, updateData) => {
return Group.findByIdAndUpdate(
groupId,
{ $set: updateData },
{ new: true, runValidators: true },
).lean();
};

/**
* Create a new group.
*
* @param {Object} data - The group data to be created.
* @returns {Promise<Object>} The created group document.
*/
const createGroup = async (data) => {
return await Group.create(data);
};

/**
* Count the number of group documents in the collection based on the provided filter.
*
* @param {Object} [filter={}] - The filter to apply when counting the documents.
* @returns {Promise<number>} The count of documents that match the filter.
*/
const countGroups = (filter = {}) => {
return Group.countDocuments(filter);
};

/**
* Delete a group by its unique ID only if no user is assigned to it.
*
* @param {string} groupId - The ID of the group to delete.
* @returns {Promise<{ deletedCount: number, message: string }>} An object indicating the number of deleted documents.
*/
const deleteGroupById = async (groupId) => {
// Check if any users reference the group
const userCount = await User.countDocuments({ groups: groupId });
if (userCount > 0) {
return { deletedCount: 0, message: `Cannot delete group; it is assigned to ${userCount} user(s).` };
}

try {
const result = await Group.deleteOne({ _id: groupId });
if (result.deletedCount === 0) {
return { deletedCount: 0, message: 'No group found with that ID.' };
}
return { deletedCount: result.deletedCount, message: 'Group was deleted successfully.' };
} catch (error) {
throw new Error('Error deleting group: ' + error.message);
}
};

/**
* Override deletion of a group by its unique ID.
* This function first removes the group ObjectId from all users' groups arrays,
* then proceeds to delete the group document.
*
* @param {string} groupId - The ID of the group to delete.
* @returns {Promise<{ deletedCount: number, message: string }>} An object indicating the deletion result.
*/
const overrideDeleteGroupById = async (groupId) => {
// Remove group references from all users
await User.updateMany(
{ groups: groupId },
{ $pull: { groups: groupId } },
);

try {
const result = await Group.deleteOne({ _id: groupId });
if (result.deletedCount === 0) {
return { deletedCount: 0, message: 'No group found with that ID.' };
}
return { deletedCount: result.deletedCount, message: 'Group was deleted successfully (override).' };
} catch (error) {
throw new Error('Error deleting group: ' + error.message);
}
};

module.exports = {
getGroupById,
findGroup,
updateGroup,
createGroup,
countGroups,
deleteGroupById,
overrideDeleteGroupById,
};
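For reference, a brief sketch of how the new group helpers might be called (the `name` field and the ID values are assumed for illustration):

```js
// Illustrative only; group document fields are assumed.
const { createGroup, findGroup, deleteGroupById, overrideDeleteGroupById } = require('~/models/groupMethods');

async function manageGroup() {
  const group = await createGroup({ name: 'engineering' });
  const matches = await findGroup({ name: 'engineering' }, 'name');

  // Refuses to delete while any user still references the group.
  let result = await deleteGroupById(group._id);

  // The override variant pulls the group from every user first, then deletes it.
  result = await overrideDeleteGroupById(group._id);
  return { matches, result };
}
```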
@@ -40,6 +40,7 @@ const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset')
const { createToken, findToken, updateToken, deleteTokens } = require('./Token');
const Balance = require('./Balance');
const User = require('./User');
const Group = require('./Group');
const Key = require('./Key');

module.exports = {
@@ -92,6 +93,7 @@ module.exports = {
countActiveSessions,

User,
Group,
Key,
Balance,
};

@@ -61,6 +61,7 @@ const bedrockValues = {
'amazon.nova-micro-v1:0': { prompt: 0.035, completion: 0.14 },
'amazon.nova-lite-v1:0': { prompt: 0.06, completion: 0.24 },
'amazon.nova-pro-v1:0': { prompt: 0.8, completion: 3.2 },
'deepseek.r1': { prompt: 1.35, completion: 5.4 },
};

/**

@@ -288,7 +288,7 @@ describe('AWS Bedrock Model Tests', () => {
});

describe('Deepseek Model Tests', () => {
const deepseekModels = ['deepseek-chat', 'deepseek-coder', 'deepseek-reasoner'];
const deepseekModels = ['deepseek-chat', 'deepseek-coder', 'deepseek-reasoner', 'deepseek.r1'];

it('should return the correct prompt multipliers for all models', () => {
const results = deepseekModels.map((model) => {

@@ -35,6 +35,8 @@
"homepage": "https://librechat.ai",
"dependencies": {
"@anthropic-ai/sdk": "^0.37.0",
"@aws-sdk/client-s3": "^3.758.0",
"@aws-sdk/s3-request-presigner": "^3.758.0",
"@azure/search-documents": "^12.0.0",
"@google/generative-ai": "^0.23.0",
"@googleapis/youtube": "^20.0.0",
@@ -42,10 +44,10 @@
"@keyv/redis": "^2.8.1",
"@langchain/community": "^0.3.34",
"@langchain/core": "^0.3.40",
"@langchain/google-genai": "^0.1.9",
"@langchain/google-vertexai": "^0.2.0",
"@langchain/google-genai": "^0.1.11",
"@langchain/google-vertexai": "^0.2.2",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.2.0",
"@librechat/agents": "^2.2.8",
"@librechat/data-schemas": "*",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"axios": "^1.8.2",
@@ -82,7 +84,7 @@
"memorystore": "^1.6.7",
"mime": "^3.0.0",
"module-alias": "^2.2.3",
"mongoose": "^8.9.5",
"mongoose": "^8.12.1",
"multer": "^1.4.5-lts.1",
"nanoid": "^3.3.7",
"nodemailer": "^6.9.15",

@@ -10,8 +10,8 @@ const {
ChatModelStreamHandler,
} = require('@librechat/agents');
const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { saveBase64Image } = require('~/server/services/Files/process');
const { loadAuthValues } = require('~/app/clients/tools/util');
const { logger, sendEvent } = require('~/config');

/** @typedef {import('@librechat/agents').Graph} Graph */

@@ -7,7 +7,16 @@
// validateVisionModel,
// mapModelToAzureConfig,
// } = require('librechat-data-provider');
const { Callback, createMetadataAggregator } = require('@librechat/agents');
require('events').EventEmitter.defaultMaxListeners = 100;
const {
Callback,
GraphEvents,
formatMessage,
formatAgentMessages,
formatContentStrings,
getTokenCountForMessage,
createMetadataAggregator,
} = require('@librechat/agents');
const {
Constants,
VisionModes,
@@ -17,24 +26,19 @@ const {
KnownEndpoints,
anthropicSchema,
isAgentsEndpoint,
AgentCapabilities,
bedrockInputSchema,
removeNullishValues,
} = require('librechat-data-provider');
const {
formatMessage,
addCacheControl,
formatAgentMessages,
formatContentStrings,
createContextHandlers,
} = require('~/app/clients/prompts');
const { getCustomEndpointConfig, checkCapability } = require('~/server/services/Config');
const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
const { getBufferString, HumanMessage } = require('@langchain/core/messages');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const Tokenizer = require('~/server/services/Tokenizer');
const BaseClient = require('~/app/clients/BaseClient');
const { logger, sendEvent } = require('~/config');
const { createRun } = require('./run');
const { logger } = require('~/config');

/** @typedef {import('@librechat/agents').MessageContentComplex} MessageContentComplex */
/** @typedef {import('@langchain/core/runnables').RunnableConfig} RunnableConfig */
@@ -99,6 +103,8 @@ class AgentClient extends BaseClient {
this.outputTokensKey = 'output_tokens';
/** @type {UsageMetadata} */
this.usage;
/** @type {Record<string, number>} */
this.indexTokenCountMap = {};
}

/**
@@ -223,14 +229,23 @@ class AgentClient extends BaseClient {
};
}

/**
*
* @param {TMessage} message
* @param {Array<MongoFile>} attachments
* @returns {Promise<Array<Partial<MongoFile>>>}
*/
async addImageURLs(message, attachments) {
const { files, image_urls } = await encodeAndFormat(
const { files, text, image_urls } = await encodeAndFormat(
this.options.req,
attachments,
this.options.agent.provider,
VisionModes.agents,
);
message.image_urls = image_urls.length ? image_urls : undefined;
if (text && text.length) {
message.ocr = text;
}
return files;
}

@@ -308,7 +323,21 @@ class AgentClient extends BaseClient {
assistantName: this.options?.modelLabel,
});

const needsTokenCount = this.contextStrategy && !orderedMessages[i].tokenCount;
if (message.ocr && i !== orderedMessages.length - 1) {
if (typeof formattedMessage.content === 'string') {
formattedMessage.content = message.ocr + '\n' + formattedMessage.content;
} else {
const textPart = formattedMessage.content.find((part) => part.type === 'text');
textPart
? (textPart.text = message.ocr + '\n' + textPart.text)
: formattedMessage.content.unshift({ type: 'text', text: message.ocr });
}
} else if (message.ocr && i === orderedMessages.length - 1) {
systemContent = [systemContent, message.ocr].join('\n');
}

const needsTokenCount =
(this.contextStrategy && !orderedMessages[i].tokenCount) || message.ocr;

/* If tokens were never counted, or, is a Vision request and the message has files, count again */
if (needsTokenCount || (this.isVisionModel && (message.image_urls || message.files))) {
@@ -354,6 +383,10 @@ class AgentClient extends BaseClient {
}));
}

for (let i = 0; i < messages.length; i++) {
this.indexTokenCountMap[i] = messages[i].tokenCount;
}

const result = {
tokenCountMap,
prompt: payload,
@@ -599,6 +632,9 @@ class AgentClient extends BaseClient {
// });
// }

/** @type {TCustomConfig['endpoints']['agents']} */
const agentsEConfig = this.options.req.app.locals[EModelEndpoint.agents];

/** @type {Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string; streamMode: string }} */
const config = {
configurable: {
@@ -606,19 +642,30 @@ class AgentClient extends BaseClient {
last_agent_index: this.agentConfigs?.size ?? 0,
hide_sequential_outputs: this.options.agent.hide_sequential_outputs,
},
recursionLimit: this.options.req.app.locals[EModelEndpoint.agents]?.recursionLimit,
recursionLimit: agentsEConfig?.recursionLimit,
signal: abortController.signal,
streamMode: 'values',
version: 'v2',
};

const initialMessages = formatAgentMessages(payload);
const toolSet = new Set((this.options.agent.tools ?? []).map((tool) => tool && tool.name));
let { messages: initialMessages, indexTokenCountMap } = formatAgentMessages(
payload,
this.indexTokenCountMap,
toolSet,
);
if (legacyContentEndpoints.has(this.options.agent.endpoint)) {
formatContentStrings(initialMessages);
initialMessages = formatContentStrings(initialMessages);
}

/** @type {ReturnType<createRun>} */
let run;
const countTokens = ((text) => this.getTokenCount(text)).bind(this);

/** @type {(message: BaseMessage) => number} */
const tokenCounter = (message) => {
return getTokenCountForMessage(message, countTokens);
};

/**
*
@@ -626,12 +673,23 @@ class AgentClient extends BaseClient {
* @param {BaseMessage[]} messages
* @param {number} [i]
* @param {TMessageContentParts[]} [contentData]
* @param {Record<string, number>} [currentIndexCountMap]
*/
const runAgent = async (agent, _messages, i = 0, contentData = []) => {
const runAgent = async (agent, _messages, i = 0, contentData = [], _currentIndexCountMap) => {
config.configurable.model = agent.model_parameters.model;
const currentIndexCountMap = _currentIndexCountMap ?? indexTokenCountMap;
if (i > 0) {
this.model = agent.model_parameters.model;
}
if (agent.recursion_limit && typeof agent.recursion_limit === 'number') {
config.recursionLimit = agent.recursion_limit;
}
if (
agentsEConfig?.maxRecursionLimit &&
config.recursionLimit > agentsEConfig?.maxRecursionLimit
) {
config.recursionLimit = agentsEConfig?.maxRecursionLimit;
}
config.configurable.agent_id = agent.id;
config.configurable.name = agent.name;
config.configurable.agent_index = i;
@@ -694,11 +752,29 @@ class AgentClient extends BaseClient {
}

if (contentData.length) {
const agentUpdate = {
type: ContentTypes.AGENT_UPDATE,
[ContentTypes.AGENT_UPDATE]: {
index: contentData.length,
runId: this.responseMessageId,
agentId: agent.id,
},
};
const streamData = {
event: GraphEvents.ON_AGENT_UPDATE,
data: agentUpdate,
};
this.options.aggregateContent(streamData);
sendEvent(this.options.res, streamData);
contentData.push(agentUpdate);
run.Graph.contentData = contentData;
}

await run.processStream({ messages }, config, {
keepContent: i !== 0,
tokenCounter,
indexTokenCountMap: currentIndexCountMap,
maxContextTokens: agent.maxContextTokens,
callbacks: {
[Callback.TOOL_ERROR]: (graph, error, toolId) => {
logger.error(
@@ -712,9 +788,13 @@ class AgentClient extends BaseClient {
};

await runAgent(this.options.agent, initialMessages);

let finalContentStart = 0;
if (this.agentConfigs && this.agentConfigs.size > 0) {
if (
this.agentConfigs &&
this.agentConfigs.size > 0 &&
(await checkCapability(this.options.req, AgentCapabilities.chain))
) {
const windowSize = 5;
let latestMessage = initialMessages.pop().content;
if (typeof latestMessage !== 'string') {
latestMessage = latestMessage[0].text;
@@ -722,7 +802,16 @@ class AgentClient extends BaseClient {
let i = 1;
let runMessages = [];

const lastFiveMessages = initialMessages.slice(-5);
const windowIndexCountMap = {};
const windowMessages = initialMessages.slice(-windowSize);
let currentIndex = 4;
for (let i = initialMessages.length - 1; i >= 0; i--) {
windowIndexCountMap[currentIndex] = indexTokenCountMap[i];
currentIndex--;
if (currentIndex < 0) {
break;
}
}
for (const [agentId, agent] of this.agentConfigs) {
if (abortController.signal.aborted === true) {
break;
@@ -757,7 +846,9 @@ class AgentClient extends BaseClient {
}
try {
const contextMessages = [];
for (const message of lastFiveMessages) {
const runIndexCountMap = {};
for (let i = 0; i < windowMessages.length; i++) {
const message = windowMessages[i];
const messageType = message._getType();
if (
(!agent.tools || agent.tools.length === 0) &&
@@ -765,11 +856,13 @@ class AgentClient extends BaseClient {
) {
continue;
}

runIndexCountMap[contextMessages.length] = windowIndexCountMap[i];
contextMessages.push(message);
}
const currentMessages = [...contextMessages, new HumanMessage(bufferString)];
await runAgent(agent, currentMessages, i, contentData);
const bufferMessage = new HumanMessage(bufferString);
runIndexCountMap[contextMessages.length] = tokenCounter(bufferMessage);
const currentMessages = [...contextMessages, bufferMessage];
await runAgent(agent, currentMessages, i, contentData, runIndexCountMap);
} catch (err) {
logger.error(
`[api/server/controllers/agents/client.js #chatCompletion] Error running agent ${agentId} (${i})`,
@@ -780,6 +873,7 @@ class AgentClient extends BaseClient {
}
}

/** Note: not implemented */
if (config.configurable.hide_sequential_outputs !== true) {
finalContentStart = 0;
}

@@ -1,10 +1,11 @@
const fs = require('fs').promises;
const { nanoid } = require('nanoid');
const {
FileContext,
Constants,
Tools,
Constants,
FileContext,
SystemRoles,
EToolResources,
actionDelimiter,
} = require('librechat-data-provider');
const {
@@ -203,14 +204,21 @@ const duplicateAgentHandler = async (req, res) => {
}

const {
_id: __id,
id: _id,
_id: __id,
author: _author,
createdAt: _createdAt,
updatedAt: _updatedAt,
tool_resources: _tool_resources = {},
...cloneData
} = agent;

if (_tool_resources?.[EToolResources.ocr]) {
cloneData.tool_resources = {
[EToolResources.ocr]: _tool_resources[EToolResources.ocr],
};
}

const newAgentId = `agent_${nanoid()}`;
const newAgentData = Object.assign(cloneData, {
id: newAgentId,

@@ -10,7 +10,8 @@ const {
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { createToolCall, getToolCallsByConvo } = require('~/models/ToolCall');
const { loadAuthValues, loadTools } = require('~/app/clients/tools/util');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { loadTools } = require('~/app/clients/tools/util');
const { checkAccess } = require('~/server/middleware');
const { getMessage } = require('~/models/Message');
const { logger } = require('~/config');

@@ -10,7 +10,6 @@ const openAI = require('~/server/services/Endpoints/openAI');
const agents = require('~/server/services/Endpoints/agents');
const custom = require('~/server/services/Endpoints/custom');
const google = require('~/server/services/Endpoints/google');
const { getConvoFiles } = require('~/models/Conversation');
const { handleError } = require('~/server/utils');

const buildFunction = {
@@ -87,16 +86,8 @@ async function buildEndpointOption(req, res, next) {

// TODO: use `getModelsConfig` only when necessary
const modelsConfig = await getModelsConfig(req);
const { resendFiles = true } = req.body.endpointOption;
req.body.endpointOption.modelsConfig = modelsConfig;
if (isAgents && resendFiles && req.body.conversationId) {
const fileIds = await getConvoFiles(req.body.conversationId);
const requestFiles = req.body.files ?? [];
if (requestFiles.length || fileIds.length) {
req.body.endpointOption.attachments = processFiles(requestFiles, fileIds);
}
} else if (req.body.files) {
// hold the promise
if (req.body.files && !isAgents) {
req.body.endpointOption.attachments = processFiles(req.body.files);
}
next();

@@ -16,7 +16,7 @@ const {
} = require('~/server/services/Files/process');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
const { loadAuthValues } = require('~/app/clients/tools/util');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { getAgent } = require('~/models/Agent');
const { getFiles } = require('~/models/File');
const { logger } = require('~/config');

@@ -161,9 +161,9 @@ async function createActionTool({

if (metadata.auth && metadata.auth.type !== AuthTypeEnum.None) {
try {
const action_id = action.action_id;
const identifier = `${req.user.id}:${action.action_id}`;
if (metadata.auth.type === AuthTypeEnum.OAuth && metadata.auth.authorization_url) {
const action_id = action.action_id;
const identifier = `${req.user.id}:${action.action_id}`;
const requestLogin = async () => {
const { args: _args, stepId, ...toolCall } = config.toolCall ?? {};
if (!stepId) {

@@ -1,7 +1,14 @@
const { FileSources, EModelEndpoint, getConfigDefaults } = require('librechat-data-provider');
const {
FileSources,
EModelEndpoint,
loadOCRConfig,
processMCPEnv,
getConfigDefaults,
} = require('librechat-data-provider');
const { checkVariables, checkHealth, checkConfig, checkAzureVariables } = require('./start/checks');
const { azureAssistantsDefaults, assistantsConfigSetup } = require('./start/assistants');
const { initializeFirebase } = require('./Files/Firebase/initialize');
const { initializeS3 } = require('./Files/S3/initialize');
const loadCustomConfig = require('./Config/loadCustomConfig');
const handleRateLimits = require('./Config/handleRateLimits');
const { loadDefaultInterface } = require('./start/interface');
@@ -25,6 +32,7 @@ const AppService = async (app) => {
const config = (await loadCustomConfig()) ?? {};
const configDefaults = getConfigDefaults();

const ocr = loadOCRConfig(config.ocr);
const filteredTools = config.filteredTools;
const includedTools = config.includedTools;
const fileStrategy = config.fileStrategy ?? configDefaults.fileStrategy;
@@ -37,6 +45,8 @@ const AppService = async (app) => {

if (fileStrategy === FileSources.firebase) {
initializeFirebase();
} else if (fileStrategy === FileSources.s3) {
initializeS3();
}

/** @type {Record<string, FunctionTool} */
@@ -48,7 +58,7 @@ const AppService = async (app) => {

if (config.mcpServers != null) {
const mcpManager = await getMCPManager();
await mcpManager.initializeMCP(config.mcpServers);
await mcpManager.initializeMCP(config.mcpServers, processMCPEnv);
await mcpManager.mapAvailableTools(availableTools);
}

@@ -57,6 +67,7 @@ const AppService = async (app) => {
const interfaceConfig = await loadDefaultInterface(config, configDefaults);

const defaultLocals = {
ocr,
paths,
fileStrategy,
socialLogins,

@@ -120,6 +120,7 @@ describe('AppService', () => {
},
},
paths: expect.anything(),
ocr: expect.anything(),
imageOutputType: expect.any(String),
fileConfig: undefined,
secureImageLinks: undefined,
@@ -588,4 +589,33 @@ describe('AppService updating app.locals and issuing warnings', () => {
);
});
});

it('should not parse environment variable references in OCR config', async () => {
// Mock custom configuration with env variable references in OCR config
const mockConfig = {
ocr: {
apiKey: '${OCR_API_KEY_CUSTOM_VAR_NAME}',
baseURL: '${OCR_BASEURL_CUSTOM_VAR_NAME}',
strategy: 'mistral_ocr',
mistralModel: 'mistral-medium',
},
};

require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve(mockConfig));

// Set actual environment variables with different values
process.env.OCR_API_KEY_CUSTOM_VAR_NAME = 'actual-api-key';
process.env.OCR_BASEURL_CUSTOM_VAR_NAME = 'https://actual-ocr-url.com';

// Initialize app
const app = { locals: {} };
await AppService(app);

// Verify that the raw string references were preserved and not interpolated
expect(app.locals.ocr).toBeDefined();
expect(app.locals.ocr.apiKey).toEqual('${OCR_API_KEY_CUSTOM_VAR_NAME}');
expect(app.locals.ocr.baseURL).toEqual('${OCR_BASEURL_CUSTOM_VAR_NAME}');
expect(app.locals.ocr.strategy).toEqual('mistral_ocr');
expect(app.locals.ocr.mistralModel).toEqual('mistral-medium');
});
});

@@ -72,4 +72,15 @@ async function getEndpointsConfig(req) {
return endpointsConfig;
}

module.exports = { getEndpointsConfig };
/**
* @param {ServerRequest} req
* @param {import('librechat-data-provider').AgentCapabilities} capability
* @returns {Promise<boolean>}
*/
const checkCapability = async (req, capability) => {
const endpointsConfig = await getEndpointsConfig(req);
const capabilities = endpointsConfig?.[EModelEndpoint.agents]?.capabilities ?? [];
return capabilities.includes(capability);
};

module.exports = { getEndpointsConfig, checkCapability };

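This helper is what gates the sequential multi-agent run earlier in this compare view (`checkCapability(this.options.req, AgentCapabilities.chain)`), and it is exported from `~/server/services/Config`. A minimal usage sketch:

```js
// Minimal usage sketch mirroring how the agents client gates chaining.
const { AgentCapabilities } = require('librechat-data-provider');
const { checkCapability } = require('~/server/services/Config');

async function canChainAgents(req) {
  // Looks up the agents endpoint config and checks its capabilities list.
  return await checkCapability(req, AgentCapabilities.chain);
}
```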
@@ -2,15 +2,8 @@ const { loadAgent } = require('~/models/Agent');
const { logger } = require('~/config');

const buildOptions = (req, endpoint, parsedBody) => {
const {
spec,
iconURL,
agent_id,
instructions,
maxContextTokens,
resendFiles = true,
...model_parameters
} = parsedBody;
const { spec, iconURL, agent_id, instructions, maxContextTokens, ...model_parameters } =
parsedBody;
const agentPromise = loadAgent({
req,
agent_id,
@@ -24,7 +17,6 @@ const buildOptions = (req, endpoint, parsedBody) => {
iconURL,
endpoint,
agent_id,
resendFiles,
instructions,
maxContextTokens,
model_parameters,

@@ -2,6 +2,7 @@ const { createContentAggregator, Providers } = require('@librechat/agents');
const {
EModelEndpoint,
getResponseSender,
AgentCapabilities,
providerEndpointMap,
} = require('librechat-data-provider');
const {
@@ -15,10 +16,14 @@ const initCustom = require('~/server/services/Endpoints/custom/initialize');
const initGoogle = require('~/server/services/Endpoints/google/initialize');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { processFiles } = require('~/server/services/Files/process');
const { loadAgentTools } = require('~/server/services/ToolService');
const AgentClient = require('~/server/controllers/agents/client');
const { getConvoFiles } = require('~/models/Conversation');
const { getToolFilesByIds } = require('~/models/File');
const { getModelMaxTokens } = require('~/utils');
const { getAgent } = require('~/models/Agent');
const { getFiles } = require('~/models/File');
const { logger } = require('~/config');

const providerConfigMap = {
@@ -34,20 +39,38 @@ const providerConfigMap = {
};

/**
*
* @param {ServerRequest} req
* @param {Promise<Array<MongoFile | null>> | undefined} _attachments
* @param {AgentToolResources | undefined} _tool_resources
* @returns {Promise<{ attachments: Array<MongoFile | undefined> | undefined, tool_resources: AgentToolResources | undefined }>}
*/
const primeResources = async (_attachments, _tool_resources) => {
const primeResources = async (req, _attachments, _tool_resources) => {
try {
/** @type {Array<MongoFile | undefined> | undefined} */
let attachments;
const tool_resources = _tool_resources ?? {};
const isOCREnabled = (req.app.locals?.[EModelEndpoint.agents]?.capabilities ?? []).includes(
AgentCapabilities.ocr,
);
if (tool_resources.ocr?.file_ids && isOCREnabled) {
const context = await getFiles(
{
file_id: { $in: tool_resources.ocr.file_ids },
},
{},
{},
);
attachments = (attachments ?? []).concat(context);
}
if (!_attachments) {
return { attachments: undefined, tool_resources: _tool_resources };
return { attachments, tool_resources };
}
/** @type {Array<MongoFile | undefined> | undefined} */
const files = await _attachments;
const attachments = [];
const tool_resources = _tool_resources ?? {};
if (!attachments) {
/** @type {Array<MongoFile | undefined>} */
attachments = [];
}

for (const file of files) {
if (!file) {
@@ -82,7 +105,6 @@ const primeResources = async (_attachments, _tool_resources) => {
* @param {ServerResponse} params.res
* @param {Agent} params.agent
* @param {object} [params.endpointOption]
* @param {AgentToolResources} [params.tool_resources]
* @param {boolean} [params.isInitialAgent]
* @returns {Promise<Agent>}
*/
@@ -91,9 +113,30 @@ const initializeAgentOptions = async ({
|
||||
res,
|
||||
agent,
|
||||
endpointOption,
|
||||
tool_resources,
|
||||
isInitialAgent = false,
|
||||
}) => {
|
||||
let currentFiles;
|
||||
/** @type {Array<MongoFile>} */
|
||||
const requestFiles = req.body.files ?? [];
|
||||
if (
|
||||
isInitialAgent &&
|
||||
req.body.conversationId != null &&
|
||||
(agent.model_parameters?.resendFiles ?? true) === true
|
||||
) {
|
||||
const fileIds = (await getConvoFiles(req.body.conversationId)) ?? [];
|
||||
const toolFiles = await getToolFilesByIds(fileIds);
|
||||
if (requestFiles.length || toolFiles.length) {
|
||||
currentFiles = await processFiles(requestFiles.concat(toolFiles));
|
||||
}
|
||||
} else if (isInitialAgent && requestFiles.length) {
|
||||
currentFiles = await processFiles(requestFiles);
|
||||
}
|
||||
|
||||
const { attachments, tool_resources } = await primeResources(
|
||||
req,
|
||||
currentFiles,
|
||||
agent.tool_resources,
|
||||
);
|
||||
const { tools, toolContextMap } = await loadAgentTools({
|
||||
req,
|
||||
res,
|
||||
@@ -138,6 +181,7 @@ const initializeAgentOptions = async ({
|
||||
agent.provider = options.provider;
|
||||
}
|
||||
|
||||
/** @type {import('@librechat/agents').ClientOptions} */
|
||||
agent.model_parameters = Object.assign(model_parameters, options.llmConfig);
|
||||
if (options.configOptions) {
|
||||
agent.model_parameters.configuration = options.configOptions;
|
||||
@@ -156,15 +200,16 @@ const initializeAgentOptions = async ({
|
||||
|
||||
const tokensModel =
|
||||
agent.provider === EModelEndpoint.azureOpenAI ? agent.model : agent.model_parameters.model;
|
||||
const maxTokens = agent.model_parameters.maxOutputTokens ?? agent.model_parameters.maxTokens ?? 0;
|
||||
|
||||
return {
|
||||
...agent,
|
||||
tools,
|
||||
attachments,
|
||||
toolContextMap,
|
||||
maxContextTokens:
|
||||
agent.max_context_tokens ??
|
||||
getModelMaxTokens(tokensModel, providerEndpointMap[provider]) ??
|
||||
4000,
|
||||
((getModelMaxTokens(tokensModel, providerEndpointMap[provider]) ?? 4000) - maxTokens) * 0.9,
|
||||
};
|
||||
};
|
||||
|
||||
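The new default budget above reserves room for the model's own output: the provider window (or the 4000-token fallback) minus the configured max output tokens, scaled by 0.9. A quick worked example with hypothetical numbers:

// Hypothetical values, for illustration of the fallback formula above only.
const modelWindow = 8192;     // getModelMaxTokens(tokensModel, ...) ?? 4000
const maxTokens = 1024;       // maxOutputTokens ?? maxTokens ?? 0
const maxContextTokens = (modelWindow - maxTokens) * 0.9; // => 6451.2 tokens left for input context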
@@ -197,11 +242,6 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
throw new Error('Agent not found');
|
||||
}
|
||||
|
||||
const { attachments, tool_resources } = await primeResources(
|
||||
endpointOption.attachments,
|
||||
primaryAgent.tool_resources,
|
||||
);
|
||||
|
||||
const agentConfigs = new Map();
|
||||
|
||||
// Handle primary agent
|
||||
@@ -210,7 +250,6 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
res,
|
||||
agent: primaryAgent,
|
||||
endpointOption,
|
||||
tool_resources,
|
||||
isInitialAgent: true,
|
||||
});
|
||||
|
||||
@@ -240,18 +279,21 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
|
||||
const client = new AgentClient({
|
||||
req,
|
||||
agent: primaryConfig,
|
||||
res,
|
||||
sender,
|
||||
attachments,
|
||||
contentParts,
|
||||
agentConfigs,
|
||||
eventHandlers,
|
||||
collectedUsage,
|
||||
aggregateContent,
|
||||
artifactPromises,
|
||||
agent: primaryConfig,
|
||||
spec: endpointOption.spec,
|
||||
iconURL: endpointOption.iconURL,
|
||||
agentConfigs,
|
||||
endpoint: EModelEndpoint.agents,
|
||||
attachments: primaryConfig.attachments,
|
||||
maxContextTokens: primaryConfig.maxContextTokens,
|
||||
resendFiles: primaryConfig.model_parameters?.resendFiles ?? true,
|
||||
});
|
||||
|
||||
return { client };
|
||||
|
||||
@@ -23,8 +23,9 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
const agent = {
|
||||
id: EModelEndpoint.bedrock,
|
||||
name: endpointOption.name,
|
||||
instructions: endpointOption.promptPrefix,
|
||||
provider: EModelEndpoint.bedrock,
|
||||
endpoint: EModelEndpoint.bedrock,
|
||||
instructions: endpointOption.promptPrefix,
|
||||
model: endpointOption.model_parameters.model,
|
||||
model_parameters: endpointOption.model_parameters,
|
||||
};
|
||||
@@ -54,6 +55,7 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
|
||||
const client = new AgentClient({
|
||||
req,
|
||||
res,
|
||||
agent,
|
||||
sender,
|
||||
// tools,
|
||||
|
||||
@@ -135,12 +135,9 @@ const initializeClient = async ({
|
||||
}
|
||||
|
||||
if (optionsOnly) {
|
||||
clientOptions = Object.assign(
|
||||
{
|
||||
modelOptions: endpointOption.model_parameters,
|
||||
},
|
||||
clientOptions,
|
||||
);
|
||||
const modelOptions = endpointOption.model_parameters;
|
||||
modelOptions.model = modelName;
|
||||
clientOptions = Object.assign({ modelOptions }, clientOptions);
|
||||
clientOptions.modelOptions.user = req.user.id;
|
||||
const options = getLLMConfig(apiKey, clientOptions);
|
||||
if (!clientOptions.streamRate) {
|
||||
|
||||
@@ -28,7 +28,7 @@ const { isEnabled } = require('~/server/utils');
|
||||
* @returns {Object} Configuration options for creating an LLM instance.
|
||||
*/
|
||||
function getLLMConfig(apiKey, options = {}, endpoint = null) {
|
||||
const {
|
||||
let {
|
||||
modelOptions = {},
|
||||
reverseProxyUrl,
|
||||
defaultQuery,
|
||||
@@ -50,10 +50,32 @@ function getLLMConfig(apiKey, options = {}, endpoint = null) {
|
||||
if (addParams && typeof addParams === 'object') {
|
||||
Object.assign(llmConfig, addParams);
|
||||
}
|
||||
/** Note: OpenAI Web Search models do not support any known parameters besides `max_tokens` */
|
||||
if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) {
|
||||
const searchExcludeParams = [
|
||||
'frequency_penalty',
|
||||
'presence_penalty',
|
||||
'temperature',
|
||||
'top_p',
|
||||
'top_k',
|
||||
'stop',
|
||||
'logit_bias',
|
||||
'seed',
|
||||
'response_format',
|
||||
'n',
|
||||
'logprobs',
|
||||
'user',
|
||||
];
|
||||
|
||||
dropParams = dropParams || [];
|
||||
dropParams = [...new Set([...dropParams, ...searchExcludeParams])];
|
||||
}
|
||||
|
||||
if (dropParams && Array.isArray(dropParams)) {
|
||||
dropParams.forEach((param) => {
|
||||
delete llmConfig[param];
|
||||
if (llmConfig[param]) {
|
||||
llmConfig[param] = undefined;
|
||||
}
|
||||
});
|
||||
}
|
||||
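A minimal sketch of the net effect of the merge above, using a hypothetical config: any option in the combined dropParams list is cleared before the request is built.

// Hypothetical values, illustration only.
let dropParams = ['seed'];
const searchExcludeParams = ['temperature', 'top_p', 'presence_penalty', 'frequency_penalty'];
dropParams = [...new Set([...dropParams, ...searchExcludeParams])];

const llmConfig = { model: 'gpt-4o-search-preview', temperature: 0.7, seed: 7, max_tokens: 512 };
dropParams.forEach((param) => {
  delete llmConfig[param]; // one way to clear the unsupported options
});
// llmConfig is now { model: 'gpt-4o-search-preview', max_tokens: 512 }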
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
const axios = require('axios');
|
||||
const FormData = require('form-data');
|
||||
const { getCodeBaseURL } = require('@librechat/agents');
|
||||
const { createAxiosInstance } = require('~/config');
|
||||
const { logAxiosError } = require('~/utils');
|
||||
|
||||
const axios = createAxiosInstance();
|
||||
|
||||
const MAX_FILE_SIZE = 150 * 1024 * 1024;
|
||||
|
||||
/**
|
||||
@@ -27,13 +29,6 @@ async function getCodeOutputDownloadStream(fileIdentifier, apiKey) {
|
||||
timeout: 15000,
|
||||
};
|
||||
|
||||
if (process.env.PROXY) {
|
||||
options.proxy = {
|
||||
host: process.env.PROXY,
|
||||
protocol: process.env.PROXY.startsWith('https') ? 'https' : 'http',
|
||||
};
|
||||
}
|
||||
|
||||
const response = await axios(options);
|
||||
return response;
|
||||
} catch (error) {
|
||||
@@ -79,13 +74,6 @@ async function uploadCodeEnvFile({ req, stream, filename, apiKey, entity_id = ''
|
||||
maxBodyLength: MAX_FILE_SIZE,
|
||||
};
|
||||
|
||||
if (process.env.PROXY) {
|
||||
options.proxy = {
|
||||
host: process.env.PROXY,
|
||||
protocol: process.env.PROXY.startsWith('https') ? 'https' : 'http',
|
||||
};
|
||||
}
|
||||
|
||||
const response = await axios.post(`${baseURL}/upload`, form, options);
|
||||
|
||||
/** @type {{ message: string; session_id: string; files: Array<{ fileId: string; filename: string }> }} */
|
||||
|
||||
207
api/server/services/Files/MistralOCR/crud.js
Normal file
@@ -0,0 +1,207 @@
|
||||
// ~/server/services/Files/MistralOCR/crud.js
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const FormData = require('form-data');
|
||||
const { FileSources, envVarRegex, extractEnvVariable } = require('librechat-data-provider');
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
const { logger, createAxiosInstance } = require('~/config');
|
||||
const { logAxiosError } = require('~/utils');
|
||||
|
||||
const axios = createAxiosInstance();
|
||||
|
||||
/**
|
||||
* Uploads a document to Mistral API using file streaming to avoid loading the entire file into memory
|
||||
*
|
||||
* @param {Object} params Upload parameters
|
||||
* @param {string} params.filePath The path to the file on disk
|
||||
* @param {string} [params.fileName] Optional filename to use (defaults to the name from filePath)
|
||||
* @param {string} params.apiKey Mistral API key
|
||||
* @param {string} [params.baseURL=https://api.mistral.ai/v1] Mistral API base URL
|
||||
* @returns {Promise<Object>} The response from Mistral API
|
||||
*/
|
||||
async function uploadDocumentToMistral({
|
||||
filePath,
|
||||
fileName = '',
|
||||
apiKey,
|
||||
baseURL = 'https://api.mistral.ai/v1',
|
||||
}) {
|
||||
const form = new FormData();
|
||||
form.append('purpose', 'ocr');
|
||||
const actualFileName = fileName || path.basename(filePath);
|
||||
const fileStream = fs.createReadStream(filePath);
|
||||
form.append('file', fileStream, { filename: actualFileName });
|
||||
|
||||
return axios
|
||||
.post(`${baseURL}/files`, form, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
...form.getHeaders(),
|
||||
},
|
||||
maxBodyLength: Infinity,
|
||||
maxContentLength: Infinity,
|
||||
})
|
||||
.then((res) => res.data)
|
||||
.catch((error) => {
|
||||
logger.error('Error uploading document to Mistral:', error.message);
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
async function getSignedUrl({
|
||||
apiKey,
|
||||
fileId,
|
||||
expiry = 24,
|
||||
baseURL = 'https://api.mistral.ai/v1',
|
||||
}) {
|
||||
return axios
|
||||
.get(`${baseURL}/files/${fileId}/url?expiry=${expiry}`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
},
|
||||
})
|
||||
.then((res) => res.data)
|
||||
.catch((error) => {
|
||||
logger.error('Error fetching signed URL:', error.message);
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Object} params
|
||||
* @param {string} params.apiKey
|
||||
* @param {string} params.documentUrl
|
||||
* @param {string} [params.baseURL]
|
||||
* @returns {Promise<OCRResult>}
|
||||
*/
|
||||
async function performOCR({
|
||||
apiKey,
|
||||
documentUrl,
|
||||
model = 'mistral-ocr-latest',
|
||||
baseURL = 'https://api.mistral.ai/v1',
|
||||
}) {
|
||||
return axios
|
||||
.post(
|
||||
`${baseURL}/ocr`,
|
||||
{
|
||||
model,
|
||||
include_image_base64: false,
|
||||
document: {
|
||||
type: 'document_url',
|
||||
document_url: documentUrl,
|
||||
},
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
},
|
||||
},
|
||||
)
|
||||
.then((res) => res.data)
|
||||
.catch((error) => {
|
||||
logger.error('Error performing OCR:', error.message);
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
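Taken together, the three helpers above form the OCR pipeline used further down: upload the document, request a signed URL for it, then run OCR against that URL. A minimal sketch, with a placeholder path and an API key assumed to be available:

// Illustration only; the file path and API key are placeholders.
async function ocrLocalPdf(filePath, apiKey) {
  const uploaded = await uploadDocumentToMistral({ filePath, apiKey });
  const { url } = await getSignedUrl({ apiKey, fileId: uploaded.id });
  const result = await performOCR({ apiKey, documentUrl: url });
  return result.pages.map((page) => page.markdown).join('\n\n');
}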
|
||||
function extractVariableName(str) {
|
||||
const match = str.match(envVarRegex);
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
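The imported `envVarRegex` is assumed here to match `${VAR_NAME}`-style references and capture the variable name, which is what the configuration handling below relies on:

// Assumes envVarRegex behaves roughly like /\$\{(\w+)\}/ (illustrative only).
extractVariableName('${OCR_API_KEY}'); // => 'OCR_API_KEY'
extractVariableName('literal-key');    // => null, treated as a direct value below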
|
||||
const uploadMistralOCR = async ({ req, file, file_id, entity_id }) => {
|
||||
try {
|
||||
/** @type {TCustomConfig['ocr']} */
|
||||
const ocrConfig = req.app.locals?.ocr;
|
||||
|
||||
const apiKeyConfig = ocrConfig.apiKey || '';
|
||||
const baseURLConfig = ocrConfig.baseURL || '';
|
||||
|
||||
const isApiKeyEnvVar = envVarRegex.test(apiKeyConfig);
|
||||
const isBaseURLEnvVar = envVarRegex.test(baseURLConfig);
|
||||
|
||||
const isApiKeyEmpty = !apiKeyConfig.trim();
|
||||
const isBaseURLEmpty = !baseURLConfig.trim();
|
||||
|
||||
let apiKey, baseURL;
|
||||
|
||||
if (isApiKeyEnvVar || isBaseURLEnvVar || isApiKeyEmpty || isBaseURLEmpty) {
|
||||
const apiKeyVarName = isApiKeyEnvVar ? extractVariableName(apiKeyConfig) : 'OCR_API_KEY';
|
||||
const baseURLVarName = isBaseURLEnvVar ? extractVariableName(baseURLConfig) : 'OCR_BASEURL';
|
||||
|
||||
const authValues = await loadAuthValues({
|
||||
userId: req.user.id,
|
||||
authFields: [baseURLVarName, apiKeyVarName],
|
||||
optional: new Set([baseURLVarName]),
|
||||
});
|
||||
|
||||
apiKey = authValues[apiKeyVarName];
|
||||
baseURL = authValues[baseURLVarName];
|
||||
} else {
|
||||
apiKey = apiKeyConfig;
|
||||
baseURL = baseURLConfig;
|
||||
}
|
||||
|
||||
const mistralFile = await uploadDocumentToMistral({
|
||||
filePath: file.path,
|
||||
fileName: file.originalname,
|
||||
apiKey,
|
||||
baseURL,
|
||||
});
|
||||
|
||||
const modelConfig = ocrConfig.mistralModel || '';
|
||||
const model = envVarRegex.test(modelConfig)
|
||||
? extractEnvVariable(modelConfig)
|
||||
: modelConfig.trim() || 'mistral-ocr-latest';
|
||||
|
||||
const signedUrlResponse = await getSignedUrl({
|
||||
apiKey,
|
||||
baseURL,
|
||||
fileId: mistralFile.id,
|
||||
});
|
||||
|
||||
const ocrResult = await performOCR({
|
||||
apiKey,
|
||||
baseURL,
|
||||
model,
|
||||
documentUrl: signedUrlResponse.url,
|
||||
});
|
||||
|
||||
let aggregatedText = '';
|
||||
const images = [];
|
||||
ocrResult.pages.forEach((page, index) => {
|
||||
if (ocrResult.pages.length > 1) {
|
||||
aggregatedText += `# PAGE ${index + 1}\n`;
|
||||
}
|
||||
|
||||
aggregatedText += page.markdown + '\n\n';
|
||||
|
||||
if (page.images && page.images.length > 0) {
|
||||
page.images.forEach((image) => {
|
||||
if (image.image_base64) {
|
||||
images.push(image.image_base64);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
filename: file.originalname,
|
||||
bytes: aggregatedText.length * 4,
|
||||
filepath: FileSources.mistral_ocr,
|
||||
text: aggregatedText,
|
||||
images,
|
||||
};
|
||||
} catch (error) {
|
||||
const message = 'Error uploading document to Mistral OCR API';
|
||||
logAxiosError({ error, message });
|
||||
throw new Error(message);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
uploadDocumentToMistral,
|
||||
uploadMistralOCR,
|
||||
getSignedUrl,
|
||||
performOCR,
|
||||
};
|
||||
737
api/server/services/Files/MistralOCR/crud.spec.js
Normal file
@@ -0,0 +1,737 @@
|
||||
const fs = require('fs');
|
||||
|
||||
const mockAxios = {
|
||||
interceptors: {
|
||||
request: { use: jest.fn(), eject: jest.fn() },
|
||||
response: { use: jest.fn(), eject: jest.fn() },
|
||||
},
|
||||
create: jest.fn().mockReturnValue({
|
||||
defaults: {
|
||||
proxy: null,
|
||||
},
|
||||
get: jest.fn().mockResolvedValue({ data: {} }),
|
||||
post: jest.fn().mockResolvedValue({ data: {} }),
|
||||
put: jest.fn().mockResolvedValue({ data: {} }),
|
||||
delete: jest.fn().mockResolvedValue({ data: {} }),
|
||||
}),
|
||||
get: jest.fn().mockResolvedValue({ data: {} }),
|
||||
post: jest.fn().mockResolvedValue({ data: {} }),
|
||||
put: jest.fn().mockResolvedValue({ data: {} }),
|
||||
delete: jest.fn().mockResolvedValue({ data: {} }),
|
||||
reset: jest.fn().mockImplementation(function () {
|
||||
this.get.mockClear();
|
||||
this.post.mockClear();
|
||||
this.put.mockClear();
|
||||
this.delete.mockClear();
|
||||
this.create.mockClear();
|
||||
}),
|
||||
};
|
||||
|
||||
jest.mock('axios', () => mockAxios);
|
||||
jest.mock('fs');
|
||||
jest.mock('~/utils', () => ({
|
||||
logAxiosError: jest.fn(),
|
||||
}));
|
||||
jest.mock('~/config', () => ({
|
||||
logger: {
|
||||
error: jest.fn(),
|
||||
},
|
||||
createAxiosInstance: () => mockAxios,
|
||||
}));
|
||||
jest.mock('~/server/services/Tools/credentials', () => ({
|
||||
loadAuthValues: jest.fn(),
|
||||
}));
|
||||
|
||||
const { uploadDocumentToMistral, uploadMistralOCR, getSignedUrl, performOCR } = require('./crud');
|
||||
|
||||
describe('MistralOCR Service', () => {
|
||||
afterEach(() => {
|
||||
mockAxios.reset();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('uploadDocumentToMistral', () => {
|
||||
beforeEach(() => {
|
||||
// Create a more complete mock for file streams that FormData can work with
|
||||
const mockReadStream = {
|
||||
on: jest.fn().mockImplementation(function (event, handler) {
|
||||
// Simulate immediate 'end' event to make FormData complete processing
|
||||
if (event === 'end') {
|
||||
handler();
|
||||
}
|
||||
return this;
|
||||
}),
|
||||
pipe: jest.fn().mockImplementation(function () {
|
||||
return this;
|
||||
}),
|
||||
pause: jest.fn(),
|
||||
resume: jest.fn(),
|
||||
emit: jest.fn(),
|
||||
once: jest.fn(),
|
||||
destroy: jest.fn(),
|
||||
};
|
||||
|
||||
fs.createReadStream = jest.fn().mockReturnValue(mockReadStream);
|
||||
|
||||
// Mock FormData's append to avoid actual stream processing
|
||||
jest.mock('form-data', () => {
|
||||
const mockFormData = function () {
|
||||
return {
|
||||
append: jest.fn(),
|
||||
getHeaders: jest
|
||||
.fn()
|
||||
.mockReturnValue({ 'content-type': 'multipart/form-data; boundary=---boundary' }),
|
||||
getBuffer: jest.fn().mockReturnValue(Buffer.from('mock-form-data')),
|
||||
getLength: jest.fn().mockReturnValue(100),
|
||||
};
|
||||
};
|
||||
return mockFormData;
|
||||
});
|
||||
});
|
||||
|
||||
it('should upload a document to Mistral API using file streaming', async () => {
|
||||
const mockResponse = { data: { id: 'file-123', purpose: 'ocr' } };
|
||||
mockAxios.post.mockResolvedValueOnce(mockResponse);
|
||||
|
||||
const result = await uploadDocumentToMistral({
|
||||
filePath: '/path/to/test.pdf',
|
||||
fileName: 'test.pdf',
|
||||
apiKey: 'test-api-key',
|
||||
});
|
||||
|
||||
// Check that createReadStream was called with the correct file path
|
||||
expect(fs.createReadStream).toHaveBeenCalledWith('/path/to/test.pdf');
|
||||
|
||||
// Since we're mocking FormData, we'll just check that axios was called correctly
|
||||
expect(mockAxios.post).toHaveBeenCalledWith(
|
||||
'https://api.mistral.ai/v1/files',
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
Authorization: 'Bearer test-api-key',
|
||||
}),
|
||||
maxBodyLength: Infinity,
|
||||
maxContentLength: Infinity,
|
||||
}),
|
||||
);
|
||||
expect(result).toEqual(mockResponse.data);
|
||||
});
|
||||
|
||||
it('should handle errors during document upload', async () => {
|
||||
const errorMessage = 'API error';
|
||||
mockAxios.post.mockRejectedValueOnce(new Error(errorMessage));
|
||||
|
||||
await expect(
|
||||
uploadDocumentToMistral({
|
||||
filePath: '/path/to/test.pdf',
|
||||
fileName: 'test.pdf',
|
||||
apiKey: 'test-api-key',
|
||||
}),
|
||||
).rejects.toThrow();
|
||||
|
||||
const { logger } = require('~/config');
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Error uploading document to Mistral:'),
|
||||
expect.any(String),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getSignedUrl', () => {
|
||||
it('should fetch signed URL from Mistral API', async () => {
|
||||
const mockResponse = { data: { url: 'https://document-url.com' } };
|
||||
mockAxios.get.mockResolvedValueOnce(mockResponse);
|
||||
|
||||
const result = await getSignedUrl({
|
||||
fileId: 'file-123',
|
||||
apiKey: 'test-api-key',
|
||||
});
|
||||
|
||||
expect(mockAxios.get).toHaveBeenCalledWith(
|
||||
'https://api.mistral.ai/v1/files/file-123/url?expiry=24',
|
||||
{
|
||||
headers: {
|
||||
Authorization: 'Bearer test-api-key',
|
||||
},
|
||||
},
|
||||
);
|
||||
expect(result).toEqual(mockResponse.data);
|
||||
});
|
||||
|
||||
it('should handle errors when fetching signed URL', async () => {
|
||||
const errorMessage = 'API error';
|
||||
mockAxios.get.mockRejectedValueOnce(new Error(errorMessage));
|
||||
|
||||
await expect(
|
||||
getSignedUrl({
|
||||
fileId: 'file-123',
|
||||
apiKey: 'test-api-key',
|
||||
}),
|
||||
).rejects.toThrow();
|
||||
|
||||
const { logger } = require('~/config');
|
||||
expect(logger.error).toHaveBeenCalledWith('Error fetching signed URL:', errorMessage);
|
||||
});
|
||||
});
|
||||
|
||||
describe('performOCR', () => {
|
||||
it('should perform OCR using Mistral API', async () => {
|
||||
const mockResponse = {
|
||||
data: {
|
||||
pages: [{ markdown: 'Page 1 content' }, { markdown: 'Page 2 content' }],
|
||||
},
|
||||
};
|
||||
mockAxios.post.mockResolvedValueOnce(mockResponse);
|
||||
|
||||
const result = await performOCR({
|
||||
apiKey: 'test-api-key',
|
||||
documentUrl: 'https://document-url.com',
|
||||
model: 'mistral-ocr-latest',
|
||||
});
|
||||
|
||||
expect(mockAxios.post).toHaveBeenCalledWith(
|
||||
'https://api.mistral.ai/v1/ocr',
|
||||
{
|
||||
model: 'mistral-ocr-latest',
|
||||
include_image_base64: false,
|
||||
document: {
|
||||
type: 'document_url',
|
||||
document_url: 'https://document-url.com',
|
||||
},
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: 'Bearer test-api-key',
|
||||
},
|
||||
},
|
||||
);
|
||||
expect(result).toEqual(mockResponse.data);
|
||||
});
|
||||
|
||||
it('should handle errors during OCR processing', async () => {
|
||||
const errorMessage = 'OCR processing error';
|
||||
mockAxios.post.mockRejectedValueOnce(new Error(errorMessage));
|
||||
|
||||
await expect(
|
||||
performOCR({
|
||||
apiKey: 'test-api-key',
|
||||
documentUrl: 'https://document-url.com',
|
||||
}),
|
||||
).rejects.toThrow();
|
||||
|
||||
const { logger } = require('~/config');
|
||||
expect(logger.error).toHaveBeenCalledWith('Error performing OCR:', errorMessage);
|
||||
});
|
||||
});
|
||||
|
||||
describe('uploadMistralOCR', () => {
|
||||
beforeEach(() => {
|
||||
const mockReadStream = {
|
||||
on: jest.fn().mockImplementation(function (event, handler) {
|
||||
if (event === 'end') {
|
||||
handler();
|
||||
}
|
||||
return this;
|
||||
}),
|
||||
pipe: jest.fn().mockImplementation(function () {
|
||||
return this;
|
||||
}),
|
||||
pause: jest.fn(),
|
||||
resume: jest.fn(),
|
||||
emit: jest.fn(),
|
||||
once: jest.fn(),
|
||||
destroy: jest.fn(),
|
||||
};
|
||||
|
||||
fs.createReadStream = jest.fn().mockReturnValue(mockReadStream);
|
||||
});
|
||||
|
||||
it('should process OCR for a file with standard configuration', async () => {
|
||||
// Setup mocks
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
loadAuthValues.mockResolvedValue({
|
||||
OCR_API_KEY: 'test-api-key',
|
||||
OCR_BASEURL: 'https://api.mistral.ai/v1',
|
||||
});
|
||||
|
||||
// Mock file upload response
|
||||
mockAxios.post.mockResolvedValueOnce({
|
||||
data: { id: 'file-123', purpose: 'ocr' },
|
||||
});
|
||||
|
||||
// Mock signed URL response
|
||||
mockAxios.get.mockResolvedValueOnce({
|
||||
data: { url: 'https://signed-url.com' },
|
||||
});
|
||||
|
||||
// Mock OCR response with text and images
|
||||
mockAxios.post.mockResolvedValueOnce({
|
||||
data: {
|
||||
pages: [
|
||||
{
|
||||
markdown: 'Page 1 content',
|
||||
images: [{ image_base64: 'base64image1' }],
|
||||
},
|
||||
{
|
||||
markdown: 'Page 2 content',
|
||||
images: [{ image_base64: 'base64image2' }],
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
const req = {
|
||||
user: { id: 'user123' },
|
||||
app: {
|
||||
locals: {
|
||||
ocr: {
|
||||
// Use environment variable syntax to ensure loadAuthValues is called
|
||||
apiKey: '${OCR_API_KEY}',
|
||||
baseURL: '${OCR_BASEURL}',
|
||||
mistralModel: 'mistral-medium',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const file = {
|
||||
path: '/tmp/upload/file.pdf',
|
||||
originalname: 'document.pdf',
|
||||
};
|
||||
|
||||
const result = await uploadMistralOCR({
|
||||
req,
|
||||
file,
|
||||
file_id: 'file123',
|
||||
entity_id: 'entity123',
|
||||
});
|
||||
|
||||
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
|
||||
|
||||
expect(loadAuthValues).toHaveBeenCalledWith({
|
||||
userId: 'user123',
|
||||
authFields: ['OCR_BASEURL', 'OCR_API_KEY'],
|
||||
optional: expect.any(Set),
|
||||
});
|
||||
|
||||
// Verify OCR result
|
||||
expect(result).toEqual({
|
||||
filename: 'document.pdf',
|
||||
bytes: expect.any(Number),
|
||||
filepath: 'mistral_ocr',
|
||||
text: expect.stringContaining('# PAGE 1'),
|
||||
images: ['base64image1', 'base64image2'],
|
||||
});
|
||||
});
|
||||
|
||||
it('should process variable references in configuration', async () => {
|
||||
// Setup mocks with environment variables
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
loadAuthValues.mockResolvedValue({
|
||||
CUSTOM_API_KEY: 'custom-api-key',
|
||||
CUSTOM_BASEURL: 'https://custom-api.mistral.ai/v1',
|
||||
});
|
||||
|
||||
// Mock API responses
|
||||
mockAxios.post.mockResolvedValueOnce({
|
||||
data: { id: 'file-123', purpose: 'ocr' },
|
||||
});
|
||||
mockAxios.get.mockResolvedValueOnce({
|
||||
data: { url: 'https://signed-url.com' },
|
||||
});
|
||||
mockAxios.post.mockResolvedValueOnce({
|
||||
data: {
|
||||
pages: [{ markdown: 'Content from custom API' }],
|
||||
},
|
||||
});
|
||||
|
||||
const req = {
|
||||
user: { id: 'user123' },
|
||||
app: {
|
||||
locals: {
|
||||
ocr: {
|
||||
apiKey: '${CUSTOM_API_KEY}',
|
||||
baseURL: '${CUSTOM_BASEURL}',
|
||||
mistralModel: '${CUSTOM_MODEL}',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Set environment variable for model
|
||||
process.env.CUSTOM_MODEL = 'mistral-large';
|
||||
|
||||
const file = {
|
||||
path: '/tmp/upload/file.pdf',
|
||||
originalname: 'document.pdf',
|
||||
};
|
||||
|
||||
const result = await uploadMistralOCR({
|
||||
req,
|
||||
file,
|
||||
file_id: 'file123',
|
||||
entity_id: 'entity123',
|
||||
});
|
||||
|
||||
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
|
||||
|
||||
// Verify that custom environment variables were extracted and used
|
||||
expect(loadAuthValues).toHaveBeenCalledWith({
|
||||
userId: 'user123',
|
||||
authFields: ['CUSTOM_BASEURL', 'CUSTOM_API_KEY'],
|
||||
optional: expect.any(Set),
|
||||
});
|
||||
|
||||
// Check that mistral-large was used in the OCR API call
|
||||
expect(mockAxios.post).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
model: 'mistral-large',
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
|
||||
expect(result.text).toEqual('Content from custom API\n\n');
|
||||
});
|
||||
|
||||
it('should fall back to default values when variables are not properly formatted', async () => {
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
loadAuthValues.mockResolvedValue({
|
||||
OCR_API_KEY: 'default-api-key',
|
||||
OCR_BASEURL: undefined, // Testing optional parameter
|
||||
});
|
||||
|
||||
mockAxios.post.mockResolvedValueOnce({
|
||||
data: { id: 'file-123', purpose: 'ocr' },
|
||||
});
|
||||
mockAxios.get.mockResolvedValueOnce({
|
||||
data: { url: 'https://signed-url.com' },
|
||||
});
|
||||
mockAxios.post.mockResolvedValueOnce({
|
||||
data: {
|
||||
pages: [{ markdown: 'Default API result' }],
|
||||
},
|
||||
});
|
||||
|
||||
const req = {
|
||||
user: { id: 'user123' },
|
||||
app: {
|
||||
locals: {
|
||||
ocr: {
|
||||
// Use environment variable syntax to ensure loadAuthValues is called
|
||||
apiKey: '${INVALID_FORMAT}', // Using valid env var format but with an invalid name
|
||||
baseURL: '${OCR_BASEURL}', // Using valid env var format
|
||||
mistralModel: 'mistral-ocr-latest', // Plain string value
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const file = {
|
||||
path: '/tmp/upload/file.pdf',
|
||||
originalname: 'document.pdf',
|
||||
};
|
||||
|
||||
await uploadMistralOCR({
|
||||
req,
|
||||
file,
|
||||
file_id: 'file123',
|
||||
entity_id: 'entity123',
|
||||
});
|
||||
|
||||
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
|
||||
|
||||
// Should use the default values
|
||||
expect(loadAuthValues).toHaveBeenCalledWith({
|
||||
userId: 'user123',
|
||||
authFields: ['OCR_BASEURL', 'INVALID_FORMAT'],
|
||||
optional: expect.any(Set),
|
||||
});
|
||||
|
||||
// Should use the default model when not using environment variable format
|
||||
expect(mockAxios.post).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
model: 'mistral-ocr-latest',
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle API errors during OCR process', async () => {
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
loadAuthValues.mockResolvedValue({
|
||||
OCR_API_KEY: 'test-api-key',
|
||||
});
|
||||
|
||||
// Mock file upload to fail
|
||||
mockAxios.post.mockRejectedValueOnce(new Error('Upload failed'));
|
||||
|
||||
const req = {
|
||||
user: { id: 'user123' },
|
||||
app: {
|
||||
locals: {
|
||||
ocr: {
|
||||
apiKey: 'OCR_API_KEY',
|
||||
baseURL: 'OCR_BASEURL',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const file = {
|
||||
path: '/tmp/upload/file.pdf',
|
||||
originalname: 'document.pdf',
|
||||
};
|
||||
|
||||
await expect(
|
||||
uploadMistralOCR({
|
||||
req,
|
||||
file,
|
||||
file_id: 'file123',
|
||||
entity_id: 'entity123',
|
||||
}),
|
||||
).rejects.toThrow('Error uploading document to Mistral OCR API');
|
||||
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
|
||||
|
||||
const { logAxiosError } = require('~/utils');
|
||||
expect(logAxiosError).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle single page documents without page numbering', async () => {
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
loadAuthValues.mockResolvedValue({
|
||||
OCR_API_KEY: 'test-api-key',
|
||||
OCR_BASEURL: 'https://api.mistral.ai/v1', // Make sure this is included
|
||||
});
|
||||
|
||||
// Clear all previous mocks
|
||||
mockAxios.post.mockClear();
|
||||
mockAxios.get.mockClear();
|
||||
|
||||
// 1. First mock: File upload response
|
||||
mockAxios.post.mockImplementationOnce(() =>
|
||||
Promise.resolve({ data: { id: 'file-123', purpose: 'ocr' } }),
|
||||
);
|
||||
|
||||
// 2. Second mock: Signed URL response
|
||||
mockAxios.get.mockImplementationOnce(() =>
|
||||
Promise.resolve({ data: { url: 'https://signed-url.com' } }),
|
||||
);
|
||||
|
||||
// 3. Third mock: OCR response
|
||||
mockAxios.post.mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
data: {
|
||||
pages: [{ markdown: 'Single page content' }],
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
const req = {
|
||||
user: { id: 'user123' },
|
||||
app: {
|
||||
locals: {
|
||||
ocr: {
|
||||
apiKey: 'OCR_API_KEY',
|
||||
baseURL: 'OCR_BASEURL',
|
||||
mistralModel: 'mistral-ocr-latest',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const file = {
|
||||
path: '/tmp/upload/file.pdf',
|
||||
originalname: 'single-page.pdf',
|
||||
};
|
||||
|
||||
const result = await uploadMistralOCR({
|
||||
req,
|
||||
file,
|
||||
file_id: 'file123',
|
||||
entity_id: 'entity123',
|
||||
});
|
||||
|
||||
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
|
||||
|
||||
// Verify that single page documents don't include page numbering
|
||||
expect(result.text).not.toContain('# PAGE');
|
||||
expect(result.text).toEqual('Single page content\n\n');
|
||||
});
|
||||
|
||||
it('should use literal values in configuration when provided directly', async () => {
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
// We'll still mock this but it should not be used for literal values
|
||||
loadAuthValues.mockResolvedValue({});
|
||||
|
||||
// Clear all previous mocks
|
||||
mockAxios.post.mockClear();
|
||||
mockAxios.get.mockClear();
|
||||
|
||||
// 1. First mock: File upload response
|
||||
mockAxios.post.mockImplementationOnce(() =>
|
||||
Promise.resolve({ data: { id: 'file-123', purpose: 'ocr' } }),
|
||||
);
|
||||
|
||||
// 2. Second mock: Signed URL response
|
||||
mockAxios.get.mockImplementationOnce(() =>
|
||||
Promise.resolve({ data: { url: 'https://signed-url.com' } }),
|
||||
);
|
||||
|
||||
// 3. Third mock: OCR response
|
||||
mockAxios.post.mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
data: {
|
||||
pages: [{ markdown: 'Processed with literal config values' }],
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
const req = {
|
||||
user: { id: 'user123' },
|
||||
app: {
|
||||
locals: {
|
||||
ocr: {
|
||||
// Direct values that should be used as-is, without variable substitution
|
||||
apiKey: 'actual-api-key-value',
|
||||
baseURL: 'https://direct-api-url.mistral.ai/v1',
|
||||
mistralModel: 'mistral-direct-model',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const file = {
|
||||
path: '/tmp/upload/file.pdf',
|
||||
originalname: 'direct-values.pdf',
|
||||
};
|
||||
|
||||
const result = await uploadMistralOCR({
|
||||
req,
|
||||
file,
|
||||
file_id: 'file123',
|
||||
entity_id: 'entity123',
|
||||
});
|
||||
|
||||
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
|
||||
|
||||
// Verify the correct URL was used with the direct baseURL value
|
||||
expect(mockAxios.post).toHaveBeenCalledWith(
|
||||
'https://direct-api-url.mistral.ai/v1/files',
|
||||
expect.any(Object),
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
Authorization: 'Bearer actual-api-key-value',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
// Check the OCR call was made with the direct model value
|
||||
expect(mockAxios.post).toHaveBeenCalledWith(
|
||||
'https://direct-api-url.mistral.ai/v1/ocr',
|
||||
expect.objectContaining({
|
||||
model: 'mistral-direct-model',
|
||||
}),
|
||||
expect.any(Object),
|
||||
);
|
||||
|
||||
// Verify the result
|
||||
expect(result.text).toEqual('Processed with literal config values\n\n');
|
||||
|
||||
// Verify loadAuthValues was never called since we used direct values
|
||||
expect(loadAuthValues).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle empty configuration values and use defaults', async () => {
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
// Set up the mock values to be returned by loadAuthValues
|
||||
loadAuthValues.mockResolvedValue({
|
||||
OCR_API_KEY: 'default-from-env-key',
|
||||
OCR_BASEURL: 'https://default-from-env.mistral.ai/v1',
|
||||
});
|
||||
|
||||
// Clear all previous mocks
|
||||
mockAxios.post.mockClear();
|
||||
mockAxios.get.mockClear();
|
||||
|
||||
// 1. First mock: File upload response
|
||||
mockAxios.post.mockImplementationOnce(() =>
|
||||
Promise.resolve({ data: { id: 'file-123', purpose: 'ocr' } }),
|
||||
);
|
||||
|
||||
// 2. Second mock: Signed URL response
|
||||
mockAxios.get.mockImplementationOnce(() =>
|
||||
Promise.resolve({ data: { url: 'https://signed-url.com' } }),
|
||||
);
|
||||
|
||||
// 3. Third mock: OCR response
|
||||
mockAxios.post.mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
data: {
|
||||
pages: [{ markdown: 'Content from default configuration' }],
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
const req = {
|
||||
user: { id: 'user123' },
|
||||
app: {
|
||||
locals: {
|
||||
ocr: {
|
||||
// Empty string values - should fall back to defaults
|
||||
apiKey: '',
|
||||
baseURL: '',
|
||||
mistralModel: '',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const file = {
|
||||
path: '/tmp/upload/file.pdf',
|
||||
originalname: 'empty-config.pdf',
|
||||
};
|
||||
|
||||
const result = await uploadMistralOCR({
|
||||
req,
|
||||
file,
|
||||
file_id: 'file123',
|
||||
entity_id: 'entity123',
|
||||
});
|
||||
|
||||
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
|
||||
|
||||
// Verify loadAuthValues was called with the default variable names
|
||||
expect(loadAuthValues).toHaveBeenCalledWith({
|
||||
userId: 'user123',
|
||||
authFields: ['OCR_BASEURL', 'OCR_API_KEY'],
|
||||
optional: expect.any(Set),
|
||||
});
|
||||
|
||||
// Verify the API calls used the default values from loadAuthValues
|
||||
expect(mockAxios.post).toHaveBeenCalledWith(
|
||||
'https://default-from-env.mistral.ai/v1/files',
|
||||
expect.any(Object),
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
Authorization: 'Bearer default-from-env-key',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
// Verify the OCR model defaulted to mistral-ocr-latest
|
||||
expect(mockAxios.post).toHaveBeenCalledWith(
|
||||
'https://default-from-env.mistral.ai/v1/ocr',
|
||||
expect.objectContaining({
|
||||
model: 'mistral-ocr-latest',
|
||||
}),
|
||||
expect.any(Object),
|
||||
);
|
||||
|
||||
// Check result
|
||||
expect(result.text).toEqual('Content from default configuration\n\n');
|
||||
});
|
||||
});
|
||||
});
|
||||
5
api/server/services/Files/MistralOCR/index.js
Normal file
@@ -0,0 +1,5 @@
const crud = require('./crud');

module.exports = {
  ...crud,
};
162
api/server/services/Files/S3/crud.js
Normal file
@@ -0,0 +1,162 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const axios = require('axios');
|
||||
const fetch = require('node-fetch');
|
||||
const { getBufferMetadata } = require('~/server/utils');
|
||||
const { initializeS3 } = require('./initialize');
|
||||
const { logger } = require('~/config');
|
||||
const { PutObjectCommand, GetObjectCommand, DeleteObjectCommand } = require('@aws-sdk/client-s3');
|
||||
const { getSignedUrl } = require('@aws-sdk/s3-request-presigner');
|
||||
|
||||
const bucketName = process.env.AWS_BUCKET_NAME;
|
||||
const s3 = initializeS3();
|
||||
const defaultBasePath = 'images';
|
||||
|
||||
/**
|
||||
* Constructs the S3 key based on the base path, user ID, and file name.
|
||||
*/
|
||||
const getS3Key = (basePath, userId, fileName) => `${basePath}/${userId}/${fileName}`;
|
||||
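For reference, the key layout this helper produces (identifiers below are hypothetical):

getS3Key('images', 'user-123', 'file-abc__report.pdf');
// => 'images/user-123/file-abc__report.pdf'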
|
||||
/**
|
||||
* Uploads a buffer to S3 and returns a signed URL.
|
||||
*
|
||||
* @param {Object} params
|
||||
* @param {string} params.userId - The user's unique identifier.
|
||||
* @param {Buffer} params.buffer - The buffer containing file data.
|
||||
* @param {string} params.fileName - The file name to use in S3.
|
||||
* @param {string} [params.basePath='images'] - The base path in the bucket.
|
||||
* @returns {Promise<string>} Signed URL of the uploaded file.
|
||||
*/
|
||||
async function saveBufferToS3({ userId, buffer, fileName, basePath = defaultBasePath }) {
|
||||
const key = getS3Key(basePath, userId, fileName);
|
||||
const params = { Bucket: bucketName, Key: key, Body: buffer };
|
||||
|
||||
try {
|
||||
await s3.send(new PutObjectCommand(params));
|
||||
return await getS3URL({ userId, fileName, basePath });
|
||||
} catch (error) {
|
||||
logger.error('[saveBufferToS3] Error uploading buffer to S3:', error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves a signed URL for a file stored in S3.
|
||||
*
|
||||
* @param {Object} params
|
||||
* @param {string} params.userId - The user's unique identifier.
|
||||
* @param {string} params.fileName - The file name in S3.
|
||||
* @param {string} [params.basePath='images'] - The base path in the bucket.
|
||||
* @returns {Promise<string>} A signed URL valid for 24 hours.
|
||||
*/
|
||||
async function getS3URL({ userId, fileName, basePath = defaultBasePath }) {
|
||||
const key = getS3Key(basePath, userId, fileName);
|
||||
const params = { Bucket: bucketName, Key: key };
|
||||
|
||||
try {
|
||||
return await getSignedUrl(s3, new GetObjectCommand(params), { expiresIn: 86400 });
|
||||
} catch (error) {
|
||||
logger.error('[getS3URL] Error getting signed URL from S3:', error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves a file from a given URL to S3.
|
||||
*
|
||||
* @param {Object} params
|
||||
* @param {string} params.userId - The user's unique identifier.
|
||||
* @param {string} params.URL - The source URL of the file.
|
||||
* @param {string} params.fileName - The file name to use in S3.
|
||||
* @param {string} [params.basePath='images'] - The base path in the bucket.
|
||||
* @returns {Promise<string>} Signed URL of the uploaded file.
|
||||
*/
|
||||
async function saveURLToS3({ userId, URL, fileName, basePath = defaultBasePath }) {
|
||||
try {
|
||||
const response = await fetch(URL);
|
||||
const buffer = await response.buffer();
|
||||
// Optionally you can call getBufferMetadata(buffer) if needed.
|
||||
return await saveBufferToS3({ userId, buffer, fileName, basePath });
|
||||
} catch (error) {
|
||||
logger.error('[saveURLToS3] Error uploading file from URL to S3:', error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a file from S3.
|
||||
*
|
||||
* @param {Object} params
|
||||
* @param {string} params.userId - The user's unique identifier.
|
||||
* @param {string} params.fileName - The file name in S3.
|
||||
* @param {string} [params.basePath='images'] - The base path in the bucket.
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async function deleteFileFromS3({ userId, fileName, basePath = defaultBasePath }) {
|
||||
const key = getS3Key(basePath, userId, fileName);
|
||||
const params = { Bucket: bucketName, Key: key };
|
||||
|
||||
try {
|
||||
await s3.send(new DeleteObjectCommand(params));
|
||||
logger.debug('[deleteFileFromS3] File deleted successfully from S3');
|
||||
} catch (error) {
|
||||
logger.error('[deleteFileFromS3] Error deleting file from S3:', error.message);
|
||||
// If the file is not found, we can safely return.
|
||||
if (error.code === 'NoSuchKey') {
|
||||
return;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Uploads a local file to S3.
|
||||
*
|
||||
* @param {Object} params
|
||||
* @param {import('express').Request} params.req - The Express request (must include user).
|
||||
* @param {Express.Multer.File} params.file - The file object from Multer.
|
||||
* @param {string} params.file_id - Unique file identifier.
|
||||
* @param {string} [params.basePath='images'] - The base path in the bucket.
|
||||
* @returns {Promise<{ filepath: string, bytes: number }>}
|
||||
*/
|
||||
async function uploadFileToS3({ req, file, file_id, basePath = defaultBasePath }) {
|
||||
try {
|
||||
const inputFilePath = file.path;
|
||||
const inputBuffer = await fs.promises.readFile(inputFilePath);
|
||||
const bytes = Buffer.byteLength(inputBuffer);
|
||||
const userId = req.user.id;
|
||||
const fileName = `${file_id}__${path.basename(inputFilePath)}`;
|
||||
const fileURL = await saveBufferToS3({ userId, buffer: inputBuffer, fileName, basePath });
|
||||
await fs.promises.unlink(inputFilePath);
|
||||
return { filepath: fileURL, bytes };
|
||||
} catch (error) {
|
||||
logger.error('[uploadFileToS3] Error uploading file to S3:', error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves a readable stream for a file stored in S3.
|
||||
*
|
||||
* @param {string} filePath - The S3 key of the file.
|
||||
* @returns {Promise<NodeJS.ReadableStream>}
|
||||
*/
|
||||
async function getS3FileStream(filePath) {
|
||||
const params = { Bucket: bucketName, Key: filePath };
|
||||
try {
|
||||
const data = await s3.send(new GetObjectCommand(params));
|
||||
return data.Body; // Returns a Node.js ReadableStream.
|
||||
} catch (error) {
|
||||
logger.error('[getS3FileStream] Error retrieving S3 file stream:', error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
saveBufferToS3,
|
||||
saveURLToS3,
|
||||
getS3URL,
|
||||
deleteFileFromS3,
|
||||
uploadFileToS3,
|
||||
getS3FileStream,
|
||||
};
|
||||
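A hedged end-to-end sketch of the helpers exported above: upload a buffer, fetch a fresh signed URL later, and delete the object when done. The user id and file name are placeholders, and the AWS credentials/region are assumed to be configured as described in initialize.js below.

// Illustration only; assumes AWS_REGION and AWS_BUCKET_NAME are set.
const { saveBufferToS3, getS3URL, deleteFileFromS3 } = require('./crud');

async function s3RoundTripExample() {
  const userId = 'user-123';    // hypothetical user id
  const fileName = 'notes.txt'; // hypothetical file name

  const uploadedUrl = await saveBufferToS3({ userId, buffer: Buffer.from('hello'), fileName });
  console.log('uploaded to:', uploadedUrl);

  const refreshedUrl = await getS3URL({ userId, fileName }); // signed URL valid for 24 hours
  console.log('refreshed URL:', refreshedUrl);

  await deleteFileFromS3({ userId, fileName });
}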
118
api/server/services/Files/S3/images.js
Normal file
@@ -0,0 +1,118 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const sharp = require('sharp');
|
||||
const { resizeImageBuffer } = require('../images/resize');
|
||||
const { updateUser } = require('~/models/userMethods');
|
||||
const { saveBufferToS3 } = require('./crud');
|
||||
const { updateFile } = require('~/models/File');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const defaultBasePath = 'images';
|
||||
|
||||
/**
|
||||
* Resizes, converts, and uploads an image file to S3.
|
||||
*
|
||||
* @param {Object} params
|
||||
* @param {import('express').Request} params.req - Express request (expects user and app.locals.imageOutputType).
|
||||
* @param {Express.Multer.File} params.file - File object from Multer.
|
||||
* @param {string} params.file_id - Unique file identifier.
|
||||
* @param {any} params.endpoint - Endpoint identifier used in image processing.
|
||||
* @param {string} [params.resolution='high'] - Desired image resolution.
|
||||
* @param {string} [params.basePath='images'] - Base path in the bucket.
|
||||
* @returns {Promise<{ filepath: string, bytes: number, width: number, height: number }>}
|
||||
*/
|
||||
async function uploadImageToS3({
|
||||
req,
|
||||
file,
|
||||
file_id,
|
||||
endpoint,
|
||||
resolution = 'high',
|
||||
basePath = defaultBasePath,
|
||||
}) {
|
||||
try {
|
||||
const inputFilePath = file.path;
|
||||
const inputBuffer = await fs.promises.readFile(inputFilePath);
|
||||
const {
|
||||
buffer: resizedBuffer,
|
||||
width,
|
||||
height,
|
||||
} = await resizeImageBuffer(inputBuffer, resolution, endpoint);
|
||||
const extension = path.extname(inputFilePath);
|
||||
const userId = req.user.id;
|
||||
|
||||
let processedBuffer;
|
||||
let fileName = `${file_id}__${path.basename(inputFilePath)}`;
|
||||
const targetExtension = `.${req.app.locals.imageOutputType}`;
|
||||
|
||||
if (extension.toLowerCase() === targetExtension) {
|
||||
processedBuffer = resizedBuffer;
|
||||
} else {
|
||||
processedBuffer = await sharp(resizedBuffer)
|
||||
.toFormat(req.app.locals.imageOutputType)
|
||||
.toBuffer();
|
||||
fileName = fileName.replace(new RegExp(path.extname(fileName) + '$'), targetExtension);
|
||||
if (!path.extname(fileName)) {
|
||||
fileName += targetExtension;
|
||||
}
|
||||
}
|
||||
|
||||
const downloadURL = await saveBufferToS3({
|
||||
userId,
|
||||
buffer: processedBuffer,
|
||||
fileName,
|
||||
basePath,
|
||||
});
|
||||
await fs.promises.unlink(inputFilePath);
|
||||
const bytes = Buffer.byteLength(processedBuffer);
|
||||
return { filepath: downloadURL, bytes, width, height };
|
||||
} catch (error) {
|
||||
logger.error('[uploadImageToS3] Error uploading image to S3:', error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
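A brief usage sketch of the image helper above; `req` and `file` are hypothetical Express/Multer objects, and `req.app.locals.imageOutputType` is assumed to be set (e.g. 'webp') so non-matching extensions get converted via sharp.

// Illustration only.
async function exampleImageUpload(req, file) {
  const { filepath, bytes, width, height } = await uploadImageToS3({
    req,                 // expects req.user.id and req.app.locals.imageOutputType
    file,                // e.g. { path: '/tmp/upload/picture.png' }
    file_id: 'file-abc', // hypothetical id
    endpoint: 'openAI',  // passed through to resizeImageBuffer
  });
  return { filepath, bytes, width, height };
}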
|
||||
/**
|
||||
* Updates a file record and returns its signed URL.
|
||||
*
|
||||
* @param {import('express').Request} req - Express request.
|
||||
* @param {Object} file - File metadata.
|
||||
* @returns {Promise<[Promise<any>, string]>}
|
||||
*/
|
||||
async function prepareImageURLS3(req, file) {
|
||||
try {
|
||||
const updatePromise = updateFile({ file_id: file.file_id });
|
||||
return Promise.all([updatePromise, file.filepath]);
|
||||
} catch (error) {
|
||||
logger.error('[prepareImageURLS3] Error preparing image URL:', error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes a user's avatar image by uploading it to S3 and updating the user's avatar URL if required.
|
||||
*
|
||||
* @param {Object} params
|
||||
* @param {Buffer} params.buffer - Avatar image buffer.
|
||||
* @param {string} params.userId - User's unique identifier.
|
||||
* @param {string} params.manual - 'true' or 'false' flag for manual update.
|
||||
* @param {string} [params.basePath='images'] - Base path in the bucket.
|
||||
* @returns {Promise<string>} Signed URL of the uploaded avatar.
|
||||
*/
|
||||
async function processS3Avatar({ buffer, userId, manual, basePath = defaultBasePath }) {
|
||||
try {
|
||||
const downloadURL = await saveBufferToS3({ userId, buffer, fileName: 'avatar.png', basePath });
|
||||
if (manual === 'true') {
|
||||
await updateUser(userId, { avatar: downloadURL });
|
||||
}
|
||||
return downloadURL;
|
||||
} catch (error) {
|
||||
logger.error('[processS3Avatar] Error processing S3 avatar:', error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
uploadImageToS3,
|
||||
prepareImageURLS3,
|
||||
processS3Avatar,
|
||||
};
|
||||
9
api/server/services/Files/S3/index.js
Normal file
@@ -0,0 +1,9 @@
const crud = require('./crud');
const images = require('./images');
const initialize = require('./initialize');

module.exports = {
  ...crud,
  ...images,
  ...initialize,
};
43
api/server/services/Files/S3/initialize.js
Normal file
@@ -0,0 +1,43 @@
const { S3Client } = require('@aws-sdk/client-s3');
const { logger } = require('~/config');

let s3 = null;

/**
 * Initializes and returns an instance of the AWS S3 client.
 *
 * If AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are provided, they will be used.
 * Otherwise, the AWS SDK's default credentials chain (including IRSA) is used.
 *
 * @returns {S3Client|null} An instance of S3Client if the region is provided; otherwise, null.
 */
const initializeS3 = () => {
  if (s3) {
    return s3;
  }

  const region = process.env.AWS_REGION;
  if (!region) {
    logger.error('[initializeS3] AWS_REGION is not set. Cannot initialize S3.');
    return null;
  }

  const accessKeyId = process.env.AWS_ACCESS_KEY_ID;
  const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;

  if (accessKeyId && secretAccessKey) {
    s3 = new S3Client({
      region,
      credentials: { accessKeyId, secretAccessKey },
    });
    logger.info('[initializeS3] S3 initialized with provided credentials.');
  } else {
    // When using IRSA, credentials are automatically provided via the IAM Role attached to the ServiceAccount.
    s3 = new S3Client({ region });
    logger.info('[initializeS3] S3 initialized using default credentials (IRSA).');
  }

  return s3;
};

module.exports = { initializeS3 };
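The client is memoized and configured purely from the environment; a minimal sketch of the two supported setups (values are placeholders):

// Explicit credentials (values illustrative):
//   AWS_REGION=us-east-1
//   AWS_BUCKET_NAME=my-librechat-files
//   AWS_ACCESS_KEY_ID=AKIA...
//   AWS_SECRET_ACCESS_KEY=...
//
// IRSA / default chain: set only AWS_REGION (and AWS_BUCKET_NAME) and omit the keys.
const s3Client = initializeS3(); // returns the same S3Client on later calls, or null without AWS_REGION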
@@ -49,6 +49,7 @@ async function encodeAndFormat(req, files, endpoint, mode) {
|
||||
const promises = [];
|
||||
const encodingMethods = {};
|
||||
const result = {
|
||||
text: '',
|
||||
files: [],
|
||||
image_urls: [],
|
||||
};
|
||||
@@ -59,6 +60,9 @@ async function encodeAndFormat(req, files, endpoint, mode) {
|
||||
|
||||
for (let file of files) {
|
||||
const source = file.source ?? FileSources.local;
|
||||
if (source === FileSources.text && file.text) {
|
||||
result.text += `${!result.text ? 'Attached document(s):\n```md' : '\n\n---\n\n'}# "${file.filename}"\n${file.text}\n`;
|
||||
}
|
||||
|
||||
if (!file.height) {
|
||||
promises.push([file, null]);
|
||||
@@ -85,6 +89,10 @@ async function encodeAndFormat(req, files, endpoint, mode) {
|
||||
promises.push(preparePayload(req, file));
|
||||
}
|
||||
|
||||
if (result.text) {
|
||||
result.text += '\n```';
|
||||
}
|
||||
|
||||
const detail = req.body.imageDetail ?? ImageDetail.auto;
|
||||
|
||||
/** @type {Array<[MongoFile, string]>} */
|
||||
|
||||
@@ -28,8 +28,8 @@ const { addResourceFileId, deleteResourceFileId } = require('~/server/controller
|
||||
const { addAgentResourceFile, removeAgentResourceFiles } = require('~/models/Agent');
|
||||
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
|
||||
const { createFile, updateFileUsage, deleteFiles } = require('~/models/File');
|
||||
const { getEndpointsConfig } = require('~/server/services/Config');
|
||||
const { loadAuthValues } = require('~/app/clients/tools/util');
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
const { checkCapability } = require('~/server/services/Config');
|
||||
const { LB_QueueAsyncCall } = require('~/server/utils/queue');
|
||||
const { getStrategyFunctions } = require('./strategies');
|
||||
const { determineFileType } = require('~/server/utils');
|
||||
@@ -162,7 +162,6 @@ const processDeleteRequest = async ({ req, files }) => {
|
||||
|
||||
for (const file of files) {
|
||||
const source = file.source ?? FileSources.local;
|
||||
|
||||
if (req.body.agent_id && req.body.tool_resource) {
|
||||
agentFiles.push({
|
||||
tool_resource: req.body.tool_resource,
|
||||
@@ -170,6 +169,11 @@ const processDeleteRequest = async ({ req, files }) => {
|
||||
});
|
||||
}
|
||||
|
||||
if (source === FileSources.text) {
|
||||
resolvedFileIds.push(file.file_id);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (checkOpenAIStorage(source) && !client[source]) {
|
||||
await initializeClients();
|
||||
}
|
||||
@@ -453,17 +457,6 @@ const processFileUpload = async ({ req, res, metadata }) => {
|
||||
res.status(200).json({ message: 'File uploaded and processed successfully', ...result });
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {ServerRequest} req
|
||||
* @param {AgentCapabilities} capability
|
||||
* @returns {Promise<boolean>}
|
||||
*/
|
||||
const checkCapability = async (req, capability) => {
|
||||
const endpointsConfig = await getEndpointsConfig(req);
|
||||
const capabilities = endpointsConfig?.[EModelEndpoint.agents]?.capabilities ?? [];
|
||||
return capabilities.includes(capability);
|
||||
};
|
||||
|
||||
/**
|
||||
* Applies the current strategy for file uploads.
|
||||
* Saves file metadata to the database with an expiry TTL.
|
||||
@@ -521,6 +514,52 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
|
||||
if (!isFileSearchEnabled) {
|
||||
throw new Error('File search is not enabled for Agents');
|
||||
}
|
||||
} else if (tool_resource === EToolResources.ocr) {
|
||||
const isOCREnabled = await checkCapability(req, AgentCapabilities.ocr);
|
||||
if (!isOCREnabled) {
|
||||
throw new Error('OCR capability is not enabled for Agents');
|
||||
}
|
||||
|
||||
const { handleFileUpload } = getStrategyFunctions(
|
||||
req.app.locals?.ocr?.strategy ?? FileSources.mistral_ocr,
|
||||
);
|
||||
const { file_id, temp_file_id } = metadata;
|
||||
|
||||
const {
|
||||
text,
|
||||
bytes,
|
||||
// TODO: OCR images support?
|
||||
images,
|
||||
filename,
|
||||
filepath: ocrFileURL,
|
||||
} = await handleFileUpload({ req, file, file_id, entity_id: agent_id });
|
||||
|
||||
const fileInfo = removeNullishValues({
|
||||
text,
|
||||
bytes,
|
||||
file_id,
|
||||
temp_file_id,
|
||||
user: req.user.id,
|
||||
type: file.mimetype,
|
||||
filepath: ocrFileURL,
|
||||
source: FileSources.text,
|
||||
filename: filename ?? file.originalname,
|
||||
model: messageAttachment ? undefined : req.body.model,
|
||||
context: messageAttachment ? FileContext.message_attachment : FileContext.agents,
|
||||
});
|
||||
|
||||
if (!messageAttachment && tool_resource) {
|
||||
await addAgentResourceFile({
|
||||
req,
|
||||
file_id,
|
||||
agent_id,
|
||||
tool_resource,
|
||||
});
|
||||
}
|
||||
const result = await createFile(fileInfo, true);
|
||||
return res
|
||||
.status(200)
|
||||
.json({ message: 'Agent file uploaded and processed successfully', ...result });
|
||||
}
|
||||
|
||||
const source =
|
||||
|
||||
@@ -21,9 +21,21 @@ const {
|
||||
processLocalAvatar,
|
||||
getLocalFileStream,
|
||||
} = require('./Local');
|
||||
const {
|
||||
getS3URL,
|
||||
saveURLToS3,
|
||||
saveBufferToS3,
|
||||
getS3FileStream,
|
||||
uploadImageToS3,
|
||||
prepareImageURLS3,
|
||||
deleteFileFromS3,
|
||||
processS3Avatar,
|
||||
uploadFileToS3,
|
||||
} = require('./S3');
|
||||
const { uploadOpenAIFile, deleteOpenAIFile, getOpenAIFileStream } = require('./OpenAI');
|
||||
const { getCodeOutputDownloadStream, uploadCodeEnvFile } = require('./Code');
|
||||
const { uploadVectors, deleteVectors } = require('./VectorDB');
|
||||
const { uploadMistralOCR } = require('./MistralOCR');
|
||||
|
||||
/**
|
||||
* Firebase Storage Strategy Functions
|
||||
@@ -57,6 +69,22 @@ const localStrategy = () => ({
|
||||
getDownloadStream: getLocalFileStream,
|
||||
});
|
||||
|
||||
/**
|
||||
* S3 Storage Strategy Functions
|
||||
*
|
||||
* */
|
||||
const s3Strategy = () => ({
|
||||
handleFileUpload: uploadFileToS3,
|
||||
saveURL: saveURLToS3,
|
||||
getFileURL: getS3URL,
|
||||
deleteFile: deleteFileFromS3,
|
||||
saveBuffer: saveBufferToS3,
|
||||
prepareImagePayload: prepareImageURLS3,
|
||||
processAvatar: processS3Avatar,
|
||||
handleImageUpload: uploadImageToS3,
|
||||
getDownloadStream: getS3FileStream,
|
||||
});
|
||||
|
||||
/**
|
||||
* VectorDB Storage Strategy Functions
|
||||
*
|
||||
@@ -127,6 +155,26 @@ const codeOutputStrategy = () => ({
|
||||
getDownloadStream: getCodeOutputDownloadStream,
|
||||
});
|
||||
|
||||
const mistralOCRStrategy = () => ({
|
||||
/** @type {typeof saveFileFromURL | null} */
|
||||
saveURL: null,
|
||||
/** @type {typeof getLocalFileURL | null} */
|
||||
getFileURL: null,
|
||||
/** @type {typeof saveLocalBuffer | null} */
|
||||
saveBuffer: null,
|
||||
/** @type {typeof processLocalAvatar | null} */
|
||||
processAvatar: null,
|
||||
/** @type {typeof uploadLocalImage | null} */
|
||||
handleImageUpload: null,
|
||||
/** @type {typeof prepareImagesLocal | null} */
|
||||
prepareImagePayload: null,
|
||||
/** @type {typeof deleteLocalFile | null} */
|
||||
deleteFile: null,
|
||||
/** @type {typeof getLocalFileStream | null} */
|
||||
getDownloadStream: null,
|
||||
handleFileUpload: uploadMistralOCR,
|
||||
});
|
||||
|
||||
// Strategy Selector
|
||||
const getStrategyFunctions = (fileSource) => {
|
||||
if (fileSource === FileSources.firebase) {
|
||||
@@ -139,8 +187,12 @@ const getStrategyFunctions = (fileSource) => {
|
||||
return openAIStrategy();
|
||||
} else if (fileSource === FileSources.vectordb) {
|
||||
return vectorStrategy();
|
||||
} else if (fileSource === FileSources.s3) {
|
||||
return s3Strategy();
|
||||
} else if (fileSource === FileSources.execute_code) {
|
||||
return codeOutputStrategy();
|
||||
} else if (fileSource === FileSources.mistral_ocr) {
|
||||
return mistralOCRStrategy();
|
||||
} else {
|
||||
throw new Error('Invalid file source');
|
||||
}
|
||||
|
||||
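A short sketch of how the new sources plug into the selector above; the FileSources values come from librechat-data-provider and the returned objects are the strategy maps defined earlier in this file.

// Illustration only.
const { FileSources } = require('librechat-data-provider');
const { getStrategyFunctions } = require('./strategies');

const s3 = getStrategyFunctions(FileSources.s3);
// s3.handleFileUpload === uploadFileToS3, s3.getDownloadStream === getS3FileStream, ...

const ocr = getStrategyFunctions(FileSources.mistral_ocr);
// ocr.handleFileUpload === uploadMistralOCR; the other operations are intentionally null.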
@@ -362,7 +362,12 @@ async function processRequiredActions(client, requiredActions) {
continue;
}

tool = await createActionTool({ action: actionSet, requestBuilder });
tool = await createActionTool({
req: client.req,
res: client.res,
action: actionSet,
requestBuilder,
});
if (!tool) {
logger.warn(
`Invalid action: user: ${client.req.user.id} | thread_id: ${requiredActions[0].thread_id} | run_id: ${requiredActions[0].run_id} | toolName: ${currentAction.tool}`,

56  api/server/services/Tools/credentials.js  Normal file
@@ -0,0 +1,56 @@
const { getUserPluginAuthValue } = require('~/server/services/PluginService');

/**
*
* @param {Object} params
* @param {string} params.userId
* @param {string[]} params.authFields
* @param {Set<string>} [params.optional]
* @param {boolean} [params.throwError]
* @returns
*/
const loadAuthValues = async ({ userId, authFields, optional, throwError = true }) => {
let authValues = {};

/**
* Finds the first non-empty value for the given authentication field, supporting alternate fields.
* @param {string[]} fields Array of strings representing the authentication fields. Supports alternate fields delimited by "||".
* @returns {Promise<{ authField: string, authValue: string} | null>} An object containing the authentication field and value, or null if not found.
*/
const findAuthValue = async (fields) => {
for (const field of fields) {
let value = process.env[field];
if (value) {
return { authField: field, authValue: value };
}
try {
value = await getUserPluginAuthValue(userId, field, throwError);
} catch (err) {
if (optional && optional.has(field)) {
return { authField: field, authValue: undefined };
}
if (field === fields[fields.length - 1] && !value) {
throw err;
}
}
if (value) {
return { authField: field, authValue: value };
}
}
return null;
};

for (let authField of authFields) {
const fields = authField.split('||');
const result = await findAuthValue(fields);
if (result) {
authValues[result.authField] = result.authValue;
}
}

return authValues;
};

module.exports = {
loadAuthValues,
};

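A rough usage sketch for `loadAuthValues` (the field names below are hypothetical; only the `||` alternate-field fallback and the `optional` set come from the code above):

```js
const { loadAuthValues } = require('~/server/services/Tools/credentials');

async function getToolCredentials(userId) {
  // Each entry is resolved from process.env first, then from the user's stored
  // plugin auth values; 'OCR_API_KEY||OCR_KEY' is tried left to right.
  return await loadAuthValues({
    userId,
    authFields: ['SERPAPI_API_KEY', 'OCR_API_KEY||OCR_KEY'],
    optional: new Set(['OCR_API_KEY']),
  });
}
```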
@@ -203,6 +203,8 @@ function generateConfig(key, baseURL, endpoint) {
AgentCapabilities.artifacts,
AgentCapabilities.actions,
AgentCapabilities.tools,
AgentCapabilities.ocr,
AgentCapabilities.chain,
];
}

@@ -5,6 +5,7 @@ const { HttpsProxyAgent } = require('https-proxy-agent');
const { Issuer, Strategy: OpenIDStrategy, custom } = require('openid-client');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { findUser, createUser, updateUser } = require('~/models/userMethods');
const { findGroup } = require('~/models/groupMethods');
const { hashToken } = require('~/server/utils/crypto');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');
@@ -105,6 +106,71 @@ function convertToUsername(input, defaultValue = '') {
return defaultValue;
}

/**
* Extracts roles from the specified token using configuration from environment variables.
* @param {object} tokenset - The token set returned by the OpenID provider.
* @returns {Array} The roles extracted from the token.
*/
function extractRoles(tokenset) {
const requiredRoleTokenKind = process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND;
const requiredRoleParameterPath = process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH;
const token =
requiredRoleTokenKind === 'access'
? jwtDecode(tokenset.access_token)
: jwtDecode(tokenset.id_token);
const pathParts = requiredRoleParameterPath.split('.');
let found = true;
const roles = pathParts.reduce((acc, key) => {
if (!acc || !(key in acc)) {
found = false;
return [];
}
return acc[key];
}, token);
if (!found) {
logger.error(
`[openidStrategy] Key '${requiredRoleParameterPath}' not found in ${requiredRoleTokenKind} token!`,
);
}
return roles;
}

/**
* Updates the user's groups based on the provided roles.
* It removes any existing OpenID group references and then adds the groups
* that match the roles from the external group collection.
*
* @param {object} user - The user object.
* @param {Array} roles - The roles extracted from the token.
* @returns {Promise<Array>} The updated groups array.
*/
async function updateUserGroups(user, roles) {
user.groups = user.groups || [];
// Remove existing OpenID group references.
const currentOpenIdGroups = await findGroup({
_id: { $in: user.groups },
provider: 'openid',
});
const currentOpenIdGroupIds = new Set(
currentOpenIdGroups.map((g) => g._id.toString()),
);
user.groups = user.groups.filter(
(id) => !currentOpenIdGroupIds.has(id.toString()),
);

// Look up groups matching the roles.
const matchingGroups = await findGroup({
provider: 'openid',
externalId: { $in: roles },
});
matchingGroups.forEach((group) => {
if (!user.groups.some((id) => id.toString() === group._id.toString())) {
user.groups.push(group._id);
}
});
return user.groups;
}

async function setupOpenId() {
try {
if (process.env.PROXY) {
@@ -134,8 +200,6 @@ async function setupOpenId() {
}
const client = new issuer.Client(clientMetadata);
const requiredRole = process.env.OPENID_REQUIRED_ROLE;
const requiredRoleParameterPath = process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH;
const requiredRoleTokenKind = process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND;
const openidLogin = new OpenIDStrategy(
{
client,
@@ -145,8 +209,13 @@ async function setupOpenId() {
},
async (tokenset, userinfo, done) => {
try {
logger.info(`[openidStrategy] verify login openidId: ${userinfo.sub}`);
logger.debug('[openidStrategy] very login tokenset and userinfo', { tokenset, userinfo });
logger.info(
`[openidStrategy] verify login openidId: ${userinfo.sub}`,
);
logger.debug('[openidStrategy] verify login tokenset and userinfo', {
tokenset,
userinfo,
});

let user = await findUser({ openidId: userinfo.sub });
logger.info(
@@ -164,29 +233,10 @@ async function setupOpenId() {

const fullName = getFullName(userinfo);

// Check for the required role using extracted roles.
let roles = [];
if (requiredRole) {
let decodedToken = '';
if (requiredRoleTokenKind === 'access') {
decodedToken = jwtDecode(tokenset.access_token);
} else if (requiredRoleTokenKind === 'id') {
decodedToken = jwtDecode(tokenset.id_token);
}
const pathParts = requiredRoleParameterPath.split('.');
let found = true;
let roles = pathParts.reduce((o, key) => {
if (o === null || o === undefined || !(key in o)) {
found = false;
return [];
}
return o[key];
}, decodedToken);

if (!found) {
logger.error(
`[openidStrategy] Key '${requiredRoleParameterPath}' not found in ${requiredRoleTokenKind} token!`,
);
}

roles = extractRoles(tokenset);
if (!roles.includes(requiredRole)) {
return done(null, false, {
message: `You must have the "${requiredRole}" role to log in.`,
@@ -243,6 +293,10 @@ async function setupOpenId() {
}
}

if (requiredRole) {
await updateUserGroups(user, roles);
}

user = await updateUser(user._id, user);

logger.info(

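For context, `extractRoles` walks a dot-delimited claim path into the decoded JWT. A minimal sketch of the environment configuration it expects, with a Keycloak-style claim layout (the claim names and values are assumptions, not confirmed by this diff):

```js
// Hypothetical .env configuration:
//   OPENID_REQUIRED_ROLE=librechat-user
//   OPENID_REQUIRED_ROLE_TOKEN_KIND=access
//   OPENID_REQUIRED_ROLE_PARAMETER_PATH=realm_access.roles
//
// With those settings, extractRoles decodes the access token and resolves
// token.realm_access.roles. For a decoded payload like:
const decodedToken = {
  sub: 'abc-123',
  realm_access: { roles: ['librechat-user', 'offline_access'] },
};
// 'realm_access.roles'.split('.').reduce(...) yields ['librechat-user', 'offline_access'],
// which is checked against OPENID_REQUIRED_ROLE and then passed to updateUserGroups.
```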
@@ -19,6 +19,9 @@ jest.mock('~/models/userMethods', () => ({
createUser: jest.fn(),
updateUser: jest.fn(),
}));
jest.mock('~/models/groupMethods', () => ({
findGroup: jest.fn().mockResolvedValue([]),
}));
jest.mock('~/server/utils/crypto', () => ({
hashToken: jest.fn().mockResolvedValue('hashed-token'),
}));

@@ -39,7 +39,10 @@ jest.mock('winston-daily-rotate-file', () => {
});

jest.mock('~/config', () => {
const actualModule = jest.requireActual('~/config');
return {
sendEvent: actualModule.sendEvent,
createAxiosInstance: actualModule.createAxiosInstance,
logger: {
info: jest.fn(),
warn: jest.fn(),

@@ -1787,3 +1787,51 @@
* @typedef {Promise<{ message: TMessage, conversation: TConversation }> | undefined} ClientDatabaseSavePromise
* @memberof typedefs
*/

/**
* @exports OCRImage
* @typedef {Object} OCRImage
* @property {string} id - The identifier of the image.
* @property {number} top_left_x - X-coordinate of the top left corner of the image.
* @property {number} top_left_y - Y-coordinate of the top left corner of the image.
* @property {number} bottom_right_x - X-coordinate of the bottom right corner of the image.
* @property {number} bottom_right_y - Y-coordinate of the bottom right corner of the image.
* @property {string} image_base64 - Base64-encoded image data.
* @memberof typedefs
*/

/**
* @exports PageDimensions
* @typedef {Object} PageDimensions
* @property {number} dpi - The dots per inch resolution of the page.
* @property {number} height - The height of the page in pixels.
* @property {number} width - The width of the page in pixels.
* @memberof typedefs
*/

/**
* @exports OCRPage
* @typedef {Object} OCRPage
* @property {number} index - The index of the page in the document.
* @property {string} markdown - The extracted text content of the page in markdown format.
* @property {OCRImage[]} images - Array of images found on the page.
* @property {PageDimensions} dimensions - The dimensions of the page.
* @memberof typedefs
*/

/**
* @exports OCRUsageInfo
* @typedef {Object} OCRUsageInfo
* @property {number} pages_processed - Number of pages processed in the document.
* @property {number} doc_size_bytes - Size of the document in bytes.
* @memberof typedefs
*/

/**
* @exports OCRResult
* @typedef {Object} OCRResult
* @property {OCRPage[]} pages - Array of pages extracted from the document.
* @property {string} model - The model used for OCR processing.
* @property {OCRUsageInfo} usage_info - Usage information for the OCR operation.
* @memberof typedefs
*/

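As a quick illustration of the shape these typedefs describe (all values below are invented placeholders):

```js
/** @type {OCRResult} */
const exampleResult = {
  model: 'mistral-ocr-latest', // placeholder model name
  usage_info: { pages_processed: 1, doc_size_bytes: 24576 },
  pages: [
    {
      index: 0,
      markdown: '# Invoice\n\nTotal: $42.00',
      dimensions: { dpi: 200, height: 2200, width: 1700 },
      images: [
        {
          id: 'img-0',
          top_left_x: 100,
          top_left_y: 150,
          bottom_right_x: 400,
          bottom_right_y: 350,
          image_base64: '<base64 data>',
        },
      ],
    },
  ],
};
```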
@@ -92,6 +92,7 @@ const anthropicModels = {
const deepseekModels = {
'deepseek-reasoner': 63000, // -1000 from max (API)
deepseek: 63000, // -1000 from max (API)
'deepseek.r1': 127500,
};

const metaModels = {

@@ -423,6 +423,9 @@ describe('Meta Models Tests', () => {
expect(getModelMaxTokens('deepseek-reasoner')).toBe(
maxTokensMap[EModelEndpoint.openAI]['deepseek-reasoner'],
);
expect(getModelMaxTokens('deepseek.r1')).toBe(
maxTokensMap[EModelEndpoint.openAI]['deepseek.r1'],
);
});
});

@@ -5,6 +5,7 @@ import type { OptionWithIcon, ExtendedFile } from './types';
export type TAgentOption = OptionWithIcon &
Agent & {
knowledge_files?: Array<[string, ExtendedFile]>;
context_files?: Array<[string, ExtendedFile]>;
code_files?: Array<[string, ExtendedFile]>;
};

@@ -27,4 +28,5 @@ export type AgentForm = {
provider?: AgentProvider | OptionWithIcon;
agent_ids?: string[];
[AgentCapabilities.artifacts]?: ArtifactModes | string;
recursion_limit?: number;
} & TAgentCapabilities;

@@ -131,6 +131,7 @@ export interface DataColumnMeta {
}

export enum Panel {
advanced = 'advanced',
builder = 'builder',
actions = 'actions',
model = 'model',
@@ -181,6 +182,7 @@ export type AgentPanelProps = {
activePanel?: string;
action?: t.Action;
actions?: t.Action[];
createMutation: UseMutationResult<t.Agent, Error, t.AgentCreateParams>;
setActivePanel: React.Dispatch<React.SetStateAction<Panel>>;
setAction: React.Dispatch<React.SetStateAction<t.Action | undefined>>;
endpointsConfig?: t.TEndpointsConfig;
@@ -483,6 +485,7 @@ export interface ExtendedFile {
attached?: boolean;
embedded?: boolean;
tool_resource?: string;
metadata?: t.TFile['metadata'];
}

export type ContextType = { navVisible: boolean; setNavVisible: (visible: boolean) => void };

@@ -1,7 +1,7 @@
import * as Ariakit from '@ariakit/react';
import React, { useRef, useState, useMemo } from 'react';
import { FileSearch, ImageUpIcon, TerminalSquareIcon } from 'lucide-react';
import { EToolResources, EModelEndpoint } from 'librechat-data-provider';
import { FileSearch, ImageUpIcon, TerminalSquareIcon, FileType2Icon } from 'lucide-react';
import { FileUpload, TooltipAnchor, DropdownPopup } from '~/components/ui';
import { useGetEndpointsQuery } from '~/data-provider';
import { AttachmentIcon } from '~/components/svg';
@@ -49,6 +49,17 @@ const AttachFile = ({ isRTL, disabled, handleFileChange }: AttachFileProps) => {
},
];

if (capabilities.includes(EToolResources.ocr)) {
items.push({
label: localize('com_ui_upload_ocr_text'),
onClick: () => {
setToolResource(EToolResources.ocr);
handleUploadClick();
},
icon: <FileType2Icon className="icon-md" />,
});
}

if (capabilities.includes(EToolResources.file_search)) {
items.push({
label: localize('com_ui_upload_file_search'),

@@ -1,6 +1,6 @@
import React, { useMemo } from 'react';
import { EModelEndpoint, EToolResources } from 'librechat-data-provider';
import { FileSearch, ImageUpIcon, TerminalSquareIcon } from 'lucide-react';
import { FileSearch, ImageUpIcon, FileType2Icon, TerminalSquareIcon } from 'lucide-react';
import OGDialogTemplate from '~/components/ui/OGDialogTemplate';
import { useGetEndpointsQuery } from '~/data-provider';
import useLocalize from '~/hooks/useLocalize';
@@ -50,6 +50,12 @@ const DragDropModal = ({ onOptionSelect, setShowModal, files, isVisible }: DragD
value: EToolResources.execute_code,
icon: <TerminalSquareIcon className="icon-md" />,
});
} else if (capability === EToolResources.ocr) {
_options.push({
label: localize('com_ui_upload_ocr_text'),
value: EToolResources.ocr,
icon: <FileType2Icon className="icon-md" />,
});
}
}

@@ -19,7 +19,7 @@ const FilePreview = ({
};
className?: string;
}) => {
const radius = 55; // Radius of the SVG circle
const radius = 55;
const circumference = 2 * Math.PI * radius;
const progress = useProgress(
file?.['progress'] ?? 1,
@@ -27,16 +27,15 @@ const FilePreview = ({
(file as ExtendedFile | undefined)?.size ?? 1,
);

// Calculate the offset based on the loading progress
const offset = circumference - progress * circumference;
const circleCSSProperties = {
transition: 'stroke-dashoffset 0.5s linear',
};

return (
<div className={cn('size-10 shrink-0 overflow-hidden rounded-xl', className)}>
<div className={cn('relative size-10 shrink-0 overflow-hidden rounded-xl', className)}>
<FileIcon file={file} fileType={fileType} />
<SourceIcon source={file?.source} />
<SourceIcon source={file?.source} isCodeFile={!!file?.['metadata']?.fileIdentifier} />
{progress < 1 && (
<ProgressCircle
circumference={circumference}

@@ -1,3 +1,4 @@
import { Terminal, Type, Database } from 'lucide-react';
import { EModelEndpoint, FileSources } from 'librechat-data-provider';
import { MinimalIcon } from '~/components/Endpoints';
import { cn } from '~/utils';
@@ -6,9 +7,13 @@ const sourceToEndpoint = {
[FileSources.openai]: EModelEndpoint.openAI,
[FileSources.azure]: EModelEndpoint.azureOpenAI,
};

const sourceToClassname = {
[FileSources.openai]: 'bg-white/75 dark:bg-black/65',
[FileSources.azure]: 'azure-bg-color opacity-85',
[FileSources.execute_code]: 'bg-black text-white opacity-85',
[FileSources.text]: 'bg-blue-500 dark:bg-blue-900 opacity-85 text-white',
[FileSources.vectordb]: 'bg-yellow-700 dark:bg-yellow-900 opacity-85 text-white',
};

const defaultClassName =
@@ -16,13 +21,41 @@ const defaultClassName =

export default function SourceIcon({
source,
isCodeFile,
className = defaultClassName,
}: {
source?: FileSources;
isCodeFile?: boolean;
className?: string;
}) {
if (source === FileSources.local || source === FileSources.firebase) {
return null;
if (isCodeFile === true) {
return (
<div className={cn(className, sourceToClassname[FileSources.execute_code] ?? '')}>
<span className="flex items-center justify-center">
<Terminal className="h-3 w-3" />
</span>
</div>
);
}

if (source === FileSources.text) {
return (
<div className={cn(className, sourceToClassname[source] ?? '')}>
<span className="flex items-center justify-center">
<Type className="h-3 w-3" />
</span>
</div>
);
}

if (source === FileSources.vectordb) {
return (
<div className={cn(className, sourceToClassname[source] ?? '')}>
<span className="flex items-center justify-center">
<Database className="h-3 w-3" />
</span>
</div>
);
}

const endpoint = sourceToEndpoint[source ?? ''];
@@ -31,7 +64,7 @@ export default function SourceIcon({
return null;
}
return (
<button type="button" className={cn(className, sourceToClassname[source ?? ''] ?? '')}>
<div className={cn(className, sourceToClassname[source ?? ''] ?? '')}>
<span className="flex items-center justify-center">
<MinimalIcon
endpoint={endpoint}
@@ -40,6 +73,6 @@ export default function SourceIcon({
iconClassName="h-3 w-3"
/>
</span>
</button>
</div>
);
}

@@ -75,7 +75,20 @@ const MenuItem: FC<MenuItemProps> = ({
{showIconInMenu && <SpecIcon currentSpec={spec} endpointsConfig={endpointsConfig} />}
<div>
{title}
<div className="text-token-text-tertiary">{description}</div>
<div className="text-text-secondary">{description}</div>
{spec.badges && spec.badges.length > 0 && (
<div className="mt-1 flex gap-2">
{spec.badges.map((badge, index) => (
<span
key={index}
className="inline-flex items-center px-2 py-0.5 rounded-full text-xs font-semibold shadow-sm"
style={{ backgroundColor: badge.color, color: '#fff' }}
>
{badge.text}
</span>
))}
</div>
)}
</div>
</div>
</div>

@@ -19,9 +19,22 @@ export default function ModelSpecsMenu({ modelSpecs }: { modelSpecs?: TModelSpec
const localize = useLocalize();
const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
const modularChat = useRecoilValue(store.modularChat);
const user = useRecoilValue(store.user);
const getDefaultConversation = useDefaultConvo();
const assistantMap = useAssistantsMapContext();

const allowedModelSpecs = useMemo(() => {
if (!modelSpecs) {return [];}
return modelSpecs.filter(spec => {
// If no groups defined for spec, allow it.
if (!spec.groups || spec.groups.length === 0) {return true;}
// Otherwise, check if the user exists and has groups.
if (!user || !user.groups || user.groups.length === 0) {return false;}
// Check if at least one of the spec's groups is in the user's groups.
return spec.groups.some(groupId => user.groups.includes(groupId));
});
}, [modelSpecs, user]);

const onSelectSpec = (spec: TModelSpec) => {
const { preset } = spec;
preset.iconURL = getModelSpecIconURL(spec);
@@ -82,21 +95,15 @@ export default function ModelSpecsMenu({ modelSpecs }: { modelSpecs?: TModelSpec
};

const selected = useMemo(() => {
const spec = modelSpecs?.find((spec) => spec.name === conversation?.spec);
if (!spec) {
return undefined;
}
return spec;
}, [modelSpecs, conversation?.spec]);
const spec = allowedModelSpecs.find((spec) => spec.name === conversation?.spec);
return spec || undefined;
}, [allowedModelSpecs, conversation?.spec]);

const menuRef = useRef<HTMLDivElement>(null);

const handleKeyDown = useCallback((event: KeyboardEvent) => {
const menuItems = menuRef.current?.querySelectorAll('[role="option"]');
if (!menuItems) {
return;
}
if (!menuItems.length) {
if (!menuItems || !menuItems.length) {
return;
}

@@ -132,7 +139,7 @@ export default function ModelSpecsMenu({ modelSpecs }: { modelSpecs?: TModelSpec
endpointsConfig={endpointsConfig}
/>
<Portal>
{modelSpecs && modelSpecs.length && (
{allowedModelSpecs && allowedModelSpecs.length > 0 && (
<div
style={{
position: 'fixed',
@@ -154,7 +161,7 @@ export default function ModelSpecsMenu({ modelSpecs }: { modelSpecs?: TModelSpec
className="models-scrollbar mt-2 max-h-[65vh] min-w-[340px] max-w-xs overflow-y-auto rounded-lg border border-gray-100 bg-white shadow-lg dark:border-gray-700 dark:bg-gray-700 dark:text-white lg:max-h-[75vh]"
>
<ModelSpecs
specs={modelSpecs}
specs={allowedModelSpecs}
selected={selected}
setSelected={onSelectSpec}
endpointsConfig={endpointsConfig}

@@ -139,6 +139,7 @@ const ContentParts = memo(
isSubmitting={isSubmitting}
key={`part-${messageId}-${idx}`}
isCreatedByUser={isCreatedByUser}
isLast={idx === content.length - 1}
showCursor={idx === content.length - 1 && isLast}
/>
</MessageContext.Provider>

@@ -166,15 +166,12 @@ export const p: React.ElementType = memo(({ children }: TParagraphProps) => {
return <p className="mb-2 whitespace-pre-wrap">{children}</p>;
});

const cursor = ' ';

type TContentProps = {
content: string;
showCursor?: boolean;
isLatestMessage: boolean;
};

const Markdown = memo(({ content = '', showCursor, isLatestMessage }: TContentProps) => {
const Markdown = memo(({ content = '', isLatestMessage }: TContentProps) => {
const LaTeXParsing = useRecoilValue<boolean>(store.LaTeXParsing);
const isInitializing = content === '';

@@ -240,7 +237,7 @@ const Markdown = memo(({ content = '', showCursor, isLatestMessage }: TContentPr
}
}
>
{isLatestMessage && (showCursor ?? false) ? currentContent + cursor : currentContent}
{currentContent}
</ReactMarkdown>
</CodeBlockProvider>
</ArtifactProvider>

@@ -83,9 +83,7 @@ const DisplayMessage = ({ text, isCreatedByUser, message, showCursor }: TDisplay
|
||||
|
||||
let content: React.ReactElement;
|
||||
if (!isCreatedByUser) {
|
||||
content = (
|
||||
<Markdown content={text} showCursor={showCursorState} isLatestMessage={isLatestMessage} />
|
||||
);
|
||||
content = <Markdown content={text} isLatestMessage={isLatestMessage} />;
|
||||
} else if (enableUserMsgMarkdown) {
|
||||
content = <MarkdownLite content={text} />;
|
||||
} else {
|
||||
|
||||
@@ -8,9 +8,11 @@ import {
import { memo } from 'react';
import type { TMessageContentParts, TAttachment } from 'librechat-data-provider';
import { ErrorMessage } from './MessageContent';
import AgentUpdate from './Parts/AgentUpdate';
import ExecuteCode from './Parts/ExecuteCode';
import RetrievalCall from './RetrievalCall';
import Reasoning from './Parts/Reasoning';
import EmptyText from './Parts/EmptyText';
import CodeAnalyze from './CodeAnalyze';
import Container from './Container';
import ToolCall from './ToolCall';
@@ -20,145 +22,159 @@ import Image from './Image';

type PartProps = {
part?: TMessageContentParts;
isLast?: boolean;
isSubmitting: boolean;
showCursor: boolean;
isCreatedByUser: boolean;
attachments?: TAttachment[];
};

const Part = memo(({ part, isSubmitting, attachments, showCursor, isCreatedByUser }: PartProps) => {
if (!part) {
return null;
}

if (part.type === ContentTypes.ERROR) {
return (
<ErrorMessage
text={part[ContentTypes.ERROR] ?? part[ContentTypes.TEXT]?.value}
className="my-2"
/>
);
} else if (part.type === ContentTypes.TEXT) {
const text = typeof part.text === 'string' ? part.text : part.text.value;

if (typeof text !== 'string') {
return null;
}
if (part.tool_call_ids != null && !text) {
return null;
}
return (
<Container>
<Text text={text} isCreatedByUser={isCreatedByUser} showCursor={showCursor} />
</Container>
);
} else if (part.type === ContentTypes.THINK) {
const reasoning = typeof part.think === 'string' ? part.think : part.think.value;
if (typeof reasoning !== 'string') {
return null;
}
return <Reasoning reasoning={reasoning} />;
} else if (part.type === ContentTypes.TOOL_CALL) {
const toolCall = part[ContentTypes.TOOL_CALL];

if (!toolCall) {
const Part = memo(
({ part, isSubmitting, attachments, isLast, showCursor, isCreatedByUser }: PartProps) => {
if (!part) {
return null;
}

const isToolCall =
'args' in toolCall && (!toolCall.type || toolCall.type === ToolCallTypes.TOOL_CALL);
if (isToolCall && toolCall.name === Tools.execute_code) {
if (part.type === ContentTypes.ERROR) {
return (
<ExecuteCode
args={typeof toolCall.args === 'string' ? toolCall.args : ''}
output={toolCall.output ?? ''}
initialProgress={toolCall.progress ?? 0.1}
isSubmitting={isSubmitting}
attachments={attachments}
<ErrorMessage
text={part[ContentTypes.ERROR] ?? part[ContentTypes.TEXT]?.value}
className="my-2"
/>
);
} else if (isToolCall) {
} else if (part.type === ContentTypes.AGENT_UPDATE) {
return (
<ToolCall
args={toolCall.args ?? ''}
name={toolCall.name || ''}
output={toolCall.output ?? ''}
initialProgress={toolCall.progress ?? 0.1}
isSubmitting={isSubmitting}
attachments={attachments}
auth={toolCall.auth}
expires_at={toolCall.expires_at}
/>
);
} else if (toolCall.type === ToolCallTypes.CODE_INTERPRETER) {
const code_interpreter = toolCall[ToolCallTypes.CODE_INTERPRETER];
return (
<CodeAnalyze
initialProgress={toolCall.progress ?? 0.1}
code={code_interpreter.input}
outputs={code_interpreter.outputs ?? []}
isSubmitting={isSubmitting}
/>
);
} else if (
toolCall.type === ToolCallTypes.RETRIEVAL ||
toolCall.type === ToolCallTypes.FILE_SEARCH
) {
return (
<RetrievalCall initialProgress={toolCall.progress ?? 0.1} isSubmitting={isSubmitting} />
);
} else if (
toolCall.type === ToolCallTypes.FUNCTION &&
ToolCallTypes.FUNCTION in toolCall &&
imageGenTools.has(toolCall.function.name)
) {
return (
<ImageGen
initialProgress={toolCall.progress ?? 0.1}
args={toolCall.function.arguments as string}
/>
);
} else if (toolCall.type === ToolCallTypes.FUNCTION && ToolCallTypes.FUNCTION in toolCall) {
if (isImageVisionTool(toolCall)) {
if (isSubmitting && showCursor) {
return (
<>
<AgentUpdate currentAgentId={part[ContentTypes.AGENT_UPDATE]?.agentId} />
{isLast && showCursor && (
<Container>
<Text text={''} isCreatedByUser={isCreatedByUser} showCursor={showCursor} />
<EmptyText />
</Container>
);
}
)}
</>
);
} else if (part.type === ContentTypes.TEXT) {
const text = typeof part.text === 'string' ? part.text : part.text.value;

if (typeof text !== 'string') {
return null;
}
if (part.tool_call_ids != null && !text) {
return null;
}
return (
<Container>
<Text text={text} isCreatedByUser={isCreatedByUser} showCursor={showCursor} />
</Container>
);
} else if (part.type === ContentTypes.THINK) {
const reasoning = typeof part.think === 'string' ? part.think : part.think.value;
if (typeof reasoning !== 'string') {
return null;
}
return <Reasoning reasoning={reasoning} />;
} else if (part.type === ContentTypes.TOOL_CALL) {
const toolCall = part[ContentTypes.TOOL_CALL];

if (!toolCall) {
return null;
}

const isToolCall =
'args' in toolCall && (!toolCall.type || toolCall.type === ToolCallTypes.TOOL_CALL);
if (isToolCall && toolCall.name === Tools.execute_code) {
return (
<ExecuteCode
args={typeof toolCall.args === 'string' ? toolCall.args : ''}
output={toolCall.output ?? ''}
initialProgress={toolCall.progress ?? 0.1}
isSubmitting={isSubmitting}
attachments={attachments}
/>
);
} else if (isToolCall) {
return (
<ToolCall
args={toolCall.args ?? ''}
name={toolCall.name || ''}
output={toolCall.output ?? ''}
initialProgress={toolCall.progress ?? 0.1}
isSubmitting={isSubmitting}
attachments={attachments}
auth={toolCall.auth}
expires_at={toolCall.expires_at}
/>
);
} else if (toolCall.type === ToolCallTypes.CODE_INTERPRETER) {
const code_interpreter = toolCall[ToolCallTypes.CODE_INTERPRETER];
return (
<CodeAnalyze
initialProgress={toolCall.progress ?? 0.1}
code={code_interpreter.input}
outputs={code_interpreter.outputs ?? []}
isSubmitting={isSubmitting}
/>
);
} else if (
toolCall.type === ToolCallTypes.RETRIEVAL ||
toolCall.type === ToolCallTypes.FILE_SEARCH
) {
return (
<RetrievalCall initialProgress={toolCall.progress ?? 0.1} isSubmitting={isSubmitting} />
);
} else if (
toolCall.type === ToolCallTypes.FUNCTION &&
ToolCallTypes.FUNCTION in toolCall &&
imageGenTools.has(toolCall.function.name)
) {
return (
<ImageGen
initialProgress={toolCall.progress ?? 0.1}
args={toolCall.function.arguments as string}
/>
);
} else if (toolCall.type === ToolCallTypes.FUNCTION && ToolCallTypes.FUNCTION in toolCall) {
if (isImageVisionTool(toolCall)) {
if (isSubmitting && showCursor) {
return (
<Container>
<Text text={''} isCreatedByUser={isCreatedByUser} showCursor={showCursor} />
</Container>
);
}
return null;
}

return (
<ToolCall
initialProgress={toolCall.progress ?? 0.1}
isSubmitting={isSubmitting}
args={toolCall.function.arguments as string}
name={toolCall.function.name}
output={toolCall.function.output}
/>
);
}
} else if (part.type === ContentTypes.IMAGE_FILE) {
const imageFile = part[ContentTypes.IMAGE_FILE];
const height = imageFile.height ?? 1920;
const width = imageFile.width ?? 1080;
return (
<ToolCall
initialProgress={toolCall.progress ?? 0.1}
isSubmitting={isSubmitting}
args={toolCall.function.arguments as string}
name={toolCall.function.name}
output={toolCall.function.output}
<Image
imagePath={imageFile.filepath}
height={height}
width={width}
altText={imageFile.filename ?? 'Uploaded Image'}
placeholderDimensions={{
height: height + 'px',
width: width + 'px',
}}
/>
);
}
} else if (part.type === ContentTypes.IMAGE_FILE) {
const imageFile = part[ContentTypes.IMAGE_FILE];
const height = imageFile.height ?? 1920;
const width = imageFile.width ?? 1080;
return (
<Image
imagePath={imageFile.filepath}
height={height}
width={width}
altText={imageFile.filename ?? 'Uploaded Image'}
placeholderDimensions={{
height: height + 'px',
width: width + 'px',
}}
/>
);
}

return null;
});
return null;
},
);

export default Part;

@@ -0,0 +1,39 @@
import React, { useMemo } from 'react';
import { EModelEndpoint } from 'librechat-data-provider';
import { useAgentsMapContext } from '~/Providers';
import Icon from '~/components/Endpoints/Icon';

interface AgentUpdateProps {
currentAgentId: string;
}

const AgentUpdate: React.FC<AgentUpdateProps> = ({ currentAgentId }) => {
const agentsMap = useAgentsMapContext() || {};
const currentAgent = useMemo(() => agentsMap[currentAgentId], [agentsMap, currentAgentId]);
if (!currentAgentId) {
return null;
}
return (
<div className="relative">
<div className="absolute -left-6 flex h-full w-4 items-center justify-center">
<div className="relative h-full w-4">
<div className="absolute left-0 top-0 h-1/2 w-px border border-border-medium"></div>
<div className="absolute left-0 top-1/2 h-px w-3 border border-border-medium"></div>
</div>
</div>
<div className="my-4 flex items-center gap-2">
<div className="flex h-6 w-6 items-center justify-center overflow-hidden rounded-full">
<Icon
endpoint={EModelEndpoint.agents}
agentName={currentAgent?.name ?? ''}
iconURL={currentAgent?.avatar?.filepath}
isCreatedByUser={false}
/>
</div>
<div className="font-medium text-text-primary">{currentAgent?.name}</div>
</div>
</div>
);
};

export default AgentUpdate;
@@ -0,0 +1,17 @@
import { memo } from 'react';

const EmptyTextPart = memo(() => {
return (
<div className="text-message mb-[0.625rem] flex min-h-[20px] flex-col items-start gap-3 overflow-visible">
<div className="markdown prose dark:prose-invert light w-full break-words dark:text-gray-100">
<div className="absolute">
<p className="submitting relative">
<span className="result-thinking" />
</p>
</div>
</div>
</div>
);
});

export default EmptyTextPart;
@@ -29,9 +29,7 @@ const TextPart = memo(({ text, isCreatedByUser, showCursor }: TextPartProps) =>
|
||||
|
||||
const content: ContentType = useMemo(() => {
|
||||
if (!isCreatedByUser) {
|
||||
return (
|
||||
<Markdown content={text} showCursor={showCursorState} isLatestMessage={isLatestMessage} />
|
||||
);
|
||||
return <Markdown content={text} isLatestMessage={isLatestMessage} />;
|
||||
} else if (enableUserMsgMarkdown) {
|
||||
return <MarkdownLite content={text} />;
|
||||
} else {
|
||||
|
||||
@@ -142,7 +142,7 @@ const AdminSettings = () => {
<Button
size={'sm'}
variant={'outline'}
className="btn btn-neutral border-token-border-light relative mb-4 h-9 w-full gap-1 rounded-lg font-medium"
className="btn btn-neutral border-token-border-light relative h-9 w-full gap-1 rounded-lg font-medium"
>
<ShieldEllipsis className="cursor-pointer" aria-hidden="true" />
{localize('com_ui_admin_settings')}

@@ -0,0 +1,27 @@
import React from 'react';
import { Settings2 } from 'lucide-react';
import { Button } from '~/components/ui';
import { useLocalize } from '~/hooks';
import { Panel } from '~/common';

interface AdvancedButtonProps {
setActivePanel: (panel: Panel) => void;
}

const AdvancedButton: React.FC<AdvancedButtonProps> = ({ setActivePanel }) => {
const localize = useLocalize();

return (
<Button
size={'sm'}
variant={'outline'}
className="btn btn-neutral border-token-border-light relative h-9 w-full gap-1 rounded-lg font-medium"
onClick={() => setActivePanel(Panel.advanced)}
>
<Settings2 className="h-4 w-4 cursor-pointer" aria-hidden="true" />
{localize('com_ui_advanced')}
</Button>
);
};

export default AdvancedButton;
@@ -0,0 +1,55 @@
import { useMemo } from 'react';
import { ChevronLeft } from 'lucide-react';
import { AgentCapabilities } from 'librechat-data-provider';
import { useFormContext, Controller } from 'react-hook-form';
import type { AgentForm, AgentPanelProps } from '~/common';
import MaxAgentSteps from './MaxAgentSteps';
import AgentChain from './AgentChain';
import { useLocalize } from '~/hooks';
import { Panel } from '~/common';

export default function AdvancedPanel({
agentsConfig,
setActivePanel,
}: Pick<AgentPanelProps, 'setActivePanel' | 'agentsConfig'>) {
const localize = useLocalize();
const methods = useFormContext<AgentForm>();
const { control, watch } = methods;
const currentAgentId = watch('id');
const chainEnabled = useMemo(
() => agentsConfig?.capabilities.includes(AgentCapabilities.chain) ?? false,
[agentsConfig],
);

return (
<div className="scrollbar-gutter-stable h-full min-h-[40vh] overflow-auto pb-12 text-sm">
<div className="advanced-panel relative flex flex-col items-center px-16 py-4 text-center">
<div className="absolute left-0 top-4">
<button
type="button"
className="btn btn-neutral relative"
onClick={() => {
setActivePanel(Panel.builder);
}}
>
<div className="advanced-panel-content flex w-full items-center justify-center gap-2">
<ChevronLeft />
</div>
</button>
</div>
<div className="mb-2 mt-2 text-xl font-medium">{localize('com_ui_advanced_settings')}</div>
</div>
<div className="flex flex-col gap-4 px-2">
<MaxAgentSteps />
{chainEnabled && (
<Controller
name="agent_ids"
control={control}
defaultValue={[]}
render={({ field }) => <AgentChain field={field} currentAgentId={currentAgentId} />}
/>
)}
</div>
</div>
);
}
179  client/src/components/SidePanel/Agents/Advanced/AgentChain.tsx  Normal file
@@ -0,0 +1,179 @@
import { X, Link2, PlusCircle } from 'lucide-react';
import { EModelEndpoint } from 'librechat-data-provider';
import React, { useState, useMemo, useCallback, useEffect } from 'react';
import type { ControllerRenderProps } from 'react-hook-form';
import type { AgentForm, OptionWithIcon } from '~/common';
import ControlCombobox from '~/components/ui/ControlCombobox';
import { HoverCard, HoverCardPortal, HoverCardContent, HoverCardTrigger } from '~/components/ui';
import { CircleHelpIcon } from '~/components/svg';
import { useAgentsMapContext } from '~/Providers';
import Icon from '~/components/Endpoints/Icon';
import { useLocalize } from '~/hooks';
import { ESide } from '~/common';

interface AgentChainProps {
field: ControllerRenderProps<AgentForm, 'agent_ids'>;
currentAgentId: string;
}

/** TODO: make configurable */
const MAX_AGENTS = 10;

const AgentChain: React.FC<AgentChainProps> = ({ field, currentAgentId }) => {
const localize = useLocalize();
const [newAgentId, setNewAgentId] = useState('');
const agentsMap = useAgentsMapContext() || {};
const agentIds = field.value || [];

const agents = useMemo(() => Object.values(agentsMap), [agentsMap]);

const selectableAgents = useMemo(
() =>
agents
.filter((agent) => agent?.id !== currentAgentId)
.map(
(agent) =>
({
label: agent?.name || '',
value: agent?.id,
icon: (
<Icon
endpoint={EModelEndpoint.agents}
agentName={agent?.name ?? ''}
iconURL={agent?.avatar?.filepath}
isCreatedByUser={false}
/>
),
}) as OptionWithIcon,
),
[agents, currentAgentId],
);

const getAgentDetails = useCallback((id: string) => agentsMap[id], [agentsMap]);

useEffect(() => {
if (newAgentId && agentIds.length < MAX_AGENTS) {
field.onChange([...agentIds, newAgentId]);
setNewAgentId('');
}
}, [newAgentId, agentIds, field]);

const removeAgentAt = (index: number) => {
field.onChange(agentIds.filter((_, i) => i !== index));
};

const updateAgentAt = (index: number, id: string) => {
const updated = [...agentIds];
updated[index] = id;
field.onChange(updated);
};

return (
<HoverCard openDelay={50}>
<div className="flex items-center justify-between gap-2">
<div className="flex items-center gap-2">
<label className="font-semibold text-text-primary">
{localize('com_ui_agent_chain')}
</label>
<HoverCardTrigger>
<CircleHelpIcon className="h-4 w-4 text-text-tertiary" />
</HoverCardTrigger>
</div>
<div className="text-xs text-text-secondary">
{agentIds.length} / {MAX_AGENTS}
</div>
</div>
<div className="space-y-1">
{/* Current fixed agent */}
<div className="flex h-10 items-center justify-between rounded-md border border-border-medium bg-surface-primary-contrast px-3 py-2">
<div className="flex items-center gap-2">
<div className="flex h-6 w-6 items-center justify-center overflow-hidden rounded-full">
<Icon
endpoint={EModelEndpoint.agents}
agentName={getAgentDetails(currentAgentId)?.name ?? ''}
iconURL={getAgentDetails(currentAgentId)?.avatar?.filepath}
isCreatedByUser={false}
/>
</div>
<div className="font-medium text-text-primary">
{getAgentDetails(currentAgentId)?.name}
</div>
</div>
</div>
{<Link2 className="mx-auto text-text-secondary" size={14} />}
{agentIds.map((agentId, idx) => (
<React.Fragment key={agentId}>
<div className="flex h-10 items-center gap-2 rounded-md border border-border-medium bg-surface-tertiary pr-2">
<ControlCombobox
isCollapsed={false}
ariaLabel={localize('com_ui_agent_var', { 0: localize('com_ui_select') })}
selectedValue={agentId}
setValue={(id) => updateAgentAt(idx, id)}
selectPlaceholder={localize('com_ui_agent_var', { 0: localize('com_ui_select') })}
searchPlaceholder={localize('com_ui_agent_var', { 0: localize('com_ui_search') })}
items={selectableAgents}
displayValue={getAgentDetails(agentId)?.name ?? ''}
SelectIcon={
<Icon
endpoint={EModelEndpoint.agents}
isCreatedByUser={false}
agentName={getAgentDetails(agentId)?.name ?? ''}
iconURL={getAgentDetails(agentId)?.avatar?.filepath}
/>
}
className="flex-1 border-border-heavy"
containerClassName="px-0"
/>
{/* Future Settings button? */}
{/* <button className="hover:bg-surface-hover p-1 rounded transition">
<Settings size={16} className="text-text-secondary" />
</button> */}
<button
className="rounded-xl p-1 transition hover:bg-surface-hover"
onClick={() => removeAgentAt(idx)}
>
<X size={18} className="text-text-secondary" />
</button>
</div>
{idx < agentIds.length - 1 && (
<Link2 className="mx-auto text-text-secondary" size={14} />
)}
</React.Fragment>
))}

{agentIds.length < MAX_AGENTS && (
<>
{agentIds.length > 0 && <Link2 className="mx-auto text-text-secondary" size={14} />}
<ControlCombobox
isCollapsed={false}
ariaLabel={localize('com_ui_agent_var', { 0: localize('com_ui_add') })}
selectedValue=""
setValue={setNewAgentId}
selectPlaceholder={localize('com_ui_agent_var', { 0: localize('com_ui_add') })}
searchPlaceholder={localize('com_ui_agent_var', { 0: localize('com_ui_search') })}
items={selectableAgents}
className="h-10 w-full border-dashed border-border-heavy text-center text-text-secondary hover:text-text-primary"
containerClassName="px-0"
SelectIcon={<PlusCircle size={16} className="text-text-secondary" />}
/>
</>
)}

{agentIds.length >= MAX_AGENTS && (
<p className="pt-1 text-center text-xs italic text-text-tertiary">
{localize('com_ui_agent_chain_max', { 0: MAX_AGENTS })}
</p>
)}
</div>
<HoverCardPortal>
<HoverCardContent side={ESide.Top} className="w-80">
<div className="space-y-2">
<p className="text-sm text-text-secondary">{localize('com_ui_agent_chain_info')}</p>
</div>
</HoverCardContent>
</HoverCardPortal>
</HoverCard>
);
};

export default AgentChain;
@@ -0,0 +1,52 @@
import { useFormContext, Controller } from 'react-hook-form';
import type { AgentForm } from '~/common';
import {
HoverCard,
FormInput,
HoverCardPortal,
HoverCardContent,
HoverCardTrigger,
} from '~/components/ui';
import { CircleHelpIcon } from '~/components/svg';
import { useLocalize } from '~/hooks';
import { ESide } from '~/common';

export default function AdvancedPanel() {
const localize = useLocalize();
const methods = useFormContext<AgentForm>();
const { control } = methods;

return (
<HoverCard openDelay={50}>
<Controller
name="recursion_limit"
control={control}
render={({ field }) => (
<FormInput
field={field}
containerClass="w-1/2"
inputClass="w-full"
label={localize('com_ui_agent_recursion_limit')}
placeholder={localize('com_nav_theme_system')}
type="number"
labelClass="w-fit"
labelAdjacent={
<HoverCardTrigger>
<CircleHelpIcon className="h-4 w-4 text-text-tertiary" />
</HoverCardTrigger>
}
/>
)}
/>
<HoverCardPortal>
<HoverCardContent side={ESide.Top} className="w-80">
<div className="space-y-2">
<p className="text-sm text-text-secondary">
{localize('com_ui_agent_recursion_limit_info')}
</p>
</div>
</HoverCardContent>
</HoverCardPortal>
</HoverCard>
);
}
@@ -1,31 +1,19 @@
import React, { useState, useMemo, useCallback } from 'react';
import { useQueryClient } from '@tanstack/react-query';
import { Controller, useWatch, useFormContext } from 'react-hook-form';
import {
QueryKeys,
SystemRoles,
Permissions,
EModelEndpoint,
PermissionTypes,
AgentCapabilities,
} from 'librechat-data-provider';
import { QueryKeys, EModelEndpoint, AgentCapabilities } from 'librechat-data-provider';
import type { TPlugin } from 'librechat-data-provider';
import type { AgentForm, AgentPanelProps, IconComponentTypes } from '~/common';
import { cn, defaultTextProps, removeFocusOutlines, getEndpointField, getIconKey } from '~/utils';
import { useCreateAgentMutation, useUpdateAgentMutation } from '~/data-provider';
import { useLocalize, useAuthContext, useHasAccess } from '~/hooks';
import { useToastContext, useFileMapContext } from '~/Providers';
import { icons } from '~/components/Chat/Menus/Endpoints/Icons';
import Action from '~/components/SidePanel/Builder/Action';
import { ToolSelectDialog } from '~/components/Tools';
import DuplicateAgent from './DuplicateAgent';
import { processAgentOption } from '~/utils';
import AdminSettings from './AdminSettings';
import DeleteButton from './DeleteButton';
import AgentAvatar from './AgentAvatar';
import { Spinner } from '~/components';
import FileContext from './FileContext';
import { useLocalize } from '~/hooks';
import FileSearch from './FileSearch';
import ShareAgent from './ShareAgent';
import Artifacts from './Artifacts';
import AgentTool from './AgentTool';
import CodeForm from './Code/Form';
@@ -42,11 +30,10 @@ export default function AgentConfig({
setAction,
actions = [],
agentsConfig,
endpointsConfig,
createMutation,
setActivePanel,
setCurrentAgentId,
endpointsConfig,
}: AgentPanelProps) {
const { user } = useAuthContext();
const fileMap = useFileMapContext();
const queryClient = useQueryClient();

@@ -65,11 +52,6 @@ export default function AgentConfig({
const tools = useWatch({ control, name: 'tools' });
const agent_id = useWatch({ control, name: 'id' });

const hasAccessToShareAgents = useHasAccess({
permissionType: PermissionTypes.AGENTS,
permission: Permissions.SHARED_GLOBAL,
});

const toolsEnabled = useMemo(
() => agentsConfig?.capabilities.includes(AgentCapabilities.tools),
[agentsConfig],
@@ -82,6 +64,10 @@ export default function AgentConfig({
() => agentsConfig?.capabilities.includes(AgentCapabilities.artifacts) ?? false,
[agentsConfig],
);
const ocrEnabled = useMemo(
() => agentsConfig?.capabilities.includes(AgentCapabilities.ocr) ?? false,
[agentsConfig],
);
const fileSearchEnabled = useMemo(
() => agentsConfig?.capabilities.includes(AgentCapabilities.file_search) ?? false,
[agentsConfig],
@@ -91,6 +77,26 @@ export default function AgentConfig({
[agentsConfig],
);

const context_files = useMemo(() => {
if (typeof agent === 'string') {
return [];
}

if (agent?.id !== agent_id) {
return [];
}

if (agent.context_files) {
return agent.context_files;
}

const _agent = processAgentOption({
agent,
fileMap,
});
return _agent.context_files ?? [];
}, [agent, agent_id, fileMap]);

const knowledge_files = useMemo(() => {
if (typeof agent === 'string') {
return [];
@@ -131,46 +137,6 @@ export default function AgentConfig({
return _agent.code_files ?? [];
}, [agent, agent_id, fileMap]);

/* Mutations */
const update = useUpdateAgentMutation({
onSuccess: (data) => {
showToast({
message: `${localize('com_assistants_update_success')} ${
data.name ?? localize('com_ui_agent')
}`,
});
},
onError: (err) => {
const error = err as Error;
showToast({
message: `${localize('com_agents_update_error')}${
error.message ? ` ${localize('com_ui_error')}: ${error.message}` : ''
}`,
status: 'error',
});
},
});

const create = useCreateAgentMutation({
onSuccess: (data) => {
setCurrentAgentId(data.id);
showToast({
message: `${localize('com_assistants_create_success')} ${
data.name ?? localize('com_ui_agent')
}`,
});
},
onError: (err) => {
const error = err as Error;
showToast({
message: `${localize('com_agents_create_error')}${
error.message ? ` ${localize('com_ui_error')}: ${error.message}` : ''
}`,
status: 'error',
});
},
});

const handleAddActions = useCallback(() => {
if (!agent_id) {
showToast({
@@ -200,26 +166,14 @@ export default function AgentConfig({
Icon = icons[iconKey];
}

const renderSaveButton = () => {
if (create.isLoading || update.isLoading) {
return <Spinner className="icon-md" aria-hidden="true" />;
}

if (agent_id) {
return localize('com_ui_save');
}

return localize('com_ui_create');
};

return (
<>
<div className="h-auto bg-white px-4 pb-8 pt-3 dark:bg-transparent">
<div className="h-auto bg-white px-4 pt-3 dark:bg-transparent">
{/* Avatar & Name */}
<div className="mb-4">
<AgentAvatar
createMutation={create}
agent_id={agent_id}
createMutation={createMutation}
avatar={agent?.['avatar'] ?? null}
/>
<label className={labelClass} htmlFor="name">
@@ -334,17 +288,19 @@ export default function AgentConfig({
</div>
</button>
</div>
{(codeEnabled || fileSearchEnabled || artifactsEnabled) && (
{(codeEnabled || fileSearchEnabled || artifactsEnabled || ocrEnabled) && (
<div className="mb-4 flex w-full flex-col items-start gap-3">
<label className="text-token-text-primary block font-medium">
{localize('com_assistants_capabilities')}
</label>
{/* Code Execution */}
{codeEnabled && <CodeForm agent_id={agent_id} files={code_files} />}
{/* File Search */}
{fileSearchEnabled && <FileSearch agent_id={agent_id} files={knowledge_files} />}
{/* File Context (OCR) */}
{ocrEnabled && <FileContext agent_id={agent_id} files={context_files} />}
{/* Artifacts */}
{artifactsEnabled && <Artifacts />}
{/* File Search */}
{fileSearchEnabled && <FileSearch agent_id={agent_id} files={knowledge_files} />}
</div>
)}
{/* Agent Tools & Actions */}
@@ -404,34 +360,6 @@ export default function AgentConfig({
</div>
</div>
</div>
{user?.role === SystemRoles.ADMIN && <AdminSettings />}
{/* Context Button */}
<div className="flex items-center justify-end gap-2">
<DeleteButton
agent_id={agent_id}
setCurrentAgentId={setCurrentAgentId}
createMutation={create}
/>
{(agent?.author === user?.id || user?.role === SystemRoles.ADMIN) &&
hasAccessToShareAgents && (
<ShareAgent
agent_id={agent_id}
agentName={agent?.name ?? ''}
projectIds={agent?.projectIds ?? []}
isCollaborative={agent?.isCollaborative}
/>
)}
{agent && agent.author === user?.id && <DuplicateAgent agent_id={agent_id} />}
{/* Submit Button */}
<button
className="btn btn-primary focus:shadow-outline flex h-9 w-full items-center justify-center px-4 py-2 font-semibold text-white hover:bg-green-600 focus:border-green-500"
type="submit"
disabled={create.isLoading || update.isLoading}
aria-busy={create.isLoading || update.isLoading}
>
{renderSaveButton()}
</button>
</div>
</div>
<ToolSelectDialog
isOpen={showToolDialog}

client/src/components/SidePanel/Agents/AgentFooter.tsx (new file, 86 lines)
@@ -0,0 +1,86 @@
|
||||
import React from 'react';
|
||||
import { useWatch, useFormContext } from 'react-hook-form';
|
||||
import { SystemRoles, Permissions, PermissionTypes } from 'librechat-data-provider';
|
||||
import type { AgentForm, AgentPanelProps } from '~/common';
|
||||
import { useLocalize, useAuthContext, useHasAccess } from '~/hooks';
|
||||
import { useUpdateAgentMutation } from '~/data-provider';
|
||||
import AdvancedButton from './Advanced/AdvancedButton';
|
||||
import DuplicateAgent from './DuplicateAgent';
|
||||
import AdminSettings from './AdminSettings';
|
||||
import DeleteButton from './DeleteButton';
|
||||
import { Spinner } from '~/components';
|
||||
import ShareAgent from './ShareAgent';
|
||||
import { Panel } from '~/common';
|
||||
|
||||
export default function AgentFooter({
|
||||
activePanel,
|
||||
createMutation,
|
||||
updateMutation,
|
||||
setActivePanel,
|
||||
setCurrentAgentId,
|
||||
}: Pick<
|
||||
AgentPanelProps,
|
||||
'setCurrentAgentId' | 'createMutation' | 'activePanel' | 'setActivePanel'
|
||||
> & {
|
||||
updateMutation: ReturnType<typeof useUpdateAgentMutation>;
|
||||
}) {
|
||||
const localize = useLocalize();
|
||||
const { user } = useAuthContext();
|
||||
|
||||
const methods = useFormContext<AgentForm>();
|
||||
|
||||
const { control } = methods;
|
||||
const agent = useWatch({ control, name: 'agent' });
|
||||
const agent_id = useWatch({ control, name: 'id' });
|
||||
|
||||
const hasAccessToShareAgents = useHasAccess({
|
||||
permissionType: PermissionTypes.AGENTS,
|
||||
permission: Permissions.SHARED_GLOBAL,
|
||||
});
|
||||
|
||||
const renderSaveButton = () => {
|
||||
if (createMutation.isLoading || updateMutation.isLoading) {
|
||||
return <Spinner className="icon-md" aria-hidden="true" />;
|
||||
}
|
||||
|
||||
if (agent_id) {
|
||||
return localize('com_ui_save');
|
||||
}
|
||||
|
||||
return localize('com_ui_create');
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="mx-1 mb-1 flex w-full flex-col gap-2">
|
||||
{activePanel !== Panel.advanced && <AdvancedButton setActivePanel={setActivePanel} />}
|
||||
{user?.role === SystemRoles.ADMIN && <AdminSettings />}
|
||||
{/* Context Button */}
|
||||
<div className="flex items-center justify-end gap-2">
|
||||
<DeleteButton
|
||||
agent_id={agent_id}
|
||||
setCurrentAgentId={setCurrentAgentId}
|
||||
createMutation={createMutation}
|
||||
/>
|
||||
{(agent?.author === user?.id || user?.role === SystemRoles.ADMIN) &&
|
||||
hasAccessToShareAgents && (
|
||||
<ShareAgent
|
||||
agent_id={agent_id}
|
||||
agentName={agent?.name ?? ''}
|
||||
projectIds={agent?.projectIds ?? []}
|
||||
isCollaborative={agent?.isCollaborative}
|
||||
/>
|
||||
)}
|
||||
{agent && agent.author === user?.id && <DuplicateAgent agent_id={agent_id} />}
|
||||
{/* Submit Button */}
|
||||
<button
|
||||
className="btn btn-primary focus:shadow-outline flex h-9 w-full items-center justify-center px-4 py-2 font-semibold text-white hover:bg-green-600 focus:border-green-500"
|
||||
type="submit"
|
||||
disabled={createMutation.isLoading || updateMutation.isLoading}
|
||||
aria-busy={createMutation.isLoading || updateMutation.isLoading}
|
||||
>
|
||||
{renderSaveButton()}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -19,8 +19,10 @@ import { useSelectAgent, useLocalize, useAuthContext } from '~/hooks';
|
||||
import AgentPanelSkeleton from './AgentPanelSkeleton';
|
||||
import { createProviderOption } from '~/utils';
|
||||
import { useToastContext } from '~/Providers';
|
||||
import AdvancedPanel from './Advanced/AdvancedPanel';
|
||||
import AgentConfig from './AgentConfig';
|
||||
import AgentSelect from './AgentSelect';
|
||||
import AgentFooter from './AgentFooter';
|
||||
import { Button } from '~/components';
|
||||
import ModelPanel from './ModelPanel';
|
||||
import { Panel } from '~/common';
|
||||
@@ -130,6 +132,7 @@ export default function AgentPanel({
|
||||
agent_ids,
|
||||
end_after_tools,
|
||||
hide_sequential_outputs,
|
||||
recursion_limit,
|
||||
} = data;
|
||||
|
||||
const model = _model ?? '';
|
||||
@@ -151,6 +154,7 @@ export default function AgentPanel({
|
||||
agent_ids,
|
||||
end_after_tools,
|
||||
hide_sequential_outputs,
|
||||
recursion_limit,
|
||||
},
|
||||
});
|
||||
return;
|
||||
@@ -175,6 +179,7 @@ export default function AgentPanel({
|
||||
agent_ids,
|
||||
end_after_tools,
|
||||
hide_sequential_outputs,
|
||||
recursion_limit,
|
||||
});
|
||||
},
|
||||
[agent_id, create, update, showToast, localize],
|
||||
@@ -276,12 +281,25 @@ export default function AgentPanel({
|
||||
<AgentConfig
|
||||
actions={actions}
|
||||
setAction={setAction}
|
||||
createMutation={create}
|
||||
agentsConfig={agentsConfig}
|
||||
setActivePanel={setActivePanel}
|
||||
endpointsConfig={endpointsConfig}
|
||||
setCurrentAgentId={setCurrentAgentId}
|
||||
/>
|
||||
)}
|
||||
{canEditAgent && !agentQuery.isInitialLoading && activePanel === Panel.advanced && (
|
||||
<AdvancedPanel setActivePanel={setActivePanel} agentsConfig={agentsConfig} />
|
||||
)}
|
||||
{canEditAgent && !agentQuery.isInitialLoading && (
|
||||
<AgentFooter
|
||||
createMutation={create}
|
||||
updateMutation={update}
|
||||
activePanel={activePanel}
|
||||
setActivePanel={setActivePanel}
|
||||
setCurrentAgentId={setCurrentAgentId}
|
||||
/>
|
||||
)}
|
||||
</form>
|
||||
</FormProvider>
|
||||
);
|
||||
|
||||
@@ -3,7 +3,7 @@ import { Skeleton } from '~/components/ui';
|
||||
|
||||
export default function AgentPanelSkeleton() {
|
||||
return (
|
||||
<div className="h-auto bg-white px-4 pb-8 pt-3 dark:bg-transparent">
|
||||
<div className="h-auto bg-white dark:bg-transparent">
|
||||
{/* Avatar */}
|
||||
<div className="mb-4">
|
||||
<div className="flex w-full items-center justify-center gap-4">
|
||||
|
||||
@@ -81,10 +81,29 @@ export default function AgentSelect({
|
||||
return;
|
||||
}
|
||||
|
||||
if (capabilities[name] !== undefined) {
|
||||
formValues[name] = value;
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
name === 'agent_ids' &&
|
||||
Array.isArray(value) &&
|
||||
value.every((item) => typeof item === 'string')
|
||||
) {
|
||||
formValues[name] = value;
|
||||
return;
|
||||
}
|
||||
|
||||
if (!keys.has(name)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (name === 'recursion_limit' && typeof value === 'number') {
|
||||
formValues[name] = value;
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof value !== 'number' && typeof value !== 'object') {
|
||||
formValues[name] = value;
|
||||
}
|
||||
|
||||
client/src/components/SidePanel/Agents/FileContext.tsx (new file, 128 lines)
@@ -0,0 +1,128 @@
|
||||
import { useState, useRef } from 'react';
|
||||
import {
|
||||
EModelEndpoint,
|
||||
EToolResources,
|
||||
mergeFileConfig,
|
||||
fileConfig as defaultFileConfig,
|
||||
} from 'librechat-data-provider';
|
||||
import type { ExtendedFile } from '~/common';
|
||||
import { useFileHandling, useLocalize, useLazyEffect } from '~/hooks';
|
||||
import FileRow from '~/components/Chat/Input/Files/FileRow';
|
||||
import { useGetFileConfig } from '~/data-provider';
|
||||
import { HoverCard, HoverCardContent, HoverCardPortal, HoverCardTrigger } from '~/components/ui';
|
||||
import { AttachmentIcon, CircleHelpIcon } from '~/components/svg';
|
||||
import { useChatContext } from '~/Providers';
|
||||
import { ESide } from '~/common';
|
||||
|
||||
export default function FileContext({
|
||||
agent_id,
|
||||
files: _files,
|
||||
}: {
|
||||
agent_id: string;
|
||||
files?: [string, ExtendedFile][];
|
||||
}) {
|
||||
const localize = useLocalize();
|
||||
const { setFilesLoading } = useChatContext();
|
||||
const fileInputRef = useRef<HTMLInputElement>(null);
|
||||
const [files, setFiles] = useState<Map<string, ExtendedFile>>(new Map());
|
||||
|
||||
const { data: fileConfig = defaultFileConfig } = useGetFileConfig({
|
||||
select: (data) => mergeFileConfig(data),
|
||||
});
|
||||
|
||||
const { handleFileChange } = useFileHandling({
|
||||
overrideEndpoint: EModelEndpoint.agents,
|
||||
additionalMetadata: { agent_id, tool_resource: EToolResources.ocr },
|
||||
fileSetter: setFiles,
|
||||
});
|
||||
|
||||
useLazyEffect(
|
||||
() => {
|
||||
if (_files) {
|
||||
setFiles(new Map(_files));
|
||||
}
|
||||
},
|
||||
[_files],
|
||||
750,
|
||||
);
|
||||
|
||||
const endpointFileConfig = fileConfig.endpoints[EModelEndpoint.agents];
|
||||
const isUploadDisabled = endpointFileConfig.disabled ?? false;
|
||||
|
||||
if (isUploadDisabled) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const handleButtonClick = () => {
|
||||
// necessary to reset the input
|
||||
if (fileInputRef.current) {
|
||||
fileInputRef.current.value = '';
|
||||
}
|
||||
fileInputRef.current?.click();
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="w-full">
|
||||
<HoverCard openDelay={50}>
|
||||
<div className="mb-2 flex items-center gap-2">
|
||||
<HoverCardTrigger asChild>
|
||||
<span className="flex items-center gap-2">
|
||||
<label className="text-token-text-primary block font-medium">
|
||||
{localize('com_agents_file_context')}
|
||||
</label>
|
||||
<CircleHelpIcon className="h-4 w-4 text-text-tertiary" />
|
||||
</span>
|
||||
</HoverCardTrigger>
|
||||
<HoverCardPortal>
|
||||
<HoverCardContent side={ESide.Top} className="w-80">
|
||||
<div className="space-y-2">
|
||||
<p className="text-sm text-text-secondary">
|
||||
{localize('com_agents_file_context_info')}
|
||||
</p>
|
||||
</div>
|
||||
</HoverCardContent>
|
||||
</HoverCardPortal>
|
||||
</div>
|
||||
</HoverCard>
|
||||
<div className="flex flex-col gap-3">
|
||||
{/* File Context (OCR) Files */}
|
||||
<FileRow
|
||||
files={files}
|
||||
setFiles={setFiles}
|
||||
setFilesLoading={setFilesLoading}
|
||||
agent_id={agent_id}
|
||||
tool_resource={EToolResources.ocr}
|
||||
Wrapper={({ children }) => <div className="flex flex-wrap gap-2">{children}</div>}
|
||||
/>
|
||||
<div>
|
||||
<button
|
||||
type="button"
|
||||
disabled={!agent_id}
|
||||
className="btn btn-neutral border-token-border-light relative h-9 w-full rounded-lg font-medium"
|
||||
onClick={handleButtonClick}
|
||||
>
|
||||
<div className="flex w-full items-center justify-center gap-1">
|
||||
<AttachmentIcon className="text-token-text-primary h-4 w-4" />
|
||||
<input
|
||||
multiple={true}
|
||||
type="file"
|
||||
style={{ display: 'none' }}
|
||||
tabIndex={-1}
|
||||
ref={fileInputRef}
|
||||
disabled={!agent_id}
|
||||
onChange={handleFileChange}
|
||||
/>
|
||||
{localize('com_ui_upload_file_context')}
|
||||
</div>
|
||||
</button>
|
||||
</div>
|
||||
{/* Disabled Message */}
|
||||
{agent_id ? null : (
|
||||
<div className="text-xs text-text-secondary">
|
||||
{localize('com_agents_file_context_disabled')}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
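Not part of the diff: a minimal sketch of how the new FileContext panel is mounted, mirroring the ocrEnabled branch added to AgentConfig above. The wrapper component and prop plumbing here are illustrative; only the FileContext props and the capability flag come from the changes shown.

import type { ExtendedFile } from '~/common';
import FileContext from '~/components/SidePanel/Agents/FileContext';

// Illustrative wrapper: AgentConfig renders the panel behind an `ocrEnabled`
// capability flag and passes the agent's OCR files as `context_files`.
function FileContextSection({
  agent_id,
  ocrEnabled,
  context_files,
}: {
  agent_id: string;
  ocrEnabled: boolean;
  context_files?: [string, ExtendedFile][];
}) {
  if (!ocrEnabled) {
    return null;
  }
  return <FileContext agent_id={agent_id} files={context_files} />;
}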
@@ -77,7 +77,7 @@ export default function Parameters({
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="scrollbar-gutter-stable h-full min-h-[50vh] overflow-auto pb-12 text-sm">
|
||||
<div className="mx-1 mb-1 flex h-full min-h-[50vh] w-full flex-col gap-2 text-sm">
|
||||
<div className="model-panel relative flex flex-col items-center px-16 py-4 text-center">
|
||||
<div className="absolute left-0 top-4">
|
||||
<button
|
||||
@@ -224,19 +224,17 @@ export default function Parameters({
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
{/* Reset Parameters Button */}
|
||||
<div className="mt-6 flex justify-center">
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleResetParameters}
|
||||
className="btn btn-neutral flex w-full items-center justify-center gap-2 px-4 py-2 text-sm"
|
||||
>
|
||||
<RotateCcw className="h-4 w-4" aria-hidden="true" />
|
||||
{localize('com_ui_reset_var', { 0: localize('com_ui_model_parameters') })}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
{/* Reset Parameters Button */}
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleResetParameters}
|
||||
className="btn btn-neutral my-1 flex w-full items-center justify-center gap-2 px-4 py-2 text-sm"
|
||||
>
|
||||
<RotateCcw className="h-4 w-4" aria-hidden="true" />
|
||||
{localize('com_ui_reset_var', { 0: localize('com_ui_model_parameters') })}
|
||||
</button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,74 +0,0 @@
|
||||
import { AgentCapabilities } from 'librechat-data-provider';
|
||||
import { useFormContext, Controller } from 'react-hook-form';
|
||||
import type { AgentForm } from '~/common';
|
||||
import {
|
||||
Checkbox,
|
||||
HoverCard,
|
||||
// HoverCardContent,
|
||||
// HoverCardPortal,
|
||||
// HoverCardTrigger,
|
||||
} from '~/components/ui';
|
||||
// import { CircleHelpIcon } from '~/components/svg';
|
||||
// import { useLocalize } from '~/hooks';
|
||||
// import { ESide } from '~/common';
|
||||
|
||||
export default function HideSequential() {
|
||||
// const localize = useLocalize();
|
||||
const methods = useFormContext<AgentForm>();
|
||||
const { control, setValue, getValues } = methods;
|
||||
|
||||
return (
|
||||
<>
|
||||
<HoverCard openDelay={50}>
|
||||
<div className="my-2 flex items-center">
|
||||
<Controller
|
||||
name={AgentCapabilities.hide_sequential_outputs}
|
||||
control={control}
|
||||
render={({ field }) => (
|
||||
<Checkbox
|
||||
{...field}
|
||||
checked={field.value}
|
||||
onCheckedChange={field.onChange}
|
||||
className="relative float-left mr-2 inline-flex h-4 w-4 cursor-pointer"
|
||||
value={field.value?.toString()}
|
||||
/>
|
||||
)}
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
className="flex items-center space-x-2"
|
||||
onClick={() =>
|
||||
|
||||
setValue(
|
||||
AgentCapabilities.hide_sequential_outputs,
|
||||
!getValues(AgentCapabilities.hide_sequential_outputs),
|
||||
{
|
||||
shouldDirty: true,
|
||||
},
|
||||
)
|
||||
}
|
||||
>
|
||||
<label
|
||||
className="form-check-label text-token-text-primary w-full cursor-pointer"
|
||||
htmlFor={AgentCapabilities.hide_sequential_outputs}
|
||||
>
|
||||
Hide Sequential Agent Outputs except the last agent's
|
||||
</label>
|
||||
{/* <HoverCardTrigger>
|
||||
<CircleHelpIcon className="h-5 w-5 text-gray-500" />
|
||||
</HoverCardTrigger> */}
|
||||
</button>
|
||||
{/* <HoverCardPortal>
|
||||
<HoverCardContent side={ESide.Top} className="w-80">
|
||||
<div className="space-y-2">
|
||||
<p className="text-sm text-text-secondary">
|
||||
{localize('com_agents_ttg_info')}
|
||||
</p>
|
||||
</div>
|
||||
</HoverCardContent>
|
||||
</HoverCardPortal> */}
|
||||
</div>
|
||||
</HoverCard>
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -1,153 +0,0 @@
|
||||
import { Plus, X } from 'lucide-react';
|
||||
import React, { useRef, useState } from 'react';
|
||||
import { Transition } from 'react-transition-group';
|
||||
import { Constants } from 'librechat-data-provider';
|
||||
import { cn, defaultTextProps, removeFocusOutlines } from '~/utils';
|
||||
import { TooltipAnchor } from '~/components/ui';
|
||||
import HideSequential from './HideSequential';
|
||||
|
||||
interface SequentialAgentsProps {
|
||||
field: {
|
||||
value: string[];
|
||||
onChange: (value: string[]) => void;
|
||||
};
|
||||
}
|
||||
|
||||
const labelClass = 'mb-2 text-token-text-primary block font-medium';
|
||||
const inputClass = cn(
|
||||
defaultTextProps,
|
||||
'flex w-full px-3 py-2 dark:border-gray-800 dark:bg-gray-800 rounded-xl mb-2',
|
||||
removeFocusOutlines,
|
||||
);
|
||||
|
||||
const maxAgents = 5;
|
||||
|
||||
const SequentialAgents: React.FC<SequentialAgentsProps> = ({ field }) => {
|
||||
const inputRefs = useRef<(HTMLInputElement | null)[]>([]);
|
||||
const nodeRef = useRef(null);
|
||||
const [newAgentId, setNewAgentId] = useState('');
|
||||
|
||||
const handleAddAgentId = () => {
|
||||
if (newAgentId.trim() && field.value.length < maxAgents) {
|
||||
const newValues = [...field.value, newAgentId];
|
||||
field.onChange(newValues);
|
||||
setNewAgentId('');
|
||||
}
|
||||
};
|
||||
|
||||
const handleDeleteAgentId = (index: number) => {
|
||||
const newValues = field.value.filter((_, i) => i !== index);
|
||||
field.onChange(newValues);
|
||||
};
|
||||
|
||||
const defaultStyle = {
|
||||
transition: 'opacity 200ms ease-in-out',
|
||||
opacity: 0,
|
||||
};
|
||||
|
||||
const triggerShake = (element: HTMLElement) => {
|
||||
element.classList.remove('shake');
|
||||
void element.offsetWidth;
|
||||
element.classList.add('shake');
|
||||
setTimeout(() => {
|
||||
element.classList.remove('shake');
|
||||
}, 200);
|
||||
};
|
||||
|
||||
const transitionStyles = {
|
||||
entering: { opacity: 1 },
|
||||
entered: { opacity: 1 },
|
||||
exiting: { opacity: 0 },
|
||||
exited: { opacity: 0 },
|
||||
};
|
||||
|
||||
const hasReachedMax = field.value.length >= Constants.MAX_CONVO_STARTERS;
|
||||
|
||||
return (
|
||||
<div className="relative">
|
||||
<label className={labelClass} htmlFor="agent_ids">
|
||||
Sequential Agents
|
||||
</label>
|
||||
<div className="mt-4 space-y-2">
|
||||
<HideSequential />
|
||||
{/* Display existing agents first */}
|
||||
{field.value.map((agentId, index) => (
|
||||
<div key={index} className="relative">
|
||||
<input
|
||||
ref={(el) => (inputRefs.current[index] = el)}
|
||||
value={agentId}
|
||||
onChange={(e) => {
|
||||
const newValue = [...field.value];
|
||||
newValue[index] = e.target.value;
|
||||
field.onChange(newValue);
|
||||
}}
|
||||
className={`${inputClass} pr-10`}
|
||||
type="text"
|
||||
maxLength={64}
|
||||
/>
|
||||
<TooltipAnchor
|
||||
side="top"
|
||||
description={'Remove agent ID'}
|
||||
className="absolute right-1 top-1 flex size-7 items-center justify-center rounded-lg transition-colors duration-200 hover:bg-surface-hover"
|
||||
onClick={() => handleDeleteAgentId(index)}
|
||||
>
|
||||
<X className="size-4" />
|
||||
</TooltipAnchor>
|
||||
</div>
|
||||
))}
|
||||
{/* Input for new agent at the bottom */}
|
||||
<div className="relative">
|
||||
<input
|
||||
ref={(el) => (inputRefs.current[field.value.length] = el)}
|
||||
value={newAgentId}
|
||||
maxLength={64}
|
||||
className={`${inputClass} pr-10`}
|
||||
type="text"
|
||||
placeholder={hasReachedMax ? 'Max agents reached' : 'Enter agent ID (e.g. agent_1234)'}
|
||||
onChange={(e) => setNewAgentId(e.target.value)}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter') {
|
||||
e.preventDefault();
|
||||
if (hasReachedMax) {
|
||||
triggerShake(e.currentTarget);
|
||||
} else {
|
||||
handleAddAgentId();
|
||||
}
|
||||
}
|
||||
}}
|
||||
/>
|
||||
<Transition
|
||||
nodeRef={nodeRef}
|
||||
in={field.value.length < Constants.MAX_CONVO_STARTERS}
|
||||
timeout={200}
|
||||
unmountOnExit
|
||||
>
|
||||
{(state: string) => (
|
||||
<div
|
||||
ref={nodeRef}
|
||||
style={{
|
||||
...defaultStyle,
|
||||
...transitionStyles[state as keyof typeof transitionStyles],
|
||||
transition: state === 'entering' ? 'none' : defaultStyle.transition,
|
||||
}}
|
||||
className="absolute right-1 top-1"
|
||||
>
|
||||
<TooltipAnchor
|
||||
side="top"
|
||||
description={hasReachedMax ? 'Max agents reached' : 'Add agent ID'}
|
||||
className="flex size-7 items-center justify-center rounded-lg transition-colors duration-200 hover:bg-surface-hover"
|
||||
onClick={handleAddAgentId}
|
||||
disabled={hasReachedMax}
|
||||
>
|
||||
<Plus className="size-4" />
|
||||
</TooltipAnchor>
|
||||
</div>
|
||||
)}
|
||||
</Transition>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default SequentialAgents;
|
||||
@@ -1,21 +1,23 @@
|
||||
import { ArrowUpDown } from 'lucide-react';
|
||||
import type { ColumnDef } from '@tanstack/react-table';
|
||||
import type { TFile } from 'librechat-data-provider';
|
||||
import useLocalize from '~/hooks/useLocalize';
|
||||
import PanelFileCell from './PanelFileCell';
|
||||
import { Button } from '~/components/ui';
|
||||
import { formatDate } from '~/utils';
|
||||
|
||||
export const columns: ColumnDef<TFile>[] = [
|
||||
export const columns: ColumnDef<TFile | undefined>[] = [
|
||||
{
|
||||
accessorKey: 'filename',
|
||||
header: ({ column }) => {
|
||||
const localize = useLocalize();
|
||||
return (
|
||||
<Button
|
||||
variant="ghost"
|
||||
className="hover:bg-surface-hover"
|
||||
onClick={() => column.toggleSorting(column.getIsSorted() === 'asc')}
|
||||
>
|
||||
Name
|
||||
{localize('com_ui_name')}
|
||||
<ArrowUpDown className="ml-2 h-4 w-4" />
|
||||
</Button>
|
||||
);
|
||||
@@ -31,20 +33,21 @@ export const columns: ColumnDef<TFile>[] = [
|
||||
size: '10%',
|
||||
},
|
||||
header: ({ column }) => {
|
||||
const localize = useLocalize();
|
||||
return (
|
||||
<Button
|
||||
variant="ghost"
|
||||
className="hover:bg-surface-hover"
|
||||
onClick={() => column.toggleSorting(column.getIsSorted() === 'asc')}
|
||||
>
|
||||
Date
|
||||
{localize('com_ui_date')}
|
||||
<ArrowUpDown className="ml-2 h-4 w-4" />
|
||||
</Button>
|
||||
);
|
||||
},
|
||||
cell: ({ row }) => (
|
||||
<span className="flex justify-end text-xs">
|
||||
{formatDate(row.original.updatedAt?.toString() ?? '')}
|
||||
{formatDate(row.original?.updatedAt?.toString() ?? '')}
|
||||
</span>
|
||||
),
|
||||
},
|
||||
|
||||
@@ -6,7 +6,6 @@ import { getFileType } from '~/utils';

export default function PanelFileCell({ row }: { row: Row<TFile | undefined> }) {
  const file = row.original;

  return (
    <div className="flex w-full items-center gap-2">
      {file?.type.startsWith('image') === true ? (

@@ -159,6 +159,7 @@ export default function DataTable<TData, TValue>({ columns, data }: DataTablePro
        filename: fileData.filename,
        source: fileData.source,
        size: fileData.bytes,
        metadata: fileData.metadata,
      });
    },
    [addFile, fileMap, conversation, localize, showToast, fileConfig.endpoints],

@@ -665,6 +665,7 @@ export const settings: Record<string, SettingsConfiguration | undefined> = {
  [`${EModelEndpoint.bedrock}-${BedrockProviders.Meta}`]: bedrockGeneral,
  [`${EModelEndpoint.bedrock}-${BedrockProviders.AI21}`]: bedrockGeneral,
  [`${EModelEndpoint.bedrock}-${BedrockProviders.Amazon}`]: bedrockGeneral,
  [`${EModelEndpoint.bedrock}-${BedrockProviders.DeepSeek}`]: bedrockGeneral,
  [EModelEndpoint.google]: googleConfig,
};

@@ -708,6 +709,7 @@ export const presetSettings: Record<
  [`${EModelEndpoint.bedrock}-${BedrockProviders.Meta}`]: bedrockGeneralColumns,
  [`${EModelEndpoint.bedrock}-${BedrockProviders.AI21}`]: bedrockGeneralColumns,
  [`${EModelEndpoint.bedrock}-${BedrockProviders.Amazon}`]: bedrockGeneralColumns,
  [`${EModelEndpoint.bedrock}-${BedrockProviders.DeepSeek}`]: bedrockGeneralColumns,
  [EModelEndpoint.google]: {
    col1: googleCol1,
    col2: googleCol2,
client/src/components/ui/FormInput.tsx (new file, 62 lines)
@@ -0,0 +1,62 @@
import React from 'react';
import { Label, Input } from '~/components/ui';
import { cn } from '~/utils';

export default function FormInput({
  field,
  label,
  labelClass,
  inputClass,
  containerClass,
  labelAdjacent,
  placeholder = '',
  type = 'string',
}: {
  field: any;
  label: string;
  labelClass?: string;
  inputClass?: string;
  placeholder?: string;
  containerClass?: string;
  type?: 'string' | 'number';
  labelAdjacent?: React.ReactNode;
}) {
  const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    const value = e.target.value;

    if (type !== 'number') {
      field.onChange(value);
      return;
    }

    if (value === '') {
      field.onChange(value);
    } else if (!isNaN(Number(value))) {
      field.onChange(Number(value));
    }
  };

  return (
    <div className={cn('flex w-full flex-col items-center gap-2', containerClass)}>
      <div className="flex w-full items-center justify-start gap-2">
        <Label
          htmlFor={`${field.name}-input`}
          className={cn('text-left text-sm font-semibold text-text-primary', labelClass)}
        >
          {label}
        </Label>
        {labelAdjacent}
      </div>
      <Input
        id={`${field.name}-input`}
        value={field.value ?? ''}
        onChange={handleChange}
        placeholder={placeholder}
        className={cn(
          'flex h-10 max-h-10 w-full resize-none border-none bg-surface-secondary px-3 py-2',
          inputClass,
        )}
      />
    </div>
  );
}
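A minimal usage sketch for the new FormInput component, not part of this diff. It assumes the `field` prop is the render-prop field object from a react-hook-form Controller, which matches the `field.value`, `field.onChange`, and `field.name` accesses above; the form field name and label below are illustrative.

import { Controller, useFormContext } from 'react-hook-form';
import { FormInput } from '~/components/ui';

// Hypothetical field: a numeric "Max Agent Steps" input bound to the form.
function RecursionLimitField() {
  const { control } = useFormContext();
  return (
    <Controller
      name="recursion_limit"
      control={control}
      render={({ field }) => (
        <FormInput
          field={field}
          type="number"
          label="Max Agent Steps" // com_ui_agent_recursion_limit
          placeholder="25"
        />
      )}
    />
  );
}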
@@ -29,6 +29,7 @@ export * from './InputOTP';
export { default as Combobox } from './Combobox';
export { default as Dropdown } from './Dropdown';
export { default as FileUpload } from './FileUpload';
export { default as FormInput } from './FormInput';
export { default as DropdownPopup } from './DropdownPopup';
export { default as DelayedRender } from './DelayedRender';
export { default as ThemeSelector } from './ThemeSelector';

@@ -63,8 +63,9 @@ export const useUploadFileMutation = (

    const update = {};
    const prevResources = agent.tool_resources ?? {};
    const prevResource: t.ExecuteCodeResource | t.AgentFileSearchResource = agent
      .tool_resources?.[tool_resource] ?? {
    const prevResource: t.ExecuteCodeResource | t.AgentFileResource = agent.tool_resources?.[
      tool_resource
    ] ?? {
      file_ids: [],
    };
    if (!prevResource.file_ids) {
@@ -23,6 +23,7 @@ type TStepEvent = {
|
||||
event: string;
|
||||
data:
|
||||
| Agents.MessageDeltaEvent
|
||||
| Agents.AgentUpdate
|
||||
| Agents.RunStep
|
||||
| Agents.ToolEndEvent
|
||||
| {
|
||||
@@ -87,6 +88,17 @@ export default function useStepHandler({
|
||||
if (contentPart.tool_call_ids != null) {
|
||||
update.tool_call_ids = contentPart.tool_call_ids;
|
||||
}
|
||||
updatedContent[index] = update;
|
||||
} else if (
|
||||
contentType.startsWith(ContentTypes.AGENT_UPDATE) &&
|
||||
ContentTypes.AGENT_UPDATE in contentPart &&
|
||||
contentPart.agent_update
|
||||
) {
|
||||
const update: Agents.AgentUpdate = {
|
||||
type: ContentTypes.AGENT_UPDATE,
|
||||
agent_update: contentPart.agent_update,
|
||||
};
|
||||
|
||||
updatedContent[index] = update;
|
||||
} else if (
|
||||
contentType.startsWith(ContentTypes.THINK) &&
|
||||
@@ -191,29 +203,20 @@ export default function useStepHandler({
|
||||
});
|
||||
}
|
||||
} else if (event === 'on_agent_update') {
|
||||
const { runId, message } = data as { runId?: string; message: string };
|
||||
const responseMessageId = runId ?? '';
|
||||
const { agent_update } = data as Agents.AgentUpdate;
|
||||
const responseMessageId = agent_update.runId || '';
|
||||
if (!responseMessageId) {
|
||||
console.warn('No message id found in agent update event');
|
||||
return;
|
||||
}
|
||||
|
||||
const responseMessage = messages[messages.length - 1] as TMessage;
|
||||
|
||||
const response = {
|
||||
...responseMessage,
|
||||
parentMessageId: userMessage.messageId,
|
||||
conversationId: userMessage.conversationId,
|
||||
messageId: responseMessageId,
|
||||
content: [
|
||||
{
|
||||
type: ContentTypes.TEXT,
|
||||
text: message,
|
||||
},
|
||||
],
|
||||
} as TMessage;
|
||||
|
||||
setMessages([...messages.slice(0, -1), response]);
|
||||
const response = messageMap.current.get(responseMessageId);
|
||||
if (response) {
|
||||
const updatedResponse = updateContent(response, agent_update.index, data);
|
||||
messageMap.current.set(responseMessageId, updatedResponse);
|
||||
const currentMessages = getMessages() || [];
|
||||
setMessages([...currentMessages.slice(0, -1), updatedResponse]);
|
||||
}
|
||||
} else if (event === 'on_message_delta') {
|
||||
const messageDelta = data as Agents.MessageDeltaEvent;
|
||||
const runStep = stepMap.current.get(messageDelta.id);
|
||||
|
||||
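For readability, a sketch of the agent-update payload the reworked useStepHandler branch above expects. Only `runId` and `index` are visible in this diff; the literal type value and everything else in this sketch are assumptions.

// Assumed shape, inferred from `agent_update.runId`, `agent_update.index`,
// and `type: ContentTypes.AGENT_UPDATE` in the handler above.
type AgentUpdateEvent = {
  type: string; // ContentTypes.AGENT_UPDATE
  agent_update: {
    runId: string; // id of the in-progress response message
    index: number; // position in the response message's content array
  };
};

// On 'on_agent_update', the handler looks up the response by runId and writes
// this part at `index` via updateContent(), then refreshes the message list.
const example: AgentUpdateEvent = {
  type: 'agent_update', // placeholder for ContentTypes.AGENT_UPDATE
  agent_update: { runId: 'run_123', index: 0 },
};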
@@ -11,6 +11,9 @@
|
||||
"com_agents_create_error": "There was an error creating your agent.",
|
||||
"com_agents_description_placeholder": "Optional: Describe your Agent here",
|
||||
"com_agents_enable_file_search": "Enable File Search",
|
||||
"com_agents_file_context": "File Context (OCR)",
|
||||
"com_agents_file_context_disabled": "Agent must be created before uploading files for File Context.",
|
||||
"com_agents_file_context_info": "Files uploaded as \"Context\" are processed using OCR to extract text, which is then added to the Agent's instructions. Ideal for documents, images with text, or PDFs where you need the full text content of a file",
|
||||
"com_agents_file_search_disabled": "Agent must be created before uploading files for File Search.",
|
||||
"com_agents_file_search_info": "When enabled, the agent will be informed of the exact filenames listed below, allowing it to retrieve relevant context from these files.",
|
||||
"com_agents_instructions_placeholder": "The system instructions that the agent uses",
|
||||
@@ -462,13 +465,20 @@
|
||||
"com_ui_admin_access_warning": "Disabling Admin access to this feature may cause unexpected UI issues requiring refresh. If saved, the only way to revert is via the interface setting in librechat.yaml config which affects all roles.",
|
||||
"com_ui_admin_settings": "Admin Settings",
|
||||
"com_ui_advanced": "Advanced",
|
||||
"com_ui_advanced_settings": "Advanced Settings",
|
||||
"com_ui_agent": "Agent",
|
||||
"com_ui_agent_chain": "Agent Chain (Mixture-of-Agents)",
|
||||
"com_ui_agent_chain_info": "Enables creating sequences of agents. Each agent can access outputs from previous agents in the chain. Based on the \"Mixture-of-Agents\" architecture where agents use previous outputs as auxiliary information.",
|
||||
"com_ui_agent_chain_max": "You have reached the maximum of {{0}} agents.",
|
||||
"com_ui_agent_delete_error": "There was an error deleting the agent",
|
||||
"com_ui_agent_deleted": "Successfully deleted agent",
|
||||
"com_ui_agent_duplicate_error": "There was an error duplicating the agent",
|
||||
"com_ui_agent_duplicated": "Agent duplicated successfully",
|
||||
"com_ui_agent_editing_allowed": "Other users can already edit this agent",
|
||||
"com_ui_agent_recursion_limit": "Max Agent Steps",
|
||||
"com_ui_agent_recursion_limit_info": "Limits how many steps the agent can take in a run before giving a final response. Default is 25 steps. A step is either an AI API request or a tool usage round. For example, a basic tool interaction takes 3 steps: initial request, tool usage, and follow-up request.",
|
||||
"com_ui_agent_shared_to_all": "something needs to go here. was empty",
|
||||
"com_ui_agent_var": "{{0}} agent",
|
||||
"com_ui_agents": "Agents",
|
||||
"com_ui_agents_allow_create": "Allow creating Agents",
|
||||
"com_ui_agents_allow_share_global": "Allow sharing Agents to all users",
|
||||
@@ -811,12 +821,14 @@
|
||||
"com_ui_upload_code_files": "Upload for Code Interpreter",
|
||||
"com_ui_upload_delay": "Uploading \"{{0}}\" is taking more time than anticipated. Please wait while the file finishes indexing for retrieval.",
|
||||
"com_ui_upload_error": "There was an error uploading your file",
|
||||
"com_ui_upload_file_context": "Upload File Context",
|
||||
"com_ui_upload_file_search": "Upload for File Search",
|
||||
"com_ui_upload_files": "Upload files",
|
||||
"com_ui_upload_image": "Upload an image",
|
||||
"com_ui_upload_image_input": "Upload Image",
|
||||
"com_ui_upload_invalid": "Invalid file for upload. Must be an image not exceeding the limit",
|
||||
"com_ui_upload_invalid_var": "Invalid file for upload. Must be an image not exceeding {{0}} MB",
|
||||
"com_ui_upload_ocr_text": "Upload as Text",
|
||||
"com_ui_upload_success": "Successfully uploaded file",
|
||||
"com_ui_upload_type": "Select Upload Type",
|
||||
"com_ui_use_2fa_code": "Use 2FA Code Instead",
|
||||
|
||||
@@ -11,6 +11,9 @@
|
||||
"com_agents_create_error": "Houve um erro ao criar seu agente.",
|
||||
"com_agents_description_placeholder": "Opcional: Descreva seu Agente aqui",
|
||||
"com_agents_enable_file_search": "Habilitar pesquisa de arquivos",
|
||||
"com_agents_file_context": "Contexto de arquivo (OCR)",
|
||||
"com_agents_file_context_disabled": "O agente deve ser criado antes de carregar arquivos para o Contexto de Arquivo.",
|
||||
"com_agents_file_context_info": "Os arquivos carregados como \"Contexto\" são processados usando OCR para extrair texto, que é então adicionado às instruções do Agente. Ideal para documentos, imagens com texto ou PDFs onde você precisa do conteúdo de texto completo de um arquivo",
|
||||
"com_agents_file_search_disabled": "O agente deve ser criado antes de carregar arquivos para Pesquisa de Arquivos.",
|
||||
"com_agents_file_search_info": "Quando ativado, o agente será informado dos nomes exatos dos arquivos listados abaixo, permitindo que ele recupere o contexto relevante desses arquivos.",
|
||||
"com_agents_instructions_placeholder": "As instruções do sistema que o agente usa",
|
||||
@@ -811,12 +814,14 @@
|
||||
"com_ui_upload_code_files": "Carregar para o interpretador de código",
|
||||
"com_ui_upload_delay": "O upload de \"{{0}}\" está demorando mais do que o esperado. Por favor, aguarde enquanto o arquivo termina de ser indexado para recuperação.",
|
||||
"com_ui_upload_error": "Houve um erro ao carregar seu arquivo",
|
||||
"com_ui_upload_file_context": "Contexto de upload de arquivo",
|
||||
"com_ui_upload_file_search": "Upload para pesquisa de arquivos",
|
||||
"com_ui_upload_files": "Carregar arquivos",
|
||||
"com_ui_upload_image": "Carregar uma imagem",
|
||||
"com_ui_upload_image_input": "Upload de imagem",
|
||||
"com_ui_upload_invalid": "Arquivo inválido para upload. Deve ser uma imagem não excedendo o limite",
|
||||
"com_ui_upload_invalid_var": "Arquivo inválido para upload. Deve ser uma imagem não excedendo {{0}} MB",
|
||||
"com_ui_upload_ocr_text": "Carregar como texto",
|
||||
"com_ui_upload_success": "Arquivo carregado com sucesso",
|
||||
"com_ui_upload_type": "Selecione o tipo de upload",
|
||||
"com_ui_use_2fa_code": "Use o código 2FA em vez disso",
|
||||
|
||||
@@ -361,4 +361,14 @@ div[role="tabpanel"][data-state="active"][data-orientation="horizontal"][aria-la

.cm-content:focus {
  outline: none !important;
}

p.whitespace-pre-wrap a, li a {
  color: #0066cc;
  text-decoration: underline;
  font-weight: bold;
}

.dark p.whitespace-pre-wrap a, .dark li a {
  color: #52a0ff;
}
@@ -58,6 +58,9 @@ export const processAgentOption = ({
|
||||
label: _agent?.name ?? '',
|
||||
value: _agent?.id ?? '',
|
||||
icon: isGlobal ? <EarthIcon className="icon-md text-green-400" /> : null,
|
||||
context_files: _agent?.tool_resources?.ocr?.file_ids
|
||||
? ([] as Array<[string, ExtendedFile]>)
|
||||
: undefined,
|
||||
knowledge_files: _agent?.tool_resources?.file_search?.file_ids
|
||||
? ([] as Array<[string, ExtendedFile]>)
|
||||
: undefined,
|
||||
@@ -83,7 +86,7 @@ export const processAgentOption = ({
|
||||
const source =
|
||||
tool_resource === EToolResources.file_search
|
||||
? FileSources.vectordb
|
||||
: file?.source ?? FileSources.local;
|
||||
: (file?.source ?? FileSources.local);
|
||||
|
||||
if (file) {
|
||||
list?.push([
|
||||
@@ -97,6 +100,7 @@ export const processAgentOption = ({
|
||||
height: file.height,
|
||||
size: file.bytes,
|
||||
preview: file.filepath,
|
||||
metadata: file.metadata,
|
||||
progress: 1,
|
||||
source,
|
||||
},
|
||||
@@ -117,6 +121,16 @@ export const processAgentOption = ({
|
||||
}
|
||||
};
|
||||
|
||||
if (agent.context_files && _agent?.tool_resources?.ocr?.file_ids) {
|
||||
_agent.tool_resources.ocr.file_ids.forEach((file_id) =>
|
||||
handleFile({
|
||||
file_id,
|
||||
list: agent.context_files,
|
||||
tool_resource: EToolResources.ocr,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (agent.knowledge_files && _agent?.tool_resources?.file_search?.file_ids) {
|
||||
_agent.tool_resources.file_search.file_ids.forEach((file_id) =>
|
||||
handleFile({
|
||||
|
||||
config/assign-group.js (new file, 69 lines)
@@ -0,0 +1,69 @@
const path = require('path');
require('module-alias')({ base: path.resolve(__dirname, '..', 'api') });
const { askQuestion, silentExit } = require('./helpers');
const User = require('~/models/User');
const Group = require('~/models/Group');
const connect = require('./connect');

(async () => {
  await connect();

  console.purple('---------------------------------------');
  console.purple('Assign a Group to a User');
  console.purple('---------------------------------------');

  // Read arguments from CLI or prompt the user
  const userEmail = process.argv[2] || (await askQuestion('User email: '));
  const groupName = process.argv[3] || (await askQuestion('Group name to assign: '));

  // Validate email format
  if (!userEmail.includes('@')) {
    console.red('Error: Invalid email address!');
    silentExit(1);
  }

  // Find the group by name
  const group = await Group.findOne({ name: groupName });
  if (!group) {
    console.red('Error: No group with that name was found!');
    silentExit(1);
  }

  // Find the user by email
  const user = await User.findOne({ email: userEmail });
  if (!user) {
    console.red('Error: No user with that email was found!');
    silentExit(1);
  }
  console.purple(`Found user: ${user.email}`);

  // Assign the group to the user if not already assigned
  try {
    if (!Array.isArray(user.groups)) {
      user.groups = [];
    }

    // Convert both user group IDs and the target group ID to strings for comparison
    const groupIdStr = group._id.toString();
    const userGroupIds = user.groups.map(id => id.toString());

    if (!userGroupIds.includes(groupIdStr)) {
      user.groups.push(group._id);
      await user.save();
      console.green(`User ${user.email} successfully assigned to group ${group.name}!`);
    } else {
      console.yellow(`User ${user.email} is already assigned to group ${group.name}.`);
    }
  } catch (error) {
    console.red('Error assigning group to user: ' + error.message);
    silentExit(1);
  }

  silentExit(0);
})();

process.on('uncaughtException', (err) => {
  console.error('There was an uncaught error:');
  console.error(err);
  process.exit(1);
});
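A hypothetical invocation of the new script, not part of the diff; the email and group name are placeholders, and both arguments fall back to interactive prompts when omitted, per the process.argv handling above.

// Run from the project root; values shown are placeholders:
//   node config/assign-group.js user@example.com "Engineering"
// With no arguments, the script prompts for the user email and group name.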
config/create-group.js (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
const path = require('path');
|
||||
require('module-alias')({ base: path.resolve(__dirname, '..', 'api') });
|
||||
const { askQuestion, silentExit } = require('./helpers');
|
||||
const Group = require('~/models/Group');
|
||||
const connect = require('./connect');
|
||||
|
||||
(async () => {
|
||||
await connect();
|
||||
|
||||
console.purple('---------------------------------------');
|
||||
console.purple('Create a New Group');
|
||||
console.purple('---------------------------------------');
|
||||
|
||||
// Prompt for basic group info.
|
||||
const groupName = process.argv[2] || (await askQuestion('Group name: '));
|
||||
const groupDescription =
|
||||
process.argv[3] || (await askQuestion('Group description (optional): '));
|
||||
|
||||
// Ask for the group type (local or openid; defaults to local)
|
||||
let groupType =
|
||||
process.argv[4] ||
|
||||
(await askQuestion('Group type (local/openid, default is local): '));
|
||||
groupType = groupType.trim().toLowerCase() || 'local';
|
||||
|
||||
let groupData;
|
||||
if (groupType === 'openid') {
|
||||
// For OpenID groups, prompt for an external ID.
|
||||
const externalId =
|
||||
process.argv[5] ||
|
||||
(await askQuestion('External ID for OpenID group: '));
|
||||
groupData = {
|
||||
name: groupName,
|
||||
description: groupDescription,
|
||||
provider: 'openid',
|
||||
externalId: externalId.trim(),
|
||||
};
|
||||
} else {
|
||||
// For local groups, we only need name and description.
|
||||
groupData = {
|
||||
name: groupName,
|
||||
description: groupDescription,
|
||||
provider: 'local',
|
||||
};
|
||||
}
|
||||
|
||||
// Create the group document
|
||||
let group;
|
||||
try {
|
||||
group = await Group.create(groupData);
|
||||
} catch (error) {
|
||||
console.red('Error creating group: ' + error.message);
|
||||
silentExit(1);
|
||||
}
|
||||
console.green(`Group created successfully with id: ${group._id}`);
|
||||
silentExit(0);
|
||||
})();
|
||||
|
||||
process.on('uncaughtException', (err) => {
|
||||
console.error('There was an uncaught error:');
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -2,7 +2,7 @@ const path = require('path');
const { v5: uuidv5 } = require('uuid');
require('module-alias')({ base: path.resolve(__dirname, '..', 'api') });
const { askQuestion, askMultiLineQuestion, silentExit } = require('./helpers');
const Banner = require('~/models/schema/banner');
const { Banner } = require('~/models/Banner');
const connect = require('./connect');

(async () => {
@@ -7,6 +7,9 @@ version: 1.2.1
# Cache settings: Set to true to enable caching
cache: true

# File strategy s3/firebase
# fileStrategy: "s3"

# Custom interface configuration
interface:
  customWelcome: "Welcome to LibreChat! Enjoy your experience."
@@ -101,6 +104,31 @@ registration:
|
||||
# userMax: 50
|
||||
# userWindowInMinutes: 60 # Rate limit window for conversation imports per user
|
||||
|
||||
|
||||
# Example Model Specifications
|
||||
#modelSpecs:
|
||||
# enforce: true
|
||||
# prioritize: true
|
||||
# list:
|
||||
# - name: "4o-mini"
|
||||
# label: "4o-mini"
|
||||
# groups:
|
||||
# - "67b9a5c64165f31925e9b25a"
|
||||
# - "67b9a5c64165f31925e9b25f"
|
||||
# description: "The most advanced frontier model from Azure OpenAI, suitable to solve complex multi-step problems."
|
||||
# iconURL: "https://www.librechat.ai/librechat.png"
|
||||
# badges:
|
||||
# - text: "Test"
|
||||
# color: "#FF0000"
|
||||
# - text: "Beta"
|
||||
# color: "#00FF00"
|
||||
# preset:
|
||||
# default: true
|
||||
# endpoint: "azureOpenAI"
|
||||
# model: "gpt-4o-mini"
|
||||
# modelLabel: "4o-mini"
|
||||
|
||||
|
||||
# Example Actions Object Structure
|
||||
actions:
|
||||
allowedDomains:
|
||||
@@ -109,32 +137,32 @@ actions:
|
||||
- "google.com"
|
||||
|
||||
# Example MCP Servers Object Structure
|
||||
mcpServers:
|
||||
everything:
|
||||
# type: sse # type can optionally be omitted
|
||||
url: http://localhost:3001/sse
|
||||
timeout: 60000 # 1 minute timeout for this server, this is the default timeout for MCP servers.
|
||||
puppeteer:
|
||||
type: stdio
|
||||
command: npx
|
||||
args:
|
||||
- -y
|
||||
- "@modelcontextprotocol/server-puppeteer"
|
||||
timeout: 300000 # 5 minutes timeout for this server
|
||||
filesystem:
|
||||
# type: stdio
|
||||
command: npx
|
||||
args:
|
||||
- -y
|
||||
- "@modelcontextprotocol/server-filesystem"
|
||||
- /home/user/LibreChat/
|
||||
iconPath: /home/user/LibreChat/client/public/assets/logo.svg
|
||||
mcp-obsidian:
|
||||
command: npx
|
||||
args:
|
||||
- -y
|
||||
- "mcp-obsidian"
|
||||
- /path/to/obsidian/vault
|
||||
# mcpServers:
|
||||
# everything:
|
||||
# # type: sse # type can optionally be omitted
|
||||
# url: http://localhost:3001/sse
|
||||
# timeout: 60000 # 1 minute timeout for this server, this is the default timeout for MCP servers.
|
||||
# puppeteer:
|
||||
# type: stdio
|
||||
# command: npx
|
||||
# args:
|
||||
# - -y
|
||||
# - "@modelcontextprotocol/server-puppeteer"
|
||||
# timeout: 300000 # 5 minutes timeout for this server
|
||||
# filesystem:
|
||||
# # type: stdio
|
||||
# command: npx
|
||||
# args:
|
||||
# - -y
|
||||
# - "@modelcontextprotocol/server-filesystem"
|
||||
# - /home/user/LibreChat/
|
||||
# iconPath: /home/user/LibreChat/client/public/assets/logo.svg
|
||||
# mcp-obsidian:
|
||||
# command: npx
|
||||
# args:
|
||||
# - -y
|
||||
# - "mcp-obsidian"
|
||||
# - /path/to/obsidian/vault
|
||||
|
||||
# Definition of custom endpoints
|
||||
endpoints:
|
||||
@@ -152,8 +180,10 @@ endpoints:
  # # (optional) Assistant Capabilities available to all users. Omit the ones you wish to exclude. Defaults to list below.
  # capabilities: ["code_interpreter", "retrieval", "actions", "tools", "image_vision"]
  # agents:
    # (optional) Maximum recursion depth for agents, defaults to 25
    # (optional) Default recursion depth for agents, defaults to 25
    # recursionLimit: 50
    # (optional) Max recursion depth for agents, defaults to 25
    # maxRecursionLimit: 100
    # (optional) Disable the builder interface for agents
    # disableBuilder: false
    # (optional) Agent Capabilities available to all users. Omit the ones you wish to exclude. Defaults to list below.