Compare commits

..

5 Commits

238 changed files with 2766 additions and 5777 deletions

View File

@@ -119,7 +119,6 @@ BINGAI_TOKEN=user_provided
# BEDROCK_AWS_DEFAULT_REGION=us-east-1 # A default region must be provided
# BEDROCK_AWS_ACCESS_KEY_ID=someAccessKey
# BEDROCK_AWS_SECRET_ACCESS_KEY=someSecretAccessKey
# BEDROCK_AWS_SESSION_TOKEN=someSessionToken
# Note: This example list is not meant to be exhaustive. If omitted, all known, supported model IDs will be included for you.
# BEDROCK_AWS_MODELS=anthropic.claude-3-5-sonnet-20240620-v1:0,meta.llama3-1-8b-instruct-v1:0
@@ -141,7 +140,7 @@ GOOGLE_KEY=user_provided
# GOOGLE_REVERSE_PROXY=
# Gemini API (AI Studio)
# GOOGLE_MODELS=gemini-exp-1121,gemini-exp-1114,gemini-1.5-flash-latest,gemini-1.0-pro,gemini-1.0-pro-001,gemini-1.0-pro-latest,gemini-1.0-pro-vision-latest,gemini-1.5-pro-latest,gemini-pro,gemini-pro-vision
# GOOGLE_MODELS=gemini-1.5-flash-latest,gemini-1.0-pro,gemini-1.0-pro-001,gemini-1.0-pro-latest,gemini-1.0-pro-vision-latest,gemini-1.5-pro-latest,gemini-pro,gemini-pro-vision
# Vertex AI
# GOOGLE_MODELS=gemini-1.5-flash-preview-0514,gemini-1.5-pro-preview-0514,gemini-1.0-pro-vision-001,gemini-1.0-pro-002,gemini-1.0-pro-001,gemini-pro-vision,gemini-1.0-pro
@@ -178,10 +177,10 @@ OPENAI_API_KEY=user_provided
DEBUG_OPENAI=false
# TITLE_CONVO=false
# OPENAI_TITLE_MODEL=gpt-4o-mini
# OPENAI_TITLE_MODEL=gpt-3.5-turbo
# OPENAI_SUMMARIZE=true
# OPENAI_SUMMARY_MODEL=gpt-4o-mini
# OPENAI_SUMMARY_MODEL=gpt-3.5-turbo
# OPENAI_FORCE_PROMPT=true

View File

@@ -50,8 +50,6 @@ class BaseClient {
/** The key for the usage object's output tokens
* @type {string} */
this.outputTokensKey = 'completion_tokens';
/** @type {Set<string>} */
this.savedMessageIds = new Set();
}
setOptions() {
@@ -86,7 +84,7 @@ class BaseClient {
return this.options.agent.id;
}
return this.modelOptions?.model ?? this.model;
return this.modelOptions.model;
}
/**
@@ -510,7 +508,7 @@ class BaseClient {
conversationId,
parentMessageId: userMessage.messageId,
isCreatedByUser: false,
model: this.modelOptions?.model ?? this.model,
model: this.modelOptions.model,
sender: this.sender,
text: generation,
};
@@ -547,7 +545,6 @@ class BaseClient {
if (!isEdited && !this.skipSaveUserMessage) {
this.userMessagePromise = this.saveMessageToDatabase(userMessage, saveOptions, user);
this.savedMessageIds.add(userMessage.messageId);
if (typeof opts?.getReqData === 'function') {
opts.getReqData({
userMessagePromise: this.userMessagePromise,
@@ -566,8 +563,8 @@ class BaseClient {
user: this.user,
tokenType: 'prompt',
amount: promptTokens,
model: this.modelOptions.model,
endpoint: this.options.endpoint,
model: this.modelOptions?.model ?? this.model,
endpointTokenConfig: this.options.endpointTokenConfig,
},
});
@@ -577,7 +574,6 @@ class BaseClient {
const completion = await this.sendCompletion(payload, opts);
this.abortController.requestCompleted = true;
/** @type {TMessage} */
const responseMessage = {
messageId: responseMessageId,
conversationId,
@@ -639,16 +635,7 @@ class BaseClient {
responseMessage.attachments = (await Promise.all(this.artifactPromises)).filter((a) => a);
}
if (this.options.attachments) {
try {
saveOptions.files = this.options.attachments.map((attachments) => attachments.file_id);
} catch (error) {
logger.error('[BaseClient] Error mapping attachments for conversation', error);
}
}
this.responsePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user);
this.savedMessageIds.add(responseMessage.messageId);
const messageCache = getLogStores(CacheKeys.MESSAGES);
messageCache.set(
responseMessageId,
@@ -915,9 +902,8 @@ class BaseClient {
// Note: gpt-3.5-turbo and gpt-4 may update over time. Use default for these as well as for unknown models
let tokensPerMessage = 3;
let tokensPerName = 1;
const model = this.modelOptions?.model ?? this.model;
if (model === 'gpt-3.5-turbo-0301') {
if (this.modelOptions.model === 'gpt-3.5-turbo-0301') {
tokensPerMessage = 4;
tokensPerName = -1;
}
@@ -975,15 +961,6 @@ class BaseClient {
return _messages;
}
const seen = new Set();
const attachmentsProcessed =
this.options.attachments && !(this.options.attachments instanceof Promise);
if (attachmentsProcessed) {
for (const attachment of this.options.attachments) {
seen.add(attachment.file_id);
}
}
/**
*
* @param {TMessage} message
@@ -994,19 +971,7 @@ class BaseClient {
this.message_file_map = {};
}
const fileIds = [];
for (const file of message.files) {
if (seen.has(file.file_id)) {
continue;
}
fileIds.push(file.file_id);
seen.add(file.file_id);
}
if (fileIds.length === 0) {
return message;
}
const fileIds = message.files.map((file) => file.file_id);
const files = await getFiles({
file_id: { $in: fileIds },
});

View File

@@ -227,16 +227,6 @@ class ChatGPTClient extends BaseClient {
this.azure = !serverless && azureOptions;
this.azureEndpoint =
!serverless && genAzureChatCompletion(this.azure, modelOptions.model, this);
if (serverless === true) {
this.options.defaultQuery = azureOptions.azureOpenAIApiVersion
? { 'api-version': azureOptions.azureOpenAIApiVersion }
: undefined;
this.options.headers['api-key'] = this.apiKey;
}
}
if (this.options.defaultQuery) {
opts.defaultQuery = this.options.defaultQuery;
}
if (this.options.headers) {

View File

@@ -35,7 +35,6 @@ const endpointPrefix = `https://${loc}-aiplatform.googleapis.com`;
const tokenizersCache = {};
const settings = endpointSettings[EModelEndpoint.google];
const EXCLUDED_GENAI_MODELS = /gemini-(?:1\.0|1-0|pro)/;
class GoogleClient extends BaseClient {
constructor(credentials, options = {}) {
@@ -367,7 +366,7 @@ class GoogleClient extends BaseClient {
);
}
if (!this.project_id && !EXCLUDED_GENAI_MODELS.test(this.modelOptions.model)) {
if (!this.project_id && this.modelOptions.model.includes('1.5')) {
return await this.buildGenerativeMessages(messages);
}
@@ -605,12 +604,15 @@ class GoogleClient extends BaseClient {
} else if (this.project_id) {
logger.debug('Creating VertexAI client');
return new ChatVertexAI(clientOptions);
} else if (!EXCLUDED_GENAI_MODELS.test(model)) {
} else if (model.includes('1.5')) {
logger.debug('Creating GenAI client');
return new GenAI(this.apiKey).getGenerativeModel({
...clientOptions,
model,
});
return new GenAI(this.apiKey).getGenerativeModel(
{
...clientOptions,
model,
},
{ apiVersion: 'v1beta' },
);
}
logger.debug('Creating Chat Google Generative AI client');
@@ -672,7 +674,7 @@ class GoogleClient extends BaseClient {
}
const modelName = clientOptions.modelName ?? clientOptions.model ?? '';
if (!EXCLUDED_GENAI_MODELS.test(modelName) && !this.project_id) {
if (modelName?.includes('1.5') && !this.project_id) {
const client = model;
const requestOptions = {
contents: _payload,
@@ -695,7 +697,7 @@ class GoogleClient extends BaseClient {
requestOptions.safetySettings = _payload.safetySettings;
const delay = modelName.includes('flash') ? 8 : 15;
const delay = modelName.includes('flash') ? 8 : 14;
const result = await client.generateContentStream(requestOptions);
for await (const chunk of result.stream) {
const chunkText = chunk.text();
@@ -710,6 +712,7 @@ class GoogleClient extends BaseClient {
const stream = await model.stream(messages, {
signal: abortController.signal,
timeout: 7000,
safetySettings: _payload.safetySettings,
});
@@ -717,7 +720,7 @@ class GoogleClient extends BaseClient {
if (!this.options.streamRate) {
if (this.isGenerativeModel) {
delay = 15;
delay = 12;
}
if (modelName.includes('flash')) {
delay = 5;
@@ -771,8 +774,8 @@ class GoogleClient extends BaseClient {
const messages = this.isTextModel ? _payload.trim() : _messages;
const modelName = clientOptions.modelName ?? clientOptions.model ?? '';
if (!EXCLUDED_GENAI_MODELS.test(modelName) && !this.project_id) {
logger.debug('Identified titling model as GenAI version');
if (modelName?.includes('1.5') && !this.project_id) {
logger.debug('Identified titling model as 1.5 version');
/** @type {GenerativeModel} */
const client = model;
const requestOptions = {

View File

@@ -688,7 +688,7 @@ class OpenAIClient extends BaseClient {
}
initializeLLM({
model = 'gpt-4o-mini',
model = 'gpt-3.5-turbo',
modelName,
temperature = 0.2,
presence_penalty = 0,
@@ -793,7 +793,7 @@ class OpenAIClient extends BaseClient {
const { OPENAI_TITLE_MODEL } = process.env ?? {};
let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-4o-mini';
let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo';
if (model === Constants.CURRENT_MODEL) {
model = this.modelOptions.model;
}
@@ -838,12 +838,6 @@ class OpenAIClient extends BaseClient {
this.options.dropParams = azureConfig.groupMap[groupName].dropParams;
this.options.forcePrompt = azureConfig.groupMap[groupName].forcePrompt;
this.azure = !serverless && azureOptions;
if (serverless === true) {
this.options.defaultQuery = azureOptions.azureOpenAIApiVersion
? { 'api-version': azureOptions.azureOpenAIApiVersion }
: undefined;
this.options.headers['api-key'] = this.apiKey;
}
}
const titleChatCompletion = async () => {
@@ -982,7 +976,7 @@ ${convo}
let prompt;
// TODO: remove the gpt fallback and make it specific to endpoint
const { OPENAI_SUMMARY_MODEL = 'gpt-4o-mini' } = process.env ?? {};
const { OPENAI_SUMMARY_MODEL = 'gpt-3.5-turbo' } = process.env ?? {};
let model = this.options.summaryModel ?? OPENAI_SUMMARY_MODEL;
if (model === Constants.CURRENT_MODEL) {
model = this.modelOptions.model;
@@ -1175,10 +1169,6 @@ ${convo}
opts.defaultHeaders = { ...opts.defaultHeaders, ...this.options.headers };
}
if (this.options.defaultQuery) {
opts.defaultQuery = this.options.defaultQuery;
}
if (this.options.proxy) {
opts.httpAgent = new HttpsProxyAgent(this.options.proxy);
}
@@ -1217,12 +1207,6 @@ ${convo}
this.azure = !serverless && azureOptions;
this.azureEndpoint =
!serverless && genAzureChatCompletion(this.azure, modelOptions.model, this);
if (serverless === true) {
this.options.defaultQuery = azureOptions.azureOpenAIApiVersion
? { 'api-version': azureOptions.azureOpenAIApiVersion }
: undefined;
this.options.headers['api-key'] = this.apiKey;
}
}
if (this.azure || this.options.azure) {
@@ -1324,11 +1308,6 @@ ${convo}
/** @type {(value: void | PromiseLike<void>) => void} */
let streamResolve;
if (this.isO1Model === true && this.azure && modelOptions.stream) {
delete modelOptions.stream;
delete modelOptions.stop;
}
if (modelOptions.stream) {
streamPromise = new Promise((resolve) => {
streamResolve = resolve;

View File

@@ -105,7 +105,7 @@ class PluginsClient extends OpenAIClient {
chatHistory: new ChatMessageHistory(pastMessages),
});
const { loadedTools } = await loadTools({
this.tools = await loadTools({
user,
model,
tools: this.options.tools,
@@ -119,15 +119,12 @@ class PluginsClient extends OpenAIClient {
processFileURL,
message,
},
useSpecs: true,
});
if (loadedTools.length === 0) {
if (this.tools.length === 0) {
return;
}
this.tools = loadedTools;
logger.debug('[PluginsClient] Requested Tools', this.options.tools);
logger.debug(
'[PluginsClient] Loaded Tools',

View File

@@ -17,7 +17,7 @@ const { isEnabled } = require('~/server/utils');
*
* @example
* const llm = createLLM({
* modelOptions: { modelName: 'gpt-4o-mini', temperature: 0.2 },
* modelOptions: { modelName: 'gpt-3.5-turbo', temperature: 0.2 },
* configOptions: { basePath: 'https://example.api/path' },
* callbacks: { onMessage: handleMessage },
* openAIApiKey: 'your-api-key'

View File

@@ -3,7 +3,7 @@ const { ChatOpenAI } = require('@langchain/openai');
const { getBufferString, ConversationSummaryBufferMemory } = require('langchain/memory');
const chatPromptMemory = new ConversationSummaryBufferMemory({
llm: new ChatOpenAI({ modelName: 'gpt-4o-mini', temperature: 0 }),
llm: new ChatOpenAI({ modelName: 'gpt-3.5-turbo', temperature: 0 }),
maxTokenLimit: 10,
returnMessages: true,
});

View File

@@ -204,7 +204,7 @@ const formatAgentMessages = (payload) => {
new ToolMessage({
tool_call_id: tool_call.id,
name: tool_call.name,
content: output || '',
content: output,
}),
);
} else {

View File

@@ -61,7 +61,7 @@ describe('BaseClient', () => {
const options = {
// debug: true,
modelOptions: {
model: 'gpt-4o-mini',
model: 'gpt-3.5-turbo',
temperature: 0,
},
};

View File

@@ -221,7 +221,7 @@ describe('OpenAIClient', () => {
it('should set isChatCompletion based on useOpenRouter, reverseProxyUrl, or model', () => {
client.setOptions({ reverseProxyUrl: null });
// true by default since default model will be gpt-4o-mini
// true by default since default model will be gpt-3.5-turbo
expect(client.isChatCompletion).toBe(true);
client.isChatCompletion = undefined;
@@ -230,7 +230,7 @@ describe('OpenAIClient', () => {
expect(client.isChatCompletion).toBe(false);
client.isChatCompletion = undefined;
client.setOptions({ modelOptions: { model: 'gpt-4o-mini' }, reverseProxyUrl: null });
client.setOptions({ modelOptions: { model: 'gpt-3.5-turbo' }, reverseProxyUrl: null });
expect(client.isChatCompletion).toBe(true);
});

View File

@@ -19,8 +19,6 @@ class DALLE3 extends Tool {
this.userId = fields.userId;
this.fileStrategy = fields.fileStrategy;
/** @type {boolean} */
this.isAgent = fields.isAgent;
if (fields.processFileURL) {
/** @type {processFileURL} Necessary for output to contain all image metadata. */
this.processFileURL = fields.processFileURL.bind(this);
@@ -110,19 +108,6 @@ class DALLE3 extends Tool {
return `![generated image](${imageUrl})`;
}
returnValue(value) {
if (this.isAgent === true && typeof value === 'string') {
return [value, {}];
} else if (this.isAgent === true && typeof value === 'object') {
return [
'DALL-E displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.',
value,
];
}
return value;
}
async _call(data) {
const { prompt, quality = 'standard', size = '1024x1024', style = 'vivid' } = data;
if (!prompt) {
@@ -141,23 +126,18 @@ class DALLE3 extends Tool {
});
} catch (error) {
logger.error('[DALL-E-3] Problem generating the image:', error);
return this
.returnValue(`Something went wrong when trying to generate the image. The DALL-E API may be unavailable:
Error Message: ${error.message}`);
return `Something went wrong when trying to generate the image. The DALL-E API may be unavailable:
Error Message: ${error.message}`;
}
if (!resp) {
return this.returnValue(
'Something went wrong when trying to generate the image. The DALL-E API may be unavailable',
);
return 'Something went wrong when trying to generate the image. The DALL-E API may be unavailable';
}
const theImageUrl = resp.data[0].url;
if (!theImageUrl) {
return this.returnValue(
'No image URL returned from OpenAI API. There may be a problem with the API or your configuration.',
);
return 'No image URL returned from OpenAI API. There may be a problem with the API or your configuration.';
}
const imageBasename = getImageBasename(theImageUrl);
@@ -177,11 +157,11 @@ Error Message: ${error.message}`);
try {
const result = await this.processFileURL({
URL: theImageUrl,
basePath: 'images',
userId: this.userId,
fileName: imageName,
fileStrategy: this.fileStrategy,
userId: this.userId,
URL: theImageUrl,
fileName: imageName,
basePath: 'images',
context: FileContext.image_generation,
});
@@ -195,7 +175,7 @@ Error Message: ${error.message}`);
this.result = `Failed to save the image locally. ${error.message}`;
}
return this.returnValue(this.result);
return this.result;
}
}

View File

@@ -10,50 +10,20 @@ const { logger } = require('~/config');
* @param {Object} options
* @param {ServerRequest} options.req
* @param {Agent['tool_resources']} options.tool_resources
* @returns {Promise<{
* files: Array<{ file_id: string; filename: string }>,
* toolContext: string
* }>}
*/
const primeFiles = async (options) => {
const { tool_resources } = options;
const file_ids = tool_resources?.[EToolResources.file_search]?.file_ids ?? [];
const agentResourceIds = new Set(file_ids);
const resourceFiles = tool_resources?.[EToolResources.file_search]?.files ?? [];
const dbFiles = ((await getFiles({ file_id: { $in: file_ids } })) ?? []).concat(resourceFiles);
let toolContext = `- Note: Semantic search is available through the ${Tools.file_search} tool but no files are currently loaded. Request the user to upload documents to search through.`;
const files = [];
for (let i = 0; i < dbFiles.length; i++) {
const file = dbFiles[i];
if (!file) {
continue;
}
if (i === 0) {
toolContext = `- Note: Use the ${Tools.file_search} tool to find relevant information within:`;
}
toolContext += `\n\t- ${file.filename}${
agentResourceIds.has(file.file_id) ? '' : ' (just attached by user)'
}`;
files.push({
file_id: file.file_id,
filename: file.filename,
});
}
return { files, toolContext };
};
/**
*
* @param {Object} options
* @param {ServerRequest} options.req
* @param {Array<{ file_id: string; filename: string }>} options.files
* @returns
*/
const createFileSearchTool = async ({ req, files }) => {
return tool(
const createFileSearchTool = async (options) => {
const { req, tool_resources } = options;
const file_ids = tool_resources?.[EToolResources.file_search]?.file_ids ?? [];
const files = (await getFiles({ file_id: { $in: file_ids } })).map((file) => ({
file_id: file.file_id,
filename: file.filename,
}));
const fileList = files.map((file) => `- ${file.filename}`).join('\n');
const toolDescription = `Performs a semantic search based on a natural language query across the following files:\n${fileList}`;
const FileSearch = tool(
async ({ query }) => {
if (files.length === 0) {
return 'No files to search. Instruct the user to add files for the search.';
@@ -117,7 +87,7 @@ const createFileSearchTool = async ({ req, files }) => {
},
{
name: Tools.file_search,
description: `Performs semantic search across attached "${Tools.file_search}" documents using natural language queries. This tool analyzes the content of uploaded files to find relevant information, quotes, and passages that best match your query. Use this to extract specific information or find relevant sections within the available documents.`,
description: toolDescription,
schema: z.object({
query: z
.string()
@@ -127,6 +97,8 @@ const createFileSearchTool = async ({ req, files }) => {
}),
},
);
return FileSearch;
};
module.exports = { createFileSearchTool, primeFiles };
module.exports = createFileSearchTool;

View File

@@ -15,8 +15,8 @@ const {
StructuredWolfram,
TavilySearchResults,
} = require('../');
const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
const { primeFiles } = require('~/server/services/Files/Code/process');
const createFileSearchTool = require('./createFileSearchTool');
const { loadSpecs } = require('./loadSpecs');
const { logger } = require('~/config');
@@ -83,7 +83,7 @@ const validateTools = async (user, tools = []) => {
}
};
const loadAuthValues = async ({ userId, authFields, throwError = true }) => {
const loadAuthValues = async ({ userId, authFields }) => {
let authValues = {};
/**
@@ -98,7 +98,7 @@ const loadAuthValues = async ({ userId, authFields, throwError = true }) => {
return { authField: field, authValue: value };
}
try {
value = await getUserPluginAuthValue(userId, field, throwError);
value = await getUserPluginAuthValue(userId, field);
} catch (err) {
if (field === fields[fields.length - 1] && !value) {
throw err;
@@ -122,18 +122,15 @@ const loadAuthValues = async ({ userId, authFields, throwError = true }) => {
return authValues;
};
/** @typedef {typeof import('@langchain/core/tools').Tool} ToolConstructor */
/** @typedef {import('@langchain/core/tools').Tool} Tool */
/**
* Initializes a tool with authentication values for the given user, supporting alternate authentication fields.
* Authentication fields can have alternates separated by "||", and the first defined variable will be used.
*
* @param {string} userId The user ID for which the tool is being loaded.
* @param {Array<string>} authFields Array of strings representing the authentication fields. Supports alternate fields delimited by "||".
* @param {ToolConstructor} ToolConstructor The constructor function for the tool to be initialized.
* @param {typeof import('langchain/tools').Tool} ToolConstructor The constructor function for the tool to be initialized.
* @param {Object} options Optional parameters to be passed to the tool constructor alongside authentication values.
* @returns {() => Promise<Tool>} An Async function that, when called, asynchronously initializes and returns an instance of the tool with authentication.
* @returns {Function} An Async function that, when called, asynchronously initializes and returns an instance of the tool with authentication.
*/
const loadToolWithAuth = (userId, authFields, ToolConstructor, options = {}) => {
return async function () {
@@ -145,12 +142,11 @@ const loadToolWithAuth = (userId, authFields, ToolConstructor, options = {}) =>
const loadTools = async ({
user,
model,
isAgent,
useSpecs,
tools = [],
options = {},
functions = true,
returnMap = false,
tools = [],
options = {},
skipSpecs = false,
}) => {
const toolConstructors = {
calculator: Calculator,
@@ -178,12 +174,11 @@ const loadTools = async ({
const requestedTools = {};
if (functions === true) {
if (functions) {
toolConstructors.dalle = DALLE3;
}
const imageGenOptions = {
isAgent,
req: options.req,
fileStrategy: options.fileStrategy,
processFileURL: options.processFileURL,
@@ -194,6 +189,7 @@ const loadTools = async ({
const toolOptions = {
serpapi: { location: 'Austin,Texas,United States', hl: 'en', gl: 'us' },
dalle: imageGenOptions,
'dall-e': imageGenOptions,
'stable-diffusion': imageGenOptions,
};
@@ -207,38 +203,24 @@ const loadTools = async ({
toolAuthFields[tool.pluginKey] = tool.authConfig.map((auth) => auth.authField);
});
const toolContextMap = {};
const remainingTools = [];
for (const tool of tools) {
if (tool === Tools.execute_code) {
requestedTools[tool] = async () => {
const authValues = await loadAuthValues({
userId: user,
authFields: [EnvVar.CODE_API_KEY],
});
const codeApiKey = authValues[EnvVar.CODE_API_KEY];
const { files, toolContext } = await primeCodeFiles(options, codeApiKey);
if (toolContext) {
toolContextMap[tool] = toolContext;
}
const CodeExecutionTool = createCodeExecutionTool({
const authValues = await loadAuthValues({
userId: user,
authFields: [EnvVar.CODE_API_KEY],
});
const files = await primeFiles(options, authValues[EnvVar.CODE_API_KEY]);
requestedTools[tool] = () =>
createCodeExecutionTool({
user_id: user,
files,
...authValues,
});
CodeExecutionTool.apiKey = codeApiKey;
return CodeExecutionTool;
};
continue;
} else if (tool === Tools.file_search) {
requestedTools[tool] = async () => {
const { files, toolContext } = await primeSearchFiles(options);
if (toolContext) {
toolContextMap[tool] = toolContext;
}
return createFileSearchTool({ req: options.req, files });
};
requestedTools[tool] = () => createFileSearchTool(options);
continue;
}
@@ -259,13 +241,13 @@ const loadTools = async ({
continue;
}
if (functions === true) {
if (functions) {
remainingTools.push(tool);
}
}
let specs = null;
if (useSpecs === true && functions === true && remainingTools.length > 0) {
if (functions && remainingTools.length > 0 && skipSpecs !== true) {
specs = await loadSpecs({
llm: model,
user,
@@ -288,21 +270,23 @@ const loadTools = async ({
return requestedTools;
}
const toolPromises = [];
// load tools
let result = [];
for (const tool of tools) {
const validTool = requestedTools[tool];
if (validTool) {
toolPromises.push(
validTool().catch((error) => {
logger.error(`Error loading tool ${tool}:`, error);
return null;
}),
);
if (!validTool) {
continue;
}
const plugin = await validTool();
if (Array.isArray(plugin)) {
result = [...result, ...plugin];
} else if (plugin) {
result.push(plugin);
}
}
const loadedTools = (await Promise.all(toolPromises)).flatMap((plugin) => plugin || []);
return { loadedTools, toolContextMap };
return result;
};
module.exports = {

View File

@@ -128,14 +128,12 @@ describe('Tool Handlers', () => {
);
beforeAll(async () => {
const toolMap = await loadTools({
toolFunctions = await loadTools({
user: fakeUser._id,
model: BaseLLM,
tools: sampleTools,
returnMap: true,
useSpecs: true,
});
toolFunctions = toolMap;
loadTool1 = toolFunctions[sampleTools[0]];
loadTool2 = toolFunctions[sampleTools[1]];
loadTool3 = toolFunctions[sampleTools[2]];
@@ -197,7 +195,6 @@ describe('Tool Handlers', () => {
expect(mockPluginService.getUserPluginAuthValue).toHaveBeenCalledWith(
'userId',
'DALLE3_API_KEY',
true,
);
});
@@ -227,7 +224,6 @@ describe('Tool Handlers', () => {
user: fakeUser._id,
model: BaseLLM,
returnMap: true,
useSpecs: true,
});
expect(toolFunctions).toEqual({});
});
@@ -239,7 +235,6 @@ describe('Tool Handlers', () => {
tools: ['stable-diffusion'],
functions: true,
returnMap: true,
useSpecs: true,
});
const structuredTool = await toolFunctions['stable-diffusion']();
expect(structuredTool).toBeInstanceOf(StructuredSD);

View File

@@ -70,7 +70,6 @@ const namespaces = {
[ViolationTypes.TTS_LIMIT]: createViolationInstance(ViolationTypes.TTS_LIMIT),
[ViolationTypes.STT_LIMIT]: createViolationInstance(ViolationTypes.STT_LIMIT),
[ViolationTypes.CONVO_ACCESS]: createViolationInstance(ViolationTypes.CONVO_ACCESS),
[ViolationTypes.TOOL_CALL_LIMIT]: createViolationInstance(ViolationTypes.TOOL_CALL_LIMIT),
[ViolationTypes.FILE_UPLOAD_LIMIT]: createViolationInstance(ViolationTypes.FILE_UPLOAD_LIMIT),
[ViolationTypes.VERIFY_EMAIL_LIMIT]: createViolationInstance(ViolationTypes.VERIFY_EMAIL_LIMIT),
[ViolationTypes.RESET_PASSWORD_LIMIT]: createViolationInstance(

View File

@@ -118,43 +118,36 @@ const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
};
/**
* Removes multiple resource files from an agent in a single update.
* Removes a resource file id from an agent.
* @param {object} params
* @param {ServerRequest} params.req
* @param {string} params.agent_id
* @param {Array<{tool_resource: string, file_id: string}>} params.files
* @param {string} params.tool_resource
* @param {string} params.file_id
* @returns {Promise<Agent>} The updated agent.
*/
const removeAgentResourceFiles = async ({ agent_id, files }) => {
const removeAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
const searchParameter = { id: agent_id };
const agent = await getAgent(searchParameter);
if (!agent) {
throw new Error('Agent not found for removing resource files');
throw new Error('Agent not found for removing resource file');
}
const tool_resources = { ...agent.tool_resources } || {};
const tool_resources = agent.tool_resources || {};
const filesByResource = files.reduce((acc, { tool_resource, file_id }) => {
if (!acc[tool_resource]) {
acc[tool_resource] = new Set();
if (tool_resources[tool_resource] && tool_resources[tool_resource].file_ids) {
tool_resources[tool_resource].file_ids = tool_resources[tool_resource].file_ids.filter(
(id) => id !== file_id,
);
if (tool_resources[tool_resource].file_ids.length === 0) {
delete tool_resources[tool_resource];
}
acc[tool_resource].add(file_id);
return acc;
}, {});
Object.entries(filesByResource).forEach(([resource, fileIds]) => {
if (tool_resources[resource] && tool_resources[resource].file_ids) {
tool_resources[resource].file_ids = tool_resources[resource].file_ids.filter(
(id) => !fileIds.has(id),
);
if (tool_resources[resource].file_ids.length === 0) {
delete tool_resources[resource];
}
}
});
}
const updateData = { tool_resources };
return await updateAgent(searchParameter, updateData);
};
@@ -288,5 +281,5 @@ module.exports = {
getListAgents,
updateAgentProjects,
addAgentResourceFile,
removeAgentResourceFiles,
removeAgentResourceFile,
};

View File

@@ -15,19 +15,6 @@ const searchConversation = async (conversationId) => {
throw new Error('Error searching conversation');
}
};
/**
* Searches for a conversation by conversationId and returns associated file ids.
* @param {string} conversationId - The conversation's ID.
* @returns {Promise<string[] | null>}
*/
const getConvoFiles = async (conversationId) => {
try {
return (await Conversation.findOne({ conversationId }, 'files').lean())?.files ?? [];
} catch (error) {
logger.error('[getConvoFiles] Error getting conversation files', error);
throw new Error('Error getting conversation files');
}
};
/**
* Retrieves a single conversation for a given user and conversation ID.
@@ -75,7 +62,6 @@ const deleteNullOrEmptyConversations = async () => {
module.exports = {
Conversation,
getConvoFiles,
searchConversation,
deleteNullOrEmptyConversations,
/**
@@ -96,7 +82,6 @@ module.exports = {
update.conversationId = newConversationId;
}
/** Note: the resulting Model object is necessary for Meilisearch operations */
const conversation = await Conversation.findOneAndUpdate(
{ conversationId, user: req.user.id },
update,

View File

@@ -265,26 +265,6 @@ async function getMessages(filter, select) {
}
}
/**
* Retrieves a single message from the database.
* @async
* @function getMessage
* @param {{ user: string, messageId: string }} params - The search parameters
* @returns {Promise<TMessage | null>} The message that matches the criteria or null if not found
* @throws {Error} If there is an error in retrieving the message
*/
async function getMessage({ user, messageId }) {
try {
return await Message.findOne({
user,
messageId,
}).lean();
} catch (err) {
logger.error('Error getting message:', err);
throw err;
}
}
/**
* Deletes messages from the database.
*
@@ -312,6 +292,5 @@ module.exports = {
updateMessage,
deleteMessagesSince,
getMessages,
getMessage,
deleteMessages,
};

View File

@@ -1,96 +0,0 @@
const ToolCall = require('./schema/toolCallSchema');
/**
* Create a new tool call
* @param {ToolCallData} toolCallData - The tool call data
* @returns {Promise<ToolCallData>} The created tool call document
*/
async function createToolCall(toolCallData) {
try {
return await ToolCall.create(toolCallData);
} catch (error) {
throw new Error(`Error creating tool call: ${error.message}`);
}
}
/**
* Get a tool call by ID
* @param {string} id - The tool call document ID
* @returns {Promise<ToolCallData|null>} The tool call document or null if not found
*/
async function getToolCallById(id) {
try {
return await ToolCall.findById(id).lean();
} catch (error) {
throw new Error(`Error fetching tool call: ${error.message}`);
}
}
/**
* Get tool calls by message ID and user
* @param {string} messageId - The message ID
* @param {string} userId - The user's ObjectId
* @returns {Promise<Array>} Array of tool call documents
*/
async function getToolCallsByMessage(messageId, userId) {
try {
return await ToolCall.find({ messageId, user: userId }).lean();
} catch (error) {
throw new Error(`Error fetching tool calls: ${error.message}`);
}
}
/**
* Get tool calls by conversation ID and user
* @param {string} conversationId - The conversation ID
* @param {string} userId - The user's ObjectId
* @returns {Promise<ToolCallData[]>} Array of tool call documents
*/
async function getToolCallsByConvo(conversationId, userId) {
try {
return await ToolCall.find({ conversationId, user: userId }).lean();
} catch (error) {
throw new Error(`Error fetching tool calls: ${error.message}`);
}
}
/**
* Update a tool call
* @param {string} id - The tool call document ID
* @param {Partial<ToolCallData>} updateData - The data to update
* @returns {Promise<ToolCallData|null>} The updated tool call document or null if not found
*/
async function updateToolCall(id, updateData) {
try {
return await ToolCall.findByIdAndUpdate(id, updateData, { new: true }).lean();
} catch (error) {
throw new Error(`Error updating tool call: ${error.message}`);
}
}
/**
 * Delete all tool calls owned by a user, optionally narrowed to one conversation.
 * @param {string} userId - The related user's ObjectId
 * @param {string} [conversationId] - When provided (truthy), only this conversation's tool calls are removed
 * @returns {Promise<{ ok?: number; n?: number; deletedCount?: number }>} The result of the delete operation
 */
async function deleteToolCalls(userId, conversationId) {
  try {
    // Truthiness check preserves the optional-parameter contract: falsy values mean "all conversations"
    const filter = conversationId ? { user: userId, conversationId } : { user: userId };
    return await ToolCall.deleteMany(filter);
  } catch (err) {
    throw new Error(`Error deleting tool call: ${err.message}`);
  }
}
// Public API: CRUD helpers for ToolCall documents defined above.
module.exports = {
createToolCall,
updateToolCall,
deleteToolCalls,
getToolCallById,
getToolCallsByConvo,
getToolCallsByMessage,
};

View File

@@ -18,7 +18,6 @@ const {
updateFileUsage,
} = require('./File');
const {
getMessage,
getMessages,
saveMessage,
recordMessage,
@@ -52,7 +51,6 @@ module.exports = {
getFiles,
updateFileUsage,
getMessage,
getMessages,
saveMessage,
recordMessage,

View File

@@ -58,15 +58,6 @@ const agentSchema = mongoose.Schema(
type: String,
default: undefined,
},
hide_sequential_outputs: {
type: Boolean,
},
end_after_tools: {
type: Boolean,
},
agent_ids: {
type: [String],
},
isCollaborative: {
type: Boolean,
default: undefined,

View File

@@ -26,9 +26,6 @@ const convoSchema = mongoose.Schema(
type: mongoose.Schema.Types.Mixed,
},
...conversationPreset,
agent_id: {
type: String,
},
// for bingAI only
bingConversationId: {
type: String,
@@ -50,9 +47,6 @@ const convoSchema = mongoose.Schema(
default: [],
meiliIndex: true,
},
files: {
type: [String],
},
},
{ timestamps: true },
);

View File

@@ -93,10 +93,6 @@ const conversationPreset = {
imageDetail: {
type: String,
},
/* agents */
agent_id: {
type: String,
},
/* assistants */
assistant_id: {
type: String,

View File

@@ -1,54 +0,0 @@
const mongoose = require('mongoose');
/**
* @typedef {Object} ToolCallData
* @property {string} conversationId - The ID of the conversation
* @property {string} messageId - The ID of the message
* @property {string} toolId - The ID of the tool
* @property {string | ObjectId} user - The user's ObjectId
* @property {unknown} [result] - Optional result data
* @property {TAttachment[]} [attachments] - Optional attachments data
* @property {number} [blockIndex] - Optional code block index
* @property {number} [partIndex] - Optional part index
*/
/** @type {MongooseSchema<ToolCallData>} */
const toolCallSchema = mongoose.Schema(
{
// Conversation this tool call belongs to (string ID, not an ObjectId ref)
conversationId: {
type: String,
required: true,
},
// Message that produced this tool call
messageId: {
type: String,
required: true,
},
// Identifier of the invoked tool
toolId: {
type: String,
required: true,
},
// Owning user; every query helper in the model scopes lookups by this field
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
required: true,
},
// Mixed: tool output has no fixed shape
result: {
type: mongoose.Schema.Types.Mixed,
},
// Mixed: attachment metadata varies by tool — presumably matches TAttachment; confirm against callers
attachments: {
type: mongoose.Schema.Types.Mixed,
},
// Index of the originating code block within the message content
blockIndex: {
type: Number,
},
// Index of the originating content part within the message
partIndex: {
type: Number,
},
},
{ timestamps: true },
);
// Compound indexes back the user-scoped lookups by message and by conversation
toolCallSchema.index({ messageId: 1, user: 1 });
toolCallSchema.index({ conversationId: 1, user: 1 });
module.exports = mongoose.model('ToolCall', toolCallSchema);

View File

@@ -30,9 +30,6 @@ const bedrockValues = {
'amazon.titan-text-lite-v1': { prompt: 0.15, completion: 0.2 },
'amazon.titan-text-express-v1': { prompt: 0.2, completion: 0.6 },
'amazon.titan-text-premier-v1:0': { prompt: 0.5, completion: 1.5 },
'amazon.nova-micro-v1:0': { prompt: 0.035, completion: 0.14 },
'amazon.nova-lite-v1:0': { prompt: 0.06, completion: 0.24 },
'amazon.nova-pro-v1:0': { prompt: 0.8, completion: 3.2 },
};
/**
@@ -59,8 +56,8 @@ const tokenValues = Object.assign(
'claude-3-sonnet': { prompt: 3, completion: 15 },
'claude-3-5-sonnet': { prompt: 3, completion: 15 },
'claude-3.5-sonnet': { prompt: 3, completion: 15 },
'claude-3-5-haiku': { prompt: 0.8, completion: 4 },
'claude-3.5-haiku': { prompt: 0.8, completion: 4 },
'claude-3-5-haiku': { prompt: 1, completion: 5 },
'claude-3.5-haiku': { prompt: 1, completion: 5 },
'claude-3-haiku': { prompt: 0.25, completion: 1.25 },
'claude-2.1': { prompt: 8, completion: 24 },
'claude-2': { prompt: 8, completion: 24 },
@@ -86,8 +83,8 @@ const tokenValues = Object.assign(
const cacheTokenValues = {
'claude-3.5-sonnet': { write: 3.75, read: 0.3 },
'claude-3-5-sonnet': { write: 3.75, read: 0.3 },
'claude-3.5-haiku': { write: 1, read: 0.08 },
'claude-3-5-haiku': { write: 1, read: 0.08 },
'claude-3.5-haiku': { write: 1.25, read: 0.1 },
'claude-3-5-haiku': { write: 1.25, read: 0.1 },
'claude-3-haiku': { write: 0.3, read: 0.03 },
};
@@ -211,11 +208,4 @@ const getCacheMultiplier = ({ valueKey, cacheType, model, endpoint, endpointToke
return cacheTokenValues[valueKey]?.[cacheType] ?? null;
};
module.exports = {
tokenValues,
getValueKey,
getMultiplier,
getCacheMultiplier,
defaultRate,
cacheTokenValues,
};
module.exports = { tokenValues, getValueKey, getMultiplier, getCacheMultiplier, defaultRate };

View File

@@ -4,7 +4,6 @@ const {
tokenValues,
getValueKey,
getMultiplier,
cacheTokenValues,
getCacheMultiplier,
} = require('./tx');
@@ -212,7 +211,6 @@ describe('getMultiplier', () => {
describe('AWS Bedrock Model Tests', () => {
const awsModels = [
'anthropic.claude-3-5-haiku-20241022-v1:0',
'anthropic.claude-3-haiku-20240307-v1:0',
'anthropic.claude-3-sonnet-20240229-v1:0',
'anthropic.claude-3-opus-20240229-v1:0',
@@ -239,9 +237,6 @@ describe('AWS Bedrock Model Tests', () => {
'ai21.j2-ultra-v1',
'amazon.titan-text-lite-v1',
'amazon.titan-text-express-v1',
'amazon.nova-micro-v1:0',
'amazon.nova-lite-v1:0',
'amazon.nova-pro-v1:0',
];
it('should return the correct prompt multipliers for all models', () => {
@@ -265,24 +260,12 @@ describe('AWS Bedrock Model Tests', () => {
describe('getCacheMultiplier', () => {
it('should return the correct cache multiplier for a given valueKey and cacheType', () => {
expect(getCacheMultiplier({ valueKey: 'claude-3-5-sonnet', cacheType: 'write' })).toBe(
cacheTokenValues['claude-3-5-sonnet'].write,
);
expect(getCacheMultiplier({ valueKey: 'claude-3-5-sonnet', cacheType: 'read' })).toBe(
cacheTokenValues['claude-3-5-sonnet'].read,
);
expect(getCacheMultiplier({ valueKey: 'claude-3-5-haiku', cacheType: 'write' })).toBe(
cacheTokenValues['claude-3-5-haiku'].write,
);
expect(getCacheMultiplier({ valueKey: 'claude-3-5-haiku', cacheType: 'read' })).toBe(
cacheTokenValues['claude-3-5-haiku'].read,
);
expect(getCacheMultiplier({ valueKey: 'claude-3-haiku', cacheType: 'write' })).toBe(
cacheTokenValues['claude-3-haiku'].write,
);
expect(getCacheMultiplier({ valueKey: 'claude-3-haiku', cacheType: 'read' })).toBe(
cacheTokenValues['claude-3-haiku'].read,
);
expect(getCacheMultiplier({ valueKey: 'claude-3-5-sonnet', cacheType: 'write' })).toBe(3.75);
expect(getCacheMultiplier({ valueKey: 'claude-3-5-sonnet', cacheType: 'read' })).toBe(0.3);
expect(getCacheMultiplier({ valueKey: 'claude-3-5-haiku', cacheType: 'write' })).toBe(1.25);
expect(getCacheMultiplier({ valueKey: 'claude-3-5-haiku', cacheType: 'read' })).toBe(0.1);
expect(getCacheMultiplier({ valueKey: 'claude-3-haiku', cacheType: 'write' })).toBe(0.3);
expect(getCacheMultiplier({ valueKey: 'claude-3-haiku', cacheType: 'read' })).toBe(0.03);
});
it('should return null if cacheType is provided but not found in cacheTokenValues', () => {

View File

@@ -34,17 +34,17 @@
},
"homepage": "https://librechat.ai",
"dependencies": {
"@anthropic-ai/sdk": "^0.32.1",
"@anthropic-ai/sdk": "^0.16.1",
"@azure/search-documents": "^12.0.0",
"@google/generative-ai": "^0.21.0",
"@keyv/mongo": "^2.1.8",
"@keyv/redis": "^2.8.1",
"@langchain/community": "^0.3.14",
"@langchain/core": "^0.3.18",
"@langchain/google-genai": "^0.1.4",
"@langchain/community": "^0.3.13",
"@langchain/core": "^0.3.17",
"@langchain/google-genai": "^0.1.3",
"@langchain/google-vertexai": "^0.1.2",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^1.8.5",
"@librechat/agents": "^1.7.7",
"axios": "^1.7.7",
"bcryptjs": "^2.4.3",
"cheerio": "^1.0.0-rc.12",
@@ -77,7 +77,7 @@
"meilisearch": "^0.38.0",
"mime": "^3.0.0",
"module-alias": "^2.2.3",
"mongoose": "^8.8.3",
"mongoose": "^7.3.3",
"multer": "^1.4.5-lts.1",
"nanoid": "^3.3.7",
"nodejs-gpt": "^1.37.4",

View File

@@ -127,7 +127,6 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
},
};
/** @type {TMessage} */
let response = await client.sendMessage(text, messageOptions);
response.endpoint = endpointOption.endpoint;
@@ -151,13 +150,11 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
});
res.end();
if (!client.savedMessageIds.has(response.messageId)) {
await saveMessage(
req,
{ ...response, user },
{ context: 'api/server/controllers/AskController.js - response end' },
);
}
await saveMessage(
req,
{ ...response, user },
{ context: 'api/server/controllers/AskController.js - response end' },
);
}
if (!client.skipSaveUserMessage) {

View File

@@ -14,7 +14,6 @@ const { updateUserPluginsService, deleteUserKey } = require('~/server/services/U
const { verifyEmail, resendVerificationEmail } = require('~/server/services/AuthService');
const { processDeleteRequest } = require('~/server/services/Files/process');
const { deleteAllSharedLinks } = require('~/models/Share');
const { deleteToolCalls } = require('~/models/ToolCall');
const { Transaction } = require('~/models/Transaction');
const { logger } = require('~/config');
@@ -124,7 +123,6 @@ const deleteUserController = async (req, res) => {
await deleteAllSharedLinks(user.id); // delete user shared links
await deleteUserFiles(req); // delete user files
await deleteFiles(null, user.id); // delete database files in case of orphaned files from previous steps
await deleteToolCalls(user.id); // delete user tool calls
/* TODO: queue job for cleaning actions and assistants of non-existent users */
logger.info(`User deleted account. Email: ${user.email} ID: ${user.id}`);
res.status(200).send({ message: 'User deleted' });

View File

@@ -1,4 +1,4 @@
const { Tools, StepTypes, imageGenTools } = require('librechat-data-provider');
const { Tools } = require('librechat-data-provider');
const {
EnvVar,
GraphEvents,
@@ -57,9 +57,6 @@ class ModelEndHandler {
}
const usage = data?.output?.usage_metadata;
if (metadata?.model) {
usage.model = metadata.model;
}
if (usage) {
this.collectedUsage.push(usage);
@@ -92,27 +89,9 @@ function getDefaultHandlers({ res, aggregateContent, toolEndCallback, collectedU
* Handle ON_RUN_STEP event.
* @param {string} event - The event name.
* @param {StreamEventData} data - The event data.
* @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata.
*/
handle: (event, data, metadata) => {
if (data?.stepDetails.type === StepTypes.TOOL_CALLS) {
sendEvent(res, { event, data });
} else if (metadata?.last_agent_index === metadata?.agent_index) {
sendEvent(res, { event, data });
} else if (!metadata?.hide_sequential_outputs) {
sendEvent(res, { event, data });
} else {
const agentName = metadata?.name ?? 'Agent';
const isToolCall = data?.stepDetails.type === StepTypes.TOOL_CALLS;
const action = isToolCall ? 'performing a task...' : 'thinking...';
sendEvent(res, {
event: 'on_agent_update',
data: {
runId: metadata?.run_id,
message: `${agentName} is ${action}`,
},
});
}
handle: (event, data) => {
sendEvent(res, { event, data });
aggregateContent({ event, data });
},
},
@@ -121,16 +100,9 @@ function getDefaultHandlers({ res, aggregateContent, toolEndCallback, collectedU
* Handle ON_RUN_STEP_DELTA event.
* @param {string} event - The event name.
* @param {StreamEventData} data - The event data.
* @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata.
*/
handle: (event, data, metadata) => {
if (data?.delta.type === StepTypes.TOOL_CALLS) {
sendEvent(res, { event, data });
} else if (metadata?.last_agent_index === metadata?.agent_index) {
sendEvent(res, { event, data });
} else if (!metadata?.hide_sequential_outputs) {
sendEvent(res, { event, data });
}
handle: (event, data) => {
sendEvent(res, { event, data });
aggregateContent({ event, data });
},
},
@@ -139,16 +111,9 @@ function getDefaultHandlers({ res, aggregateContent, toolEndCallback, collectedU
* Handle ON_RUN_STEP_COMPLETED event.
* @param {string} event - The event name.
* @param {StreamEventData & { result: ToolEndData }} data - The event data.
* @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata.
*/
handle: (event, data, metadata) => {
if (data?.result != null) {
sendEvent(res, { event, data });
} else if (metadata?.last_agent_index === metadata?.agent_index) {
sendEvent(res, { event, data });
} else if (!metadata?.hide_sequential_outputs) {
sendEvent(res, { event, data });
}
handle: (event, data) => {
sendEvent(res, { event, data });
aggregateContent({ event, data });
},
},
@@ -157,14 +122,9 @@ function getDefaultHandlers({ res, aggregateContent, toolEndCallback, collectedU
* Handle ON_MESSAGE_DELTA event.
* @param {string} event - The event name.
* @param {StreamEventData} data - The event data.
* @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata.
*/
handle: (event, data, metadata) => {
if (metadata?.last_agent_index === metadata?.agent_index) {
sendEvent(res, { event, data });
} else if (!metadata?.hide_sequential_outputs) {
sendEvent(res, { event, data });
}
handle: (event, data) => {
sendEvent(res, { event, data });
aggregateContent({ event, data });
},
},
@@ -191,41 +151,16 @@ function createToolEndCallback({ req, res, artifactPromises }) {
return;
}
if (imageGenTools.has(output.name) && output.artifact) {
artifactPromises.push(
(async () => {
const fileMetadata = Object.assign(output.artifact, {
messageId: metadata.run_id,
toolCallId: output.tool_call_id,
conversationId: metadata.thread_id,
});
if (!res.headersSent) {
return fileMetadata;
}
if (!fileMetadata) {
return null;
}
res.write(`event: attachment\ndata: ${JSON.stringify(fileMetadata)}\n\n`);
return fileMetadata;
})().catch((error) => {
logger.error('Error processing code output:', error);
return null;
}),
);
return;
}
if (output.name !== Tools.execute_code) {
return;
}
if (!output.artifact.files) {
const { tool_call_id, artifact } = output;
if (!artifact.files) {
return;
}
for (const file of output.artifact.files) {
for (const file of artifact.files) {
const { id, name } = file;
artifactPromises.push(
(async () => {
@@ -238,10 +173,10 @@ function createToolEndCallback({ req, res, artifactPromises }) {
id,
name,
apiKey: result[EnvVar.CODE_API_KEY],
toolCallId: tool_call_id,
messageId: metadata.run_id,
toolCallId: output.tool_call_id,
session_id: artifact.session_id,
conversationId: metadata.thread_id,
session_id: output.artifact.session_id,
});
if (!res.headersSent) {
return fileMetadata;

View File

@@ -12,11 +12,9 @@ const {
Constants,
VisionModes,
openAISchema,
ContentTypes,
EModelEndpoint,
KnownEndpoints,
anthropicSchema,
isAgentsEndpoint,
bedrockOutputParser,
removeNullishValues,
} = require('librechat-data-provider');
@@ -32,10 +30,10 @@ const {
createContextHandlers,
} = require('~/app/clients/prompts');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { getBufferString, HumanMessage } = require('@langchain/core/messages');
const Tokenizer = require('~/server/services/Tokenizer');
const { spendTokens } = require('~/models/spendTokens');
const BaseClient = require('~/app/clients/BaseClient');
// const { sleep } = require('~/server/utils');
const { createRun } = require('./run');
const { logger } = require('~/config');
@@ -50,12 +48,6 @@ const providerParsers = {
const legacyContentEndpoints = new Set([KnownEndpoints.groq, KnownEndpoints.deepseek]);
const noSystemModelRegex = [/\bo1\b/gi];
// const { processMemory, memoryInstructions } = require('~/server/services/Endpoints/agents/memory');
// const { getFormattedMemories } = require('~/models/Memory');
// const { getCurrentDateTime } = require('~/utils');
class AgentClient extends BaseClient {
constructor(options = {}) {
super(null, options);
@@ -70,15 +62,15 @@ class AgentClient extends BaseClient {
this.run;
const {
agentConfigs,
contentParts,
collectedUsage,
artifactPromises,
maxContextTokens,
modelOptions = {},
...clientOptions
} = options;
this.agentConfigs = agentConfigs;
this.modelOptions = modelOptions;
this.maxContextTokens = maxContextTokens;
/** @type {MessageContentComplex[]} */
this.contentParts = contentParts;
@@ -88,8 +80,6 @@ class AgentClient extends BaseClient {
this.artifactPromises = artifactPromises;
/** @type {AgentClientOptions} */
this.options = Object.assign({ endpoint: options.endpoint }, clientOptions);
/** @type {string} */
this.model = this.options.agent.model_parameters.model;
}
/**
@@ -179,7 +169,7 @@ class AgentClient extends BaseClient {
: {};
if (parseOptions) {
runOptions = parseOptions(this.options.agent.model_parameters);
runOptions = parseOptions(this.modelOptions);
}
return removeNullishValues(
@@ -234,28 +224,7 @@ class AgentClient extends BaseClient {
let promptTokens;
/** @type {string} */
let systemContent = [instructions ?? '', additional_instructions ?? '']
.filter(Boolean)
.join('\n')
.trim();
// this.systemMessage = getCurrentDateTime();
// const { withKeys, withoutKeys } = await getFormattedMemories({
// userId: this.options.req.user.id,
// });
// processMemory({
// userId: this.options.req.user.id,
// message: this.options.req.body.text,
// parentMessageId,
// memory: withKeys,
// thread_id: this.conversationId,
// }).catch((error) => {
// logger.error('Memory Agent failed to process memory', error);
// });
// this.systemMessage += '\n\n' + memoryInstructions;
// if (withoutKeys) {
// this.systemMessage += `\n\n# Existing memory about the user:\n${withoutKeys}`;
// }
let systemContent = `${instructions ?? ''}${additional_instructions ?? ''}`;
if (this.options.attachments) {
const attachments = await this.options.attachments;
@@ -276,8 +245,7 @@ class AgentClient extends BaseClient {
this.options.attachments = files;
}
/** Note: Bedrock uses legacy RAG API handling */
if (this.message_file_map && !isAgentsEndpoint(this.options.endpoint)) {
if (this.message_file_map) {
this.contextHandlers = createContextHandlers(
this.options.req,
orderedMessages[orderedMessages.length - 1].text,
@@ -351,6 +319,7 @@ class AgentClient extends BaseClient {
/** @type {sendCompletion} */
async sendCompletion(payload, opts = {}) {
this.modelOptions.user = this.user;
await this.chatCompletion({
payload,
onProgress: opts.onProgress,
@@ -370,10 +339,10 @@ class AgentClient extends BaseClient {
await spendTokens(
{
context,
model: model ?? this.modelOptions.model,
conversationId: this.conversationId,
user: this.user ?? this.options.req.user?.id,
endpointTokenConfig: this.options.endpointTokenConfig,
model: usage.model ?? model ?? this.model ?? this.options.agent.model_parameters.model,
},
{ promptTokens: usage.input_tokens, completionTokens: usage.output_tokens },
);
@@ -488,190 +457,43 @@ class AgentClient extends BaseClient {
// });
// }
const run = await createRun({
req: this.options.req,
agent: this.options.agent,
tools: this.options.tools,
runId: this.responseMessageId,
modelOptions: this.modelOptions,
customHandlers: this.options.eventHandlers,
});
const config = {
configurable: {
thread_id: this.conversationId,
last_agent_index: this.agentConfigs?.size ?? 0,
hide_sequential_outputs: this.options.agent.hide_sequential_outputs,
},
signal: abortController.signal,
streamMode: 'values',
version: 'v2',
};
const initialMessages = formatAgentMessages(payload);
if (!run) {
throw new Error('Failed to create run');
}
this.run = run;
const messages = formatAgentMessages(payload);
if (legacyContentEndpoints.has(this.options.agent.endpoint)) {
formatContentStrings(initialMessages);
formatContentStrings(messages);
}
/** @type {ReturnType<createRun>} */
let run;
/**
*
* @param {Agent} agent
* @param {BaseMessage[]} messages
* @param {number} [i]
* @param {TMessageContentParts[]} [contentData]
*/
const runAgent = async (agent, messages, i = 0, contentData = []) => {
config.configurable.model = agent.model_parameters.model;
if (i > 0) {
this.model = agent.model_parameters.model;
}
config.configurable.agent_id = agent.id;
config.configurable.name = agent.name;
config.configurable.agent_index = i;
const noSystemMessages = noSystemModelRegex.some((regex) =>
agent.model_parameters.model.match(regex),
);
const systemMessage = Object.values(agent.toolContextMap ?? {})
.join('\n')
.trim();
let systemContent = [
systemMessage,
agent.instructions ?? '',
i !== 0 ? agent.additional_instructions ?? '' : '',
]
.join('\n')
.trim();
if (noSystemMessages === true) {
agent.instructions = undefined;
agent.additional_instructions = undefined;
} else {
agent.instructions = systemContent;
agent.additional_instructions = undefined;
}
if (noSystemMessages === true && systemContent?.length) {
let latestMessage = messages.pop().content;
if (typeof latestMessage !== 'string') {
latestMessage = latestMessage[0].text;
}
latestMessage = [systemContent, latestMessage].join('\n');
messages.push(new HumanMessage(latestMessage));
}
run = await createRun({
agent,
req: this.options.req,
runId: this.responseMessageId,
signal: abortController.signal,
customHandlers: this.options.eventHandlers,
});
if (!run) {
throw new Error('Failed to create run');
}
if (i === 0) {
this.run = run;
}
if (contentData.length) {
run.Graph.contentData = contentData;
}
await run.processStream({ messages }, config, {
keepContent: i !== 0,
callbacks: {
[Callback.TOOL_ERROR]: (graph, error, toolId) => {
logger.error(
'[api/server/controllers/agents/client.js #chatCompletion] Tool Error',
error,
toolId,
);
},
},
});
};
await runAgent(this.options.agent, initialMessages);
let finalContentStart = 0;
if (this.agentConfigs && this.agentConfigs.size > 0) {
let latestMessage = initialMessages.pop().content;
if (typeof latestMessage !== 'string') {
latestMessage = latestMessage[0].text;
}
let i = 1;
let runMessages = [];
const lastFiveMessages = initialMessages.slice(-5);
for (const [agentId, agent] of this.agentConfigs) {
if (abortController.signal.aborted === true) {
break;
}
const currentRun = await run;
if (
i === this.agentConfigs.size &&
config.configurable.hide_sequential_outputs === true
) {
const content = this.contentParts.filter(
(part) => part.type === ContentTypes.TOOL_CALL,
);
this.options.res.write(
`event: message\ndata: ${JSON.stringify({
event: 'on_content_update',
data: {
runId: this.responseMessageId,
content,
},
})}\n\n`,
);
}
const _runMessages = currentRun.Graph.getRunMessages();
finalContentStart = this.contentParts.length;
runMessages = runMessages.concat(_runMessages);
const contentData = currentRun.Graph.contentData.slice();
const bufferString = getBufferString([new HumanMessage(latestMessage), ...runMessages]);
if (i === this.agentConfigs.size) {
logger.debug(`SEQUENTIAL AGENTS: Last buffer string:\n${bufferString}`);
}
try {
const contextMessages = [];
for (const message of lastFiveMessages) {
const messageType = message._getType();
if (
(!agent.tools || agent.tools.length === 0) &&
(messageType === 'tool' || (message.tool_calls?.length ?? 0) > 0)
) {
continue;
}
contextMessages.push(message);
}
const currentMessages = [...contextMessages, new HumanMessage(bufferString)];
await runAgent(agent, currentMessages, i, contentData);
} catch (err) {
logger.error(
`[api/server/controllers/agents/client.js #chatCompletion] Error running agent ${agentId} (${i})`,
err,
);
}
i++;
}
}
if (config.configurable.hide_sequential_outputs !== true) {
finalContentStart = 0;
}
this.contentParts = this.contentParts.filter((part, index) => {
// Include parts that are either:
// 1. At or after the finalContentStart index
// 2. Of type tool_call
// 3. Have tool_call_ids property
return (
index >= finalContentStart || part.type === ContentTypes.TOOL_CALL || part.tool_call_ids
);
await run.processStream({ messages }, config, {
[Callback.TOOL_ERROR]: (graph, error, toolId) => {
logger.error(
'[api/server/controllers/agents/client.js #chatCompletion] Tool Error',
error,
toolId,
);
},
});
this.recordCollectedUsage({ context: 'message' }).catch((err) => {
logger.error(
'[api/server/controllers/agents/client.js #chatCompletion] Error recording collected usage',
@@ -764,7 +586,7 @@ class AgentClient extends BaseClient {
}
getEncoding() {
return this.model?.includes('gpt-4o') ? 'o200k_base' : 'cl100k_base';
return this.modelOptions.model?.includes('gpt-4o') ? 'o200k_base' : 'cl100k_base';
}
/**

View File

@@ -94,14 +94,8 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
conversation.title =
conversation && !conversation.title ? null : conversation?.title || 'New Chat';
if (req.body.files && client.options.attachments) {
userMessage.files = [];
const messageFiles = new Set(req.body.files.map((file) => file.file_id));
for (let attachment of client.options.attachments) {
if (messageFiles.has(attachment.file_id)) {
userMessage.files.push(attachment);
}
}
if (client.options.attachments) {
userMessage.files = client.options.attachments;
delete userMessage.image_urls;
}
@@ -115,13 +109,11 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
});
res.end();
if (!client.savedMessageIds.has(response.messageId)) {
await saveMessage(
req,
{ ...response, user },
{ context: 'api/server/controllers/agents/request.js - response end' },
);
}
await saveMessage(
req,
{ ...response, user },
{ context: 'api/server/controllers/agents/request.js - response end' },
);
}
if (!client.skipSaveUserMessage) {

View File

@@ -3,8 +3,8 @@ const { providerEndpointMap } = require('librechat-data-provider');
/**
* @typedef {import('@librechat/agents').t} t
* @typedef {import('@librechat/agents').StandardGraphConfig} StandardGraphConfig
* @typedef {import('@librechat/agents').StreamEventData} StreamEventData
* @typedef {import('@librechat/agents').ClientOptions} ClientOptions
* @typedef {import('@librechat/agents').EventHandler} EventHandler
* @typedef {import('@librechat/agents').GraphEvents} GraphEvents
* @typedef {import('@librechat/agents').IState} IState
@@ -17,16 +17,18 @@ const { providerEndpointMap } = require('librechat-data-provider');
* @param {ServerRequest} [options.req] - The server request.
* @param {string | undefined} [options.runId] - Optional run ID; otherwise, a new run ID will be generated.
* @param {Agent} options.agent - The agent for this run.
* @param {AbortSignal} options.signal - The signal for this run.
* @param {StructuredTool[] | undefined} [options.tools] - The tools to use in the run.
* @param {Record<GraphEvents, EventHandler> | undefined} [options.customHandlers] - Custom event handlers.
* @param {ClientOptions} [options.modelOptions] - Optional model to use; if not provided, it will use the default from modelMap.
* @param {boolean} [options.streaming=true] - Whether to use streaming.
* @param {boolean} [options.streamUsage=true] - Whether to stream usage information.
* @returns {Promise<Run<IState>>} A promise that resolves to a new Run instance.
*/
async function createRun({
runId,
tools,
agent,
signal,
modelOptions,
customHandlers,
streaming = true,
streamUsage = true,
@@ -38,17 +40,14 @@ async function createRun({
streaming,
streamUsage,
},
agent.model_parameters,
modelOptions,
);
/** @type {StandardGraphConfig} */
const graphConfig = {
signal,
tools,
llmConfig,
tools: agent.tools,
instructions: agent.instructions,
additional_instructions: agent.additional_instructions,
// toolEnd: agent.end_after_tools,
};
// TEMPORARY FOR TESTING

View File

@@ -111,6 +111,7 @@ const getAgentHandler = async (req, res) => {
isCollaborative: agent.isCollaborative,
});
}
return res.status(200).json(agent);
} catch (error) {
logger.error('[/Agents/:id] Error retrieving agent', error);
@@ -131,24 +132,16 @@ const updateAgentHandler = async (req, res) => {
try {
const id = req.params.id;
const { projectIds, removeProjectIds, ...updateData } = req.body;
const isAdmin = req.user.role === SystemRoles.ADMIN;
const existingAgent = await getAgent({ id });
const isAuthor = existingAgent.author.toString() === req.user.id;
if (!existingAgent) {
return res.status(404).json({ error: 'Agent not found' });
let updatedAgent;
const query = { id, author: req.user.id };
if (req.user.role === SystemRoles.ADMIN) {
delete query.author;
}
const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;
if (!hasEditPermission) {
return res.status(403).json({
error: 'You do not have permission to modify this non-collaborative agent',
});
if (Object.keys(updateData).length > 0) {
updatedAgent = await updateAgent(query, updateData);
}
let updatedAgent =
Object.keys(updateData).length > 0 ? await updateAgent({ id }, updateData) : existingAgent;
if (projectIds || removeProjectIds) {
updatedAgent = await updateAgentProjects({
user: req.user,

View File

@@ -1,12 +1,6 @@
const { nanoid } = require('nanoid');
const { EnvVar } = require('@librechat/agents');
const { Tools, AuthType, ToolCallTypes } = require('librechat-data-provider');
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { loadAuthValues, loadTools } = require('~/app/clients/tools/util');
const { createToolCall, getToolCallsByConvo } = require('~/models/ToolCall');
const { getMessage } = require('~/models/Message');
const { logger } = require('~/config');
const { Tools, AuthType } = require('librechat-data-provider');
const { loadAuthValues } = require('~/app/clients/tools/util');
const fieldsMap = {
[Tools.execute_code]: [EnvVar.CODE_API_KEY],
@@ -30,7 +24,6 @@ const verifyToolAuth = async (req, res) => {
result = await loadAuthValues({
userId: req.user.id,
authFields,
throwError: false,
});
} catch (error) {
res.status(200).json({ authenticated: false, message: AuthType.USER_PROVIDED });
@@ -55,131 +48,6 @@ const verifyToolAuth = async (req, res) => {
}
};
/**
* @param {ServerRequest} req - The request object, containing information about the HTTP request.
* @param {ServerResponse} res - The response object, used to send back the desired HTTP response.
* @returns {Promise<void>} A promise that resolves when the function has completed.
*/
const callTool = async (req, res) => {
try {
const { toolId = '' } = req.params;
if (!fieldsMap[toolId]) {
logger.warn(`[${toolId}/call] User ${req.user.id} attempted call to invalid tool`);
res.status(404).json({ message: 'Tool not found' });
return;
}
const { partIndex, blockIndex, messageId, conversationId, ...args } = req.body;
if (!messageId) {
logger.warn(`[${toolId}/call] User ${req.user.id} attempted call without message ID`);
res.status(400).json({ message: 'Message ID required' });
return;
}
const message = await getMessage({ user: req.user.id, messageId });
if (!message) {
logger.debug(`[${toolId}/call] User ${req.user.id} attempted call with invalid message ID`);
res.status(404).json({ message: 'Message not found' });
return;
}
logger.debug(`[${toolId}/call] User: ${req.user.id}`);
const { loadedTools } = await loadTools({
user: req.user.id,
tools: [toolId],
functions: true,
options: {
req,
returnMetadata: true,
processFileURL,
uploadImageBuffer,
fileStrategy: req.app.locals.fileStrategy,
},
});
const tool = loadedTools[0];
const toolCallId = `${req.user.id}_${nanoid()}`;
const result = await tool.invoke({
args,
name: toolId,
id: toolCallId,
type: ToolCallTypes.TOOL_CALL,
});
const { content, artifact } = result;
const toolCallData = {
toolId,
messageId,
partIndex,
blockIndex,
conversationId,
result: content,
user: req.user.id,
};
if (!artifact || !artifact.files || toolId !== Tools.execute_code) {
createToolCall(toolCallData).catch((error) => {
logger.error(`Error creating tool call: ${error.message}`);
});
return res.status(200).json({
result: content,
});
}
const artifactPromises = [];
for (const file of artifact.files) {
const { id, name } = file;
artifactPromises.push(
(async () => {
const fileMetadata = await processCodeOutput({
req,
id,
name,
apiKey: tool.apiKey,
messageId,
toolCallId,
conversationId,
session_id: artifact.session_id,
});
if (!fileMetadata) {
return null;
}
return fileMetadata;
})().catch((error) => {
logger.error('Error processing code output:', error);
return null;
}),
);
}
const attachments = await Promise.all(artifactPromises);
toolCallData.attachments = attachments;
createToolCall(toolCallData).catch((error) => {
logger.error(`Error creating tool call: ${error.message}`);
});
res.status(200).json({
result: content,
attachments,
});
} catch (error) {
logger.error('Error calling tool', error);
res.status(500).json({ message: 'Error calling tool' });
}
};
/**
 * Lists all tool calls recorded for a conversation owned by the requesting user.
 * Responds 200 with the array of tool-call records, or 500 on lookup failure.
 */
const getToolCalls = async (req, res) => {
  try {
    const calls = await getToolCallsByConvo(req.query.conversationId, req.user.id);
    res.status(200).json(calls);
  } catch (error) {
    logger.error('Error getting tool calls', error);
    res.status(500).json({ message: 'Error getting tool calls' });
  }
};
// Public surface of the tools controller, consumed by the tools router.
module.exports = {
  callTool,
  getToolCalls,
  verifyToolAuth,
};

View File

@@ -10,7 +10,6 @@ const openAI = require('~/server/services/Endpoints/openAI');
const agents = require('~/server/services/Endpoints/agents');
const custom = require('~/server/services/Endpoints/custom');
const google = require('~/server/services/Endpoints/google');
const { getConvoFiles } = require('~/models/Conversation');
const { handleError } = require('~/server/utils');
const buildFunction = {
@@ -73,32 +72,21 @@ async function buildEndpointOption(req, res, next) {
}
}
try {
const isAgents = isAgentsEndpoint(endpoint);
const endpointFn = buildFunction[endpointType ?? endpoint];
const builder = isAgents ? (...args) => endpointFn(req, ...args) : endpointFn;
const endpointFn = buildFunction[endpointType ?? endpoint];
const builder = isAgentsEndpoint(endpoint) ? (...args) => endpointFn(req, ...args) : endpointFn;
// TODO: use object params
req.body.endpointOption = builder(endpoint, parsedBody, endpointType);
// TODO: use object params
req.body.endpointOption = builder(endpoint, parsedBody, endpointType);
// TODO: use `getModelsConfig` only when necessary
const modelsConfig = await getModelsConfig(req);
const { resendFiles = true } = req.body.endpointOption;
req.body.endpointOption.modelsConfig = modelsConfig;
if (isAgents && resendFiles && req.body.conversationId) {
const fileIds = await getConvoFiles(req.body.conversationId);
const requestFiles = req.body.files ?? [];
if (requestFiles.length || fileIds.length) {
req.body.endpointOption.attachments = processFiles(requestFiles, fileIds);
}
} else if (req.body.files) {
// hold the promise
req.body.endpointOption.attachments = processFiles(req.body.files);
}
next();
} catch (error) {
return handleError(res, { text: 'Error building endpoint option' });
// TODO: use `getModelsConfig` only when necessary
const modelsConfig = await getModelsConfig(req);
req.body.endpointOption.modelsConfig = modelsConfig;
if (req.body.files) {
// hold the promise
req.body.endpointOption.attachments = processFiles(req.body.files);
}
next();
}
module.exports = buildEndpointOption;

View File

@@ -5,7 +5,6 @@ const loginLimiter = require('./loginLimiter');
const importLimiters = require('./importLimiters');
const uploadLimiters = require('./uploadLimiters');
const registerLimiter = require('./registerLimiter');
const toolCallLimiter = require('./toolCallLimiter');
const messageLimiters = require('./messageLimiters');
const verifyEmailLimiter = require('./verifyEmailLimiter');
const resetPasswordLimiter = require('./resetPasswordLimiter');
@@ -16,7 +15,6 @@ module.exports = {
...messageLimiters,
loginLimiter,
registerLimiter,
toolCallLimiter,
createTTSLimiters,
createSTTLimiters,
verifyEmailLimiter,

View File

@@ -1,25 +0,0 @@
const rateLimit = require('express-rate-limit');
const { ViolationTypes } = require('librechat-data-provider');
const logViolation = require('~/cache/logViolation');
// Limits tool-call requests to one per second per user (windowMs: 1000, max: 1).
const toolCallLimiter = rateLimit({
  windowMs: 1000,
  max: 1,
  // On limit breach: log a violation (score 0) and respond 429.
  handler: async (req, res) => {
    const type = ViolationTypes.TOOL_CALL_LIMIT;
    // Violation payload recorded via logViolation for auditing.
    // NOTE(review): `windowInMinutes: 1` disagrees with `windowMs: 1000` (1 second) — confirm which is intended.
    const errorMessage = {
      type,
      max: 1,
      limiter: 'user',
      windowInMinutes: 1,
    };
    await logViolation(req, res, type, errorMessage, 0);
    res.status(429).json({ message: 'Too many tool call requests. Try again later' });
  },
  // Rate-limit key is the authenticated user's id (undefined when unauthenticated).
  keyGenerator: function (req) {
    return req.user?.id;
  },
});

module.exports = toolCallLimiter;

View File

@@ -1,23 +1,19 @@
const express = require('express');
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const router = express.Router();
const {
setHeaders,
handleAbort,
// validateModel,
generateCheckAccess,
validateConvoAccess,
// validateEndpoint,
buildEndpointOption,
} = require('~/server/middleware');
const { initializeClient } = require('~/server/services/Endpoints/agents');
const AgentController = require('~/server/controllers/agents/request');
const addTitle = require('~/server/services/Endpoints/agents/title');
const router = express.Router();
router.post('/abort', handleAbort());
const checkAgentAccess = generateCheckAccess(PermissionTypes.AGENTS, [Permissions.USE]);
/**
* @route POST /
* @desc Chat with an assistant
@@ -29,8 +25,7 @@ const checkAgentAccess = generateCheckAccess(PermissionTypes.AGENTS, [Permission
router.post(
'/',
// validateModel,
checkAgentAccess,
validateConvoAccess,
// validateEndpoint,
buildEndpointOption,
setHeaders,
async (req, res, next) => {

View File

@@ -1,7 +1,6 @@
const express = require('express');
const { callTool, verifyToolAuth, getToolCalls } = require('~/server/controllers/tools');
const { getAvailableTools } = require('~/server/controllers/PluginController');
const { toolCallLimiter } = require('~/server/middleware/limiters');
const { verifyToolAuth } = require('~/server/controllers/tools');
const router = express.Router();
@@ -12,13 +11,6 @@ const router = express.Router();
*/
router.get('/', getAvailableTools);
/**
* Get a list of tool calls.
* @route GET /agents/tools/calls
* @returns {ToolCallData[]} 200 - application/json
*/
router.get('/calls', getToolCalls);
/**
* Verify authentication for a specific tool
* @route GET /agents/tools/:toolId/auth
@@ -27,13 +19,4 @@ router.get('/calls', getToolCalls);
*/
router.get('/:toolId/auth', verifyToolAuth);
/**
* Execute code for a specific tool
* @route POST /agents/tools/:toolId/call
* @param {string} toolId - The ID of the tool to execute
* @param {object} req.body - Request body
* @returns {object} Result of code execution
*/
router.post('/:toolId/call', toolCallLimiter, callTool);
module.exports = router;

View File

@@ -7,7 +7,6 @@ const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
const { forkConversation } = require('~/server/utils/import/fork');
const { importConversations } = require('~/server/utils/import');
const { createImportLimiters } = require('~/server/middleware');
const { deleteToolCalls } = require('~/models/ToolCall');
const getLogStores = require('~/cache/getLogStores');
const { sleep } = require('~/server/utils');
const { logger } = require('~/config');
@@ -106,7 +105,6 @@ router.post('/clear', async (req, res) => {
try {
const dbResponse = await deleteConvos(req.user.id, filter);
await deleteToolCalls(req.user.id, filter.conversationId);
res.status(201).json(dbResponse);
} catch (error) {
logger.error('Error clearing conversations', error);

View File

@@ -107,10 +107,6 @@ router.delete('/', async (req, res) => {
}
});
/**
 * Validates that `str` is exactly 21 URL-safe characters (letters, digits,
 * underscore, hyphen) — the id shape used for session/file identifiers here.
 */
function isValidID(str) {
  const ID_PATTERN = /^[A-Za-z0-9_-]{21}$/;
  return ID_PATTERN.test(str);
}
router.get('/code/download/:session_id/:fileId', async (req, res) => {
try {
const { session_id, fileId } = req.params;
@@ -121,11 +117,6 @@ router.get('/code/download/:session_id/:fileId', async (req, res) => {
return res.status(400).send('Bad request');
}
if (!isValidID(session_id) || !isValidID(fileId)) {
logger.debug(`${logPrefix} invalid session_id or fileId`);
return res.status(400).send('Bad request');
}
const { getDownloadStream } = getStrategyFunctions(FileSources.execute_code);
if (!getDownloadStream) {
logger.warn(
@@ -222,20 +213,21 @@ router.get('/download/:userId/:file_id', async (req, res) => {
});
router.post('/', async (req, res) => {
const file = req.file;
const metadata = req.body;
let cleanup = true;
try {
filterFile({ req });
filterFile({ req, file });
metadata.temp_file_id = metadata.file_id;
metadata.file_id = req.file_id;
if (isAgentsEndpoint(metadata.endpoint)) {
return await processAgentFileUpload({ req, res, metadata });
return await processAgentFileUpload({ req, res, file, metadata });
}
await processFileUpload({ req, res, metadata });
await processFileUpload({ req, res, file, metadata });
} catch (error) {
let message = 'Error processing file';
logger.error('[/files] Error processing file:', error);
@@ -246,7 +238,7 @@ router.post('/', async (req, res) => {
// TODO: delete remote file if it exists
try {
await fs.unlink(req.file.path);
await fs.unlink(file.path);
cleanup = false;
} catch (error) {
logger.error('[/files] Error deleting file:', error);
@@ -256,7 +248,7 @@ router.post('/', async (req, res) => {
if (cleanup) {
try {
await fs.unlink(req.file.path);
await fs.unlink(file.path);
} catch (error) {
logger.error('[/files] Error deleting file after file processing:', error);
}

View File

@@ -1,12 +1,7 @@
const path = require('path');
const fs = require('fs').promises;
const express = require('express');
const { isAgentsEndpoint } = require('librechat-data-provider');
const {
filterFile,
processImageFile,
processAgentFileUpload,
} = require('~/server/services/Files/process');
const { filterFile, processImageFile } = require('~/server/services/Files/process');
const { logger } = require('~/config');
const router = express.Router();
@@ -15,16 +10,12 @@ router.post('/', async (req, res) => {
const metadata = req.body;
try {
filterFile({ req, image: true });
filterFile({ req, file: req.file, image: true });
metadata.temp_file_id = metadata.file_id;
metadata.file_id = req.file_id;
if (isAgentsEndpoint(metadata.endpoint) && metadata.tool_resource != null) {
return await processAgentFileUpload({ req, res, metadata });
}
await processImageFile({ req, res, metadata });
await processImageFile({ req, res, file: req.file, metadata });
} catch (error) {
// TODO: delete remote file if it exists
logger.error('[/files/images] Error processing file:', error);

View File

@@ -1,7 +1,6 @@
const express = require('express');
const {
promptPermissionsSchema,
agentPermissionsSchema,
PermissionTypes,
roleDefaults,
SystemRoles,
@@ -73,37 +72,4 @@ router.put('/:roleName/prompts', checkAdmin, async (req, res) => {
}
});
/**
 * PUT /api/roles/:roleName/agents
 * Update agent permissions for a specific role.
 * Responds 200 with the updated role, 404 if the role is unknown,
 * 400 if the payload fails schema validation.
 */
router.put('/:roleName/agents', checkAdmin, async (req, res) => {
  const { roleName: _r } = req.params;
  // TODO: TEMP, use a better parsing for roleName
  const roleName = _r.toUpperCase();
  /** @type {TRole['AGENTS']} */
  const updates = req.body;
  try {
    // `.partial()` allows callers to send only the permission keys they change.
    const parsedUpdates = agentPermissionsSchema.partial().parse(updates);
    const role = await getRoleByName(roleName);
    if (!role) {
      return res.status(404).send({ message: 'Role not found' });
    }
    // Merge validated updates over the role's existing AGENTS permissions.
    const mergedUpdates = {
      [PermissionTypes.AGENTS]: {
        ...role[PermissionTypes.AGENTS],
        ...parsedUpdates,
      },
    };
    const updatedRole = await updateRoleByName(roleName, mergedUpdates);
    res.status(200).send(updatedRole);
  } catch (error) {
    // Fixed copy-paste from the prompts route: this endpoint validates AGENT permissions.
    return res.status(400).send({ message: 'Invalid agent permissions.', error: error.errors });
  }
});
module.exports = router;

View File

@@ -8,6 +8,7 @@ const { loadDefaultInterface } = require('./start/interface');
const { azureConfigSetup } = require('./start/azureOpenAI');
const { loadAndFormatTools } = require('./ToolService');
const { initializeRoles } = require('~/models/Role');
const { cleanup } = require('./cleanup');
const paths = require('~/config/paths');
/**
@@ -17,6 +18,7 @@ const paths = require('~/config/paths');
* @param {Express.Application} app - The Express application object.
*/
const AppService = async (app) => {
cleanup();
await initializeRoles();
/** @type {TCustomConfig}*/
const config = (await loadCustomConfig()) ?? {};

View File

@@ -49,6 +49,10 @@ module.exports = {
process.env.BEDROCK_AWS_SECRET_ACCESS_KEY ?? process.env.BEDROCK_AWS_DEFAULT_REGION,
),
/* key will be part of separate config */
[EModelEndpoint.agents]: generateConfig('true', undefined, EModelEndpoint.agents),
[EModelEndpoint.agents]: generateConfig(
process.env.EXPERIMENTAL_AGENTS,
undefined,
EModelEndpoint.agents,
),
},
};

View File

@@ -2,14 +2,8 @@ const { loadAgent } = require('~/models/Agent');
const { logger } = require('~/config');
const buildOptions = (req, endpoint, parsedBody) => {
const {
agent_id,
instructions,
spec,
maxContextTokens,
resendFiles = true,
...model_parameters
} = parsedBody;
const { agent_id, instructions, spec, ...model_parameters } = parsedBody;
const agentPromise = loadAgent({
req,
agent_id,
@@ -19,14 +13,12 @@ const buildOptions = (req, endpoint, parsedBody) => {
});
const endpointOption = {
spec,
agent: agentPromise,
endpoint,
agent_id,
resendFiles,
instructions,
maxContextTokens,
spec,
model_parameters,
agent: agentPromise,
};
return endpointOption;

View File

@@ -16,8 +16,6 @@ const { getCustomEndpointConfig } = require('~/server/services/Config');
const { loadAgentTools } = require('~/server/services/ToolService');
const AgentClient = require('~/server/controllers/agents/client');
const { getModelMaxTokens } = require('~/utils');
const { getAgent } = require('~/models/Agent');
const { logger } = require('~/config');
const providerConfigMap = {
[EModelEndpoint.openAI]: initOpenAI,
@@ -27,113 +25,6 @@ const providerConfigMap = {
[Providers.OLLAMA]: initCustom,
};
/**
 * Resolves pending attachment uploads and routes them into agent tool resources.
 *
 * Files with `metadata.fileIdentifier` set go into the `execute_code` resource;
 * files with `embedded === true` go into `file_search`. Every non-null file is
 * also kept in the flat `attachments` list, so a file may appear in both places.
 *
 * @param {Promise<Array<MongoFile | null>> | undefined} _attachments
 * @param {AgentToolResources | undefined} _tool_resources
 * @returns {Promise<{ attachments: Array<MongoFile | undefined> | undefined, tool_resources: AgentToolResources | undefined }>}
 */
const primeResources = async (_attachments, _tool_resources) => {
  try {
    // No uploads on this request: pass existing tool resources through untouched.
    if (!_attachments) {
      return { attachments: undefined, tool_resources: _tool_resources };
    }
    /** @type {Array<MongoFile | undefined> | undefined} */
    const files = await _attachments;
    const attachments = [];
    // Reuses (and mutates) the caller's object when one was provided.
    const tool_resources = _tool_resources ?? {};
    for (const file of files) {
      if (!file) {
        continue;
      }
      if (file.metadata?.fileIdentifier) {
        // Code-interpreter file: ensure execute_code.files exists before appending.
        const execute_code = tool_resources.execute_code ?? {};
        if (!execute_code.files) {
          tool_resources.execute_code = { ...execute_code, files: [] };
        }
        tool_resources.execute_code.files.push(file);
      } else if (file.embedded === true) {
        // Vector-embedded file: route into the file_search tool resource.
        const file_search = tool_resources.file_search ?? {};
        if (!file_search.files) {
          tool_resources.file_search = { ...file_search, files: [] };
        }
        tool_resources.file_search.files.push(file);
      }
      attachments.push(file);
    }
    return { attachments, tool_resources };
  } catch (error) {
    logger.error('Error priming resources', error);
    // NOTE(review): on failure this returns the original `_attachments`, which may
    // still be an unresolved Promise rather than an array — confirm callers tolerate that.
    return { attachments: _attachments, tool_resources: _tool_resources };
  }
};
/**
 * Loads an agent's tools and provider-specific LLM options, mutating the agent
 * in place with its resolved `model_parameters`.
 *
 * Providers without an entry in `providerConfigMap` fall back to the custom
 * endpoint initializer; in that case `agent.provider` is rewritten to OPENAI
 * and `agent.endpoint` to the lowercased original provider name.
 *
 * @param {object} params
 * @param {ServerRequest} params.req
 * @param {ServerResponse} params.res
 * @param {Agent} params.agent - Agent record; mutated (provider, endpoint, model_parameters).
 * @param {object} params.endpointOption - Request-level options; forwarded to the
 *   provider only when this is the initial (primary) agent.
 * @param {AgentToolResources} [params.tool_resources]
 * @param {boolean} [params.isInitialAgent=false]
 * @returns {Promise<object>} The agent spread with `tools`, `toolContextMap`,
 *   and a resolved `maxContextTokens` (falls back to 4000).
 */
const initializeAgentOptions = async ({
  req,
  res,
  agent,
  endpointOption,
  tool_resources,
  isInitialAgent = false,
}) => {
  const { tools, toolContextMap } = await loadAgentTools({
    req,
    tools: agent.tools,
    agent_id: agent.id,
    tool_resources,
  });
  const provider = agent.provider;
  let getOptions = providerConfigMap[provider];
  if (!getOptions) {
    // Unknown provider: only supported when a custom endpoint config exists for it.
    const customEndpointConfig = await getCustomEndpointConfig(provider);
    if (!customEndpointConfig) {
      throw new Error(`Provider ${provider} not supported`);
    }
    getOptions = initCustom;
    agent.provider = Providers.OPENAI;
    agent.endpoint = provider.toLowerCase();
  }
  const model_parameters = agent.model_parameters ?? { model: agent.model };
  // Secondary agents only receive their own model parameters, not request options.
  const _endpointOption = isInitialAgent
    ? endpointOption
    : {
        model_parameters,
      };
  const options = await getOptions({
    req,
    res,
    optionsOnly: true,
    overrideEndpoint: provider,
    overrideModel: agent.model,
    endpointOption: _endpointOption,
  });
  // Merge provider llmConfig into the (possibly shared) model_parameters object.
  agent.model_parameters = Object.assign(model_parameters, options.llmConfig);
  if (options.configOptions) {
    agent.model_parameters.configuration = options.configOptions;
  }
  if (!agent.model_parameters.model) {
    agent.model_parameters.model = agent.model;
  }
  return {
    ...agent,
    tools,
    toolContextMap,
    maxContextTokens:
      agent.max_context_tokens ??
      getModelMaxTokens(agent.model_parameters.model, providerEndpointMap[provider]) ??
      4000,
  };
};
const initializeClient = async ({ req, res, endpointOption }) => {
if (!endpointOption) {
throw new Error('Endpoint option not provided');
@@ -157,68 +48,70 @@ const initializeClient = async ({ req, res, endpointOption }) => {
throw new Error('No agent promise provided');
}
// Initialize primary agent
const primaryAgent = await endpointOption.agent;
if (!primaryAgent) {
/** @type {Agent | null} */
const agent = await endpointOption.agent;
if (!agent) {
throw new Error('Agent not found');
}
const { attachments, tool_resources } = await primeResources(
endpointOption.attachments,
primaryAgent.tool_resources,
);
const agentConfigs = new Map();
// Handle primary agent
const primaryConfig = await initializeAgentOptions({
const { tools } = await loadAgentTools({
req,
res,
agent: primaryAgent,
endpointOption,
tool_resources,
isInitialAgent: true,
tools: agent.tools,
agent_id: agent.id,
tool_resources: agent.tool_resources,
});
const agent_ids = primaryConfig.agent_ids;
if (agent_ids?.length) {
for (const agentId of agent_ids) {
const agent = await getAgent({ id: agentId });
if (!agent) {
throw new Error(`Agent ${agentId} not found`);
}
const config = await initializeAgentOptions({
req,
res,
agent,
endpointOption,
});
agentConfigs.set(agentId, config);
const provider = agent.provider;
let modelOptions = { model: agent.model };
let getOptions = providerConfigMap[provider];
if (!getOptions) {
const customEndpointConfig = await getCustomEndpointConfig(provider);
if (!customEndpointConfig) {
throw new Error(`Provider ${provider} not supported`);
}
getOptions = initCustom;
agent.provider = Providers.OPENAI;
agent.endpoint = provider.toLowerCase();
}
const sender =
primaryAgent.name ??
getResponseSender({
...endpointOption,
model: endpointOption.model_parameters.model,
});
// TODO: pass-in override settings that are specific to current run
endpointOption.model_parameters.model = agent.model;
const options = await getOptions({
req,
res,
endpointOption,
optionsOnly: true,
overrideEndpoint: provider,
overrideModel: agent.model,
});
modelOptions = Object.assign(modelOptions, options.llmConfig);
if (options.configOptions) {
modelOptions.configuration = options.configOptions;
}
const sender = getResponseSender({
...endpointOption,
model: endpointOption.model_parameters.model,
});
const client = new AgentClient({
req,
agent: primaryConfig,
agent,
tools,
sender,
attachments,
contentParts,
modelOptions,
eventHandlers,
collectedUsage,
artifactPromises,
spec: endpointOption.spec,
agentConfigs,
endpoint: EModelEndpoint.agents,
maxContextTokens: primaryConfig.maxContextTokens,
attachments: endpointOption.attachments,
maxContextTokens:
agent.max_context_tokens ??
getModelMaxTokens(modelOptions.model, providerEndpointMap[provider]) ??
4000,
});
return { client };
};

View File

@@ -135,12 +135,6 @@ const initializeClient = async ({ req, res, version, endpointOption, initAppClie
clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl;
clientOptions.headers = opts.defaultHeaders;
clientOptions.azure = !serverless && azureOptions;
if (serverless === true) {
clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion
? { 'api-version': azureOptions.azureOpenAIApiVersion }
: undefined;
clientOptions.headers['api-key'] = apiKey;
}
}
}

View File

@@ -5,6 +5,7 @@ const {
getResponseSender,
} = require('librechat-data-provider');
const { getDefaultHandlers } = require('~/server/controllers/agents/callbacks');
// const { loadAgentTools } = require('~/server/services/ToolService');
const getOptions = require('~/server/services/Endpoints/bedrock/options');
const AgentClient = require('~/server/controllers/agents/client');
const { getModelMaxTokens } = require('~/utils');
@@ -19,6 +20,8 @@ const initializeClient = async ({ req, res, endpointOption }) => {
const { contentParts, aggregateContent } = createContentAggregator();
const eventHandlers = getDefaultHandlers({ res, aggregateContent, collectedUsage });
// const tools = [createTavilySearchTool()];
/** @type {Agent} */
const agent = {
id: EModelEndpoint.bedrock,
@@ -33,6 +36,8 @@ const initializeClient = async ({ req, res, endpointOption }) => {
agent.instructions = `${agent.instructions ?? ''}\n${endpointOption.artifactsPrompt}`.trim();
}
let modelOptions = { model: agent.model };
// TODO: pass-in override settings that are specific to current run
const options = await getOptions({
req,
@@ -40,34 +45,28 @@ const initializeClient = async ({ req, res, endpointOption }) => {
endpointOption,
});
agent.model_parameters = Object.assign(agent.model_parameters, options.llmConfig);
if (options.configOptions) {
agent.model_parameters.configuration = options.configOptions;
}
modelOptions = Object.assign(modelOptions, options.llmConfig);
const maxContextTokens =
agent.max_context_tokens ??
getModelMaxTokens(modelOptions.model, providerEndpointMap[agent.provider]);
const sender =
agent.name ??
getResponseSender({
...endpointOption,
model: endpointOption.model_parameters.model,
});
const sender = getResponseSender({
...endpointOption,
model: endpointOption.model_parameters.model,
});
const client = new AgentClient({
req,
agent,
sender,
// tools,
modelOptions,
contentParts,
eventHandlers,
collectedUsage,
spec: endpointOption.spec,
maxContextTokens,
endpoint: EModelEndpoint.bedrock,
resendFiles: endpointOption.resendFiles,
maxContextTokens:
endpointOption.maxContextTokens ??
agent.max_context_tokens ??
getModelMaxTokens(agent.model_parameters.model, providerEndpointMap[agent.provider]) ??
4000,
configOptions: options.configOptions,
attachments: endpointOption.attachments,
});
return { client };

View File

@@ -10,8 +10,8 @@ const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/User
const { getLLMConfig } = require('~/server/services/Endpoints/openAI/llm');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { fetchModels } = require('~/server/services/ModelService');
const { isUserProvided, sleep } = require('~/server/utils');
const getLogStores = require('~/cache/getLogStores');
const { isUserProvided } = require('~/server/utils');
const { OpenAIClient } = require('~/app');
const { PROXY } = process.env;
@@ -141,18 +141,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
},
clientOptions,
);
const options = getLLMConfig(apiKey, requestOptions);
if (!customOptions.streamRate) {
return options;
}
options.llmConfig.callbacks = [
{
handleLLMNewToken: async () => {
await sleep(customOptions.streamRate);
},
},
];
return options;
return getLLMConfig(apiKey, requestOptions);
}
if (clientOptions.reverseProxyUrl) {

View File

@@ -96,12 +96,6 @@ const initializeClient = async ({ req, res, endpointOption }) => {
apiKey = azureOptions.azureOpenAIApiKey;
clientOptions.azure = !serverless && azureOptions;
if (serverless === true) {
clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion
? { 'api-version': azureOptions.azureOpenAIApiVersion }
: undefined;
clientOptions.headers['api-key'] = apiKey;
}
} else if (useAzure || (apiKey && apiKey.includes('{"azure') && !clientOptions.azure)) {
clientOptions.azure = userProvidesKey ? JSON.parse(userValues.apiKey) : getAzureCredentials();
apiKey = clientOptions.azure.azureOpenAIApiKey;

View File

@@ -6,7 +6,7 @@ const {
} = require('librechat-data-provider');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getLLMConfig } = require('~/server/services/Endpoints/openAI/llm');
const { isEnabled, isUserProvided, sleep } = require('~/server/utils');
const { isEnabled, isUserProvided } = require('~/server/utils');
const { getAzureCredentials } = require('~/utils');
const { OpenAIClient } = require('~/app');
@@ -97,12 +97,6 @@ const initializeClient = async ({
apiKey = azureOptions.azureOpenAIApiKey;
clientOptions.azure = !serverless && azureOptions;
if (serverless === true) {
clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion
? { 'api-version': azureOptions.azureOpenAIApiVersion }
: undefined;
clientOptions.headers['api-key'] = apiKey;
}
} else if (isAzureOpenAI) {
clientOptions.azure = userProvidesKey ? JSON.parse(userValues.apiKey) : getAzureCredentials();
apiKey = clientOptions.azure.azureOpenAIApiKey;
@@ -140,18 +134,7 @@ const initializeClient = async ({
},
clientOptions,
);
const options = getLLMConfig(apiKey, requestOptions);
if (!clientOptions.streamRate) {
return options;
}
options.llmConfig.callbacks = [
{
handleLLMNewToken: async () => {
await sleep(clientOptions.streamRate);
},
},
];
return options;
return getLLMConfig(apiKey, requestOptions);
}
const client = new OpenAIClient(apiKey, Object.assign({ req, res }, clientOptions));

View File

@@ -29,7 +29,6 @@ function getLLMConfig(apiKey, options = {}) {
modelOptions = {},
reverseProxyUrl,
useOpenRouter,
defaultQuery,
headers,
proxy,
azure,
@@ -75,10 +74,6 @@ function getLLMConfig(apiKey, options = {}) {
}
}
if (defaultQuery) {
configOptions.baseOptions.defaultQuery = defaultQuery;
}
if (proxy) {
const proxyAgent = new HttpsProxyAgent(proxy);
Object.assign(configOptions, {

View File

@@ -2,6 +2,7 @@ const axios = require('axios');
const fs = require('fs').promises;
const FormData = require('form-data');
const { Readable } = require('stream');
const { createClient } = require('@deepgram/sdk');
const { extractEnvVariable, STTProviders } = require('librechat-data-provider');
const { getCustomConfig } = require('~/server/services/Config');
const { genAzureEndpoint } = require('~/utils');
@@ -18,10 +19,14 @@ class STTService {
*/
constructor(customConfig) {
this.customConfig = customConfig;
this.providerStrategies = {
this.apiStrategies = {
[STTProviders.OPENAI]: this.openAIProvider,
[STTProviders.AZURE_OPENAI]: this.azureOpenAIProvider,
};
this.sdkStrategies = {
[STTProviders.DEEPGRAM]: this.deepgramSDKProvider,
};
}
/**
@@ -106,7 +111,7 @@ class STTService {
'Content-Type': 'multipart/form-data',
...(apiKey && { Authorization: `Bearer ${apiKey}` }),
};
[headers].forEach(this.removeUndefined);
this.removeUndefined(headers);
return [url, data, headers];
}
@@ -121,9 +126,9 @@ class STTService {
*/
azureOpenAIProvider(sttSchema, audioBuffer, audioFile) {
const url = `${genAzureEndpoint({
azureOpenAIApiInstanceName: extractEnvVariable(sttSchema?.instanceName),
azureOpenAIApiDeploymentName: extractEnvVariable(sttSchema?.deploymentName),
})}/audio/transcriptions?api-version=${extractEnvVariable(sttSchema?.apiVersion)}`;
azureOpenAIApiInstanceName: sttSchema?.instanceName,
azureOpenAIApiDeploymentName: sttSchema?.deploymentName,
})}/audio/transcriptions?api-version=${sttSchema?.apiVersion}`;
const apiKey = sttSchema.apiKey ? extractEnvVariable(sttSchema.apiKey) : '';
@@ -153,6 +158,70 @@ class STTService {
return [url, formData, { ...headers, ...formData.getHeaders() }];
}
/**
 * Transcribes audio using the Deepgram SDK directly (no axios request is built,
 * unlike the other provider strategies).
 * @async
 * @param {Object} sttSchema - Deepgram STT config with model/formatting/custom_vocabulary/intelligence groups.
 * @param {Stream} audioReadStream - The audio data to be transcribed.
 * @returns {Promise<string>} Resolves to the transcript, or '' when none was produced.
 * @throws {Error} If the Deepgram SDK reports an error.
 */
async deepgramSDKProvider(sttSchema, audioReadStream) {
  // API key may be an env-var reference; empty string when unset.
  const apiKey = extractEnvVariable(sttSchema.apiKey) || '';
  const deepgram = createClient(apiKey);

  // Flatten the grouped schema into Deepgram's flat option names; unset
  // entries are undefined here and stripped below.
  const configOptions = {
    // Model parameters
    model: sttSchema.model?.model,
    language: sttSchema.model?.language,
    detect_language: sttSchema.model?.detect_language,
    version: sttSchema.model?.version,

    // Formatting parameters
    smart_format: sttSchema.formatting?.smart_format,
    diarize: sttSchema.formatting?.diarize,
    filler_words: sttSchema.formatting?.filler_words,
    numerals: sttSchema.formatting?.numerals,
    punctuate: sttSchema.formatting?.punctuate,
    paragraphs: sttSchema.formatting?.paragraphs,
    profanity_filter: sttSchema.formatting?.profanity_filter,
    redact: sttSchema.formatting?.redact,
    utterances: sttSchema.formatting?.utterances,
    utt_split: sttSchema.formatting?.utt_split,

    // Custom vocabulary parameters
    replace: sttSchema.custom_vocabulary?.replace,
    keywords: sttSchema.custom_vocabulary?.keywords,

    // Intelligence parameters
    sentiment: sttSchema.intelligence?.sentiment,
    intents: sttSchema.intelligence?.intents,
    topics: sttSchema.intelligence?.topics,
  };

  // Drop undefined keys so they are not sent to the API.
  this.removeUndefined(configOptions);

  const { result, error } = await deepgram.listen.prerecorded.transcribeFile(
    audioReadStream,
    configOptions,
  );

  if (error) {
    throw error;
  }

  // First channel, first alternative; empty string when no transcript exists.
  return result.results?.channels[0]?.alternatives[0]?.transcript || '';
}
// TODO: Implement a better way to determine if the SDK should be used
shouldUseSDK(provider) {
if (provider === STTProviders.DEEPGRAM) {
return true;
}
return false;
}
/**
* Sends an STT request to the specified provider.
* @async
@@ -165,27 +234,29 @@ class STTService {
* @throws {Error} If the provider is invalid, the response status is not 200, or the response data is missing.
*/
async sttRequest(provider, sttSchema, { audioBuffer, audioFile }) {
const strategy = this.providerStrategies[provider];
const useSDK = this.shouldUseSDK(provider);
const strategy = useSDK ? this.sdkStrategies[provider] : this.apiStrategies[provider];
if (!strategy) {
throw new Error('Invalid provider');
throw new Error('Invalid provider or implementation');
}
const audioReadStream = Readable.from(audioBuffer);
audioReadStream.path = 'audio.wav';
const [url, data, headers] = strategy.call(this, sttSchema, audioReadStream, audioFile);
if (useSDK) {
return strategy.call(this, sttSchema, audioReadStream, audioFile);
}
const [url, data, headers] = strategy.call(this, sttSchema, audioReadStream);
try {
const response = await axios.post(url, data, { headers });
if (response.status !== 200) {
throw new Error('Invalid response from the STT API');
}
if (!response.data || !response.data.text) {
throw new Error('Missing data in response from the STT API');
}
return response.data.text.trim();
} catch (error) {
logger.error(`STT request failed for provider ${provider}:`, error);
@@ -222,9 +293,9 @@ class STTService {
} finally {
try {
await fs.unlink(req.file.path);
logger.debug('[/speech/stt] Temp. audio upload file deleted');
logger.debug('[/speech/stt] Temporary audio upload file deleted');
} catch (error) {
logger.debug('[/speech/stt] Temp. audio upload file already deleted');
logger.debug('[/speech/stt] Temporary audio upload file already deleted');
}
}
}

View File

@@ -1,9 +1,11 @@
const axios = require('axios');
const { createClient } = require('@deepgram/sdk');
const { extractEnvVariable, TTSProviders } = require('librechat-data-provider');
const { getRandomVoiceId, createChunkProcessor, splitTextIntoChunks } = require('./streamAudio');
const { getCustomConfig } = require('~/server/services/Config');
const { genAzureEndpoint } = require('~/utils');
const { logger } = require('~/config');
const { Readable } = require('stream');
/**
* Service class for handling Text-to-Speech (TTS) operations.
@@ -16,12 +18,16 @@ class TTSService {
*/
constructor(customConfig) {
this.customConfig = customConfig;
this.providerStrategies = {
this.apiStrategies = {
[TTSProviders.OPENAI]: this.openAIProvider.bind(this),
[TTSProviders.AZURE_OPENAI]: this.azureOpenAIProvider.bind(this),
[TTSProviders.ELEVENLABS]: this.elevenLabsProvider.bind(this),
[TTSProviders.LOCALAI]: this.localAIProvider.bind(this),
};
this.sdkStrategies = {
[TTSProviders.DEEPGRAM]: this.deepgramSDKProvider.bind(this),
};
}
/**
@@ -109,25 +115,22 @@ class TTSService {
openAIProvider(ttsSchema, input, voice) {
const url = ttsSchema?.url || 'https://api.openai.com/v1/audio/speech';
if (
ttsSchema?.voices &&
ttsSchema.voices.length > 0 &&
!ttsSchema.voices.includes(voice) &&
!ttsSchema.voices.includes('ALL')
) {
if (ttsSchema?.voices && ttsSchema.voices.length > 0 && !ttsSchema.voices.includes(voice)) {
throw new Error(`Voice ${voice} is not available.`);
}
const data = {
input,
model: ttsSchema?.model,
voice: ttsSchema?.voices && ttsSchema.voices.length > 0 ? voice : undefined,
voice: voice,
backend: ttsSchema?.backend,
};
const headers = {
'Content-Type': 'application/json',
Authorization: `Bearer ${extractEnvVariable(ttsSchema?.apiKey)}`,
Authorization: `${
ttsSchema.apiKey ? 'Bearer ' + extractEnvVariable(ttsSchema.apiKey) : undefined
}`,
};
return [url, data, headers];
@@ -143,23 +146,18 @@ class TTSService {
*/
azureOpenAIProvider(ttsSchema, input, voice) {
const url = `${genAzureEndpoint({
azureOpenAIApiInstanceName: extractEnvVariable(ttsSchema?.instanceName),
azureOpenAIApiDeploymentName: extractEnvVariable(ttsSchema?.deploymentName),
})}/audio/speech?api-version=${extractEnvVariable(ttsSchema?.apiVersion)}`;
azureOpenAIApiInstanceName: ttsSchema?.instanceName,
azureOpenAIApiDeploymentName: ttsSchema?.deploymentName,
})}/audio/speech?api-version=${ttsSchema?.apiVersion}`;
if (
ttsSchema?.voices &&
ttsSchema.voices.length > 0 &&
!ttsSchema.voices.includes(voice) &&
!ttsSchema.voices.includes('ALL')
) {
if (ttsSchema?.voices && ttsSchema.voices.length > 0 && !ttsSchema.voices.includes(voice)) {
throw new Error(`Voice ${voice} is not available.`);
}
const data = {
model: extractEnvVariable(ttsSchema?.model),
model: ttsSchema?.model,
input,
voice: ttsSchema?.voices && ttsSchema.voices.length > 0 ? voice : undefined,
voice: voice,
};
const headers = {
@@ -184,7 +182,7 @@ class TTSService {
ttsSchema?.url ||
`https://api.elevenlabs.io/v1/text-to-speech/${voice}${stream ? '/stream' : ''}`;
if (!ttsSchema?.voices.includes(voice) && !ttsSchema?.voices.includes('ALL')) {
if (!ttsSchema?.voices.includes(voice)) {
throw new Error(`Voice ${voice} is not available.`);
}
@@ -202,7 +200,7 @@ class TTSService {
const headers = {
'Content-Type': 'application/json',
'xi-api-key': extractEnvVariable(ttsSchema?.apiKey),
'xi-api-key': ttsSchema.apiKey ? extractEnvVariable(ttsSchema.apiKey) : '',
Accept: 'audio/mpeg',
};
@@ -220,31 +218,107 @@ class TTSService {
localAIProvider(ttsSchema, input, voice) {
const url = ttsSchema?.url;
if (
ttsSchema?.voices &&
ttsSchema.voices.length > 0 &&
!ttsSchema.voices.includes(voice) &&
!ttsSchema.voices.includes('ALL')
) {
if (ttsSchema?.voices && ttsSchema.voices.length > 0 && !ttsSchema.voices.includes(voice)) {
throw new Error(`Voice ${voice} is not available.`);
}
const data = {
input,
model: ttsSchema?.voices && ttsSchema.voices.length > 0 ? voice : undefined,
model: voice,
backend: ttsSchema?.backend,
};
const headers = {
'Content-Type': 'application/json',
Authorization: `Bearer ${extractEnvVariable(ttsSchema?.apiKey)}`,
Authorization: `${
ttsSchema.apiKey ? 'Bearer ' + extractEnvVariable(ttsSchema.apiKey) : undefined
}`,
};
if (extractEnvVariable(ttsSchema.apiKey) === '') {
delete headers.Authorization;
return [url, data, headers];
}
/**
* Converts a ReadableStream to a Node.js stream (used in Deepgram SDK).
* @async
* @param {ReadableStream} readableStream - The ReadableStream to convert.
* @returns {Promise<Readable>} The Node.js stream.
* @throws {Error} If the conversion fails.
*/
async streamToNodeStream(readableStream) {
const reader = readableStream.getReader();
const nodeStream = new Readable({
async read() {
try {
const { value, done } = await reader.read();
if (done) {
this.push(null);
} else {
this.push(Buffer.from(value));
}
} catch (err) {
this.destroy(err);
}
},
});
return nodeStream;
}
/**
* Prepares the request for Deepgram SDK TTS provider.
* @async
* @param {Object} ttsSchema - The TTS schema for Deepgram SDK.
* @param {string} input - The input text.
* @param {string} voice - The selected voice.
* @returns {Promise<Object>} The response object.
* @throws {Error} If the selected voice is not available or the request fails.
*/
async deepgramSDKProvider(ttsSchema, input, voice) {
const apiKey = extractEnvVariable(ttsSchema.apiKey) || '';
const deepgram = createClient(apiKey);
if (ttsSchema?.voices && ttsSchema.voices.length > 0 && !ttsSchema.voices.includes(voice)) {
throw new Error(`Voice ${voice} is not available.`);
}
return [url, data, headers];
const modelParts = [ttsSchema.model, voice, ttsSchema.language].filter(Boolean);
const configOptions = {
model: modelParts.join('-'),
encoding: 'linear16',
container: 'wav',
bit_rate: ttsSchema.media_settings?.bit_rate,
sample_rate: ttsSchema.media_settings?.sample_rate,
};
this.removeUndefined(configOptions);
try {
const response = await deepgram.speak.request({ text: input }, configOptions);
const audioStream = await response.getStream();
const headers = await response.getHeaders();
// Convert ReadableStream to Node.js stream
const nodeStream = await this.streamToNodeStream(audioStream);
return {
data: nodeStream,
headers,
status: 200,
};
} catch (error) {
logger.error('Deepgram TTS request failed:', error);
throw error;
}
}
// TODO: Implement a better way to determine if the SDK should be used
shouldUseSDK(provider) {
if (provider == TTSProviders.DEEPGRAM) {
return true;
}
return false;
}
/**
@@ -260,22 +334,34 @@ class TTSService {
* @throws {Error} If the provider is invalid or the request fails.
*/
async ttsRequest(provider, ttsSchema, { input, voice, stream = true }) {
const strategy = this.providerStrategies[provider];
const useSDK = this.shouldUseSDK(provider);
const strategy = useSDK ? this.sdkStrategies[provider] : this.apiStrategies[provider];
if (!strategy) {
throw new Error('Invalid provider');
}
const [url, data, headers] = strategy.call(this, ttsSchema, input, voice, stream);
if (useSDK) {
const response = await strategy.call(this, ttsSchema, input, voice, stream);
[data, headers].forEach(this.removeUndefined.bind(this));
return {
data: response.data,
headers: response.headers,
status: response.status,
};
} else {
const [url, data, headers] = strategy.call(this, ttsSchema, input, voice, stream);
const options = { headers, responseType: stream ? 'stream' : 'arraybuffer' };
[data, headers].forEach(this.removeUndefined.bind(this));
try {
return await axios.post(url, data, options);
} catch (error) {
logger.error(`TTS request failed for provider ${provider}:`, error);
throw error;
const options = { headers, responseType: stream ? 'stream' : 'arraybuffer' };
try {
return await axios.post(url, data, options);
} catch (error) {
logger.error(`TTS request failed for provider ${provider}:`, error);
throw error;
}
}
}

View File

@@ -37,6 +37,9 @@ async function getVoices(req, res) {
case TTSProviders.LOCALAI:
voices = ttsSchema.localai?.voices;
break;
case TTSProviders.DEEPGRAM:
voices = ttsSchema.deepgram?.voices;
break;
default:
throw new Error('Invalid provider');
}

View File

@@ -40,16 +40,12 @@ async function getCodeOutputDownloadStream(fileIdentifier, apiKey) {
* @param {import('fs').ReadStream | import('stream').Readable} params.stream - The read stream for the file.
* @param {string} params.filename - The name of the file.
* @param {string} params.apiKey - The API key for authentication.
* @param {string} [params.entity_id] - Optional entity ID for the file.
* @returns {Promise<string>}
* @throws {Error} If there's an error during the upload process.
*/
async function uploadCodeEnvFile({ req, stream, filename, apiKey, entity_id = '' }) {
async function uploadCodeEnvFile({ req, stream, filename, apiKey }) {
try {
const form = new FormData();
if (entity_id.length > 0) {
form.append('entity_id', entity_id);
}
form.append('file', stream, filename);
const baseURL = getCodeBaseURL();
@@ -71,12 +67,7 @@ async function uploadCodeEnvFile({ req, stream, filename, apiKey, entity_id = ''
throw new Error(`Error uploading file: ${result.message}`);
}
const fileIdentifier = `${result.session_id}/${result.files[0].fileId}`;
if (entity_id.length === 0) {
return fileIdentifier;
}
return `${fileIdentifier}?entity_id=${entity_id}`;
return `${result.session_id}/${result.files[0].fileId}`;
} catch (error) {
throw new Error(`Error uploading file: ${error.message}`);
}

View File

@@ -3,11 +3,10 @@ const { v4 } = require('uuid');
const axios = require('axios');
const { getCodeBaseURL } = require('@librechat/agents');
const {
Tools,
FileContext,
FileSources,
imageExtRegex,
EToolResources,
FileContext,
imageExtRegex,
FileSources,
} = require('librechat-data-provider');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { convertImage } = require('~/server/services/Files/images/convert');
@@ -111,20 +110,12 @@ function checkIfActive(dateString) {
async function getSessionInfo(fileIdentifier, apiKey) {
try {
const baseURL = getCodeBaseURL();
const [path, queryString] = fileIdentifier.split('?');
const session_id = path.split('/')[0];
let queryParams = {};
if (queryString) {
queryParams = Object.fromEntries(new URLSearchParams(queryString).entries());
}
const session_id = fileIdentifier.split('/')[0];
const response = await axios({
method: 'get',
url: `${baseURL}/files/${session_id}`,
params: {
detail: 'summary',
...queryParams,
},
headers: {
'User-Agent': 'LibreChat/1.0',
@@ -133,7 +124,7 @@ async function getSessionInfo(fileIdentifier, apiKey) {
timeout: 5000,
});
return response.data.find((file) => file.name.startsWith(path))?.lastModified;
return response.data.find((file) => file.name.startsWith(fileIdentifier))?.lastModified;
} catch (error) {
logger.error(`Error fetching session info: ${error.message}`, error);
return null;
@@ -146,56 +137,29 @@ async function getSessionInfo(fileIdentifier, apiKey) {
* @param {ServerRequest} options.req
* @param {Agent['tool_resources']} options.tool_resources
* @param {string} apiKey
* @returns {Promise<{
* files: Array<{ id: string; session_id: string; name: string }>,
* toolContext: string,
* }>}
* @returns {Promise<Array<{ id: string; session_id: string; name: string }>>}
*/
const primeFiles = async (options, apiKey) => {
const { tool_resources } = options;
const file_ids = tool_resources?.[EToolResources.execute_code]?.file_ids ?? [];
const agentResourceIds = new Set(file_ids);
const resourceFiles = tool_resources?.[EToolResources.execute_code]?.files ?? [];
const dbFiles = ((await getFiles({ file_id: { $in: file_ids } })) ?? []).concat(resourceFiles);
const dbFiles = await getFiles({ file_id: { $in: file_ids } });
const files = [];
const sessions = new Map();
let toolContext = '';
for (let i = 0; i < dbFiles.length; i++) {
const file = dbFiles[i];
if (!file) {
continue;
}
for (const file of dbFiles) {
if (file.metadata.fileIdentifier) {
const [path, queryString] = file.metadata.fileIdentifier.split('?');
const [session_id, id] = path.split('/');
const [session_id, id] = file.metadata.fileIdentifier.split('/');
const pushFile = () => {
if (!toolContext) {
toolContext = `- Note: The following files are available in the "${Tools.execute_code}" tool environment:`;
}
toolContext += `\n\t- /mnt/data/${file.filename}${
agentResourceIds.has(file.file_id) ? '' : ' (just attached by user)'
}`;
files.push({
id,
session_id,
name: file.filename,
});
};
if (sessions.has(session_id)) {
pushFile();
continue;
}
let queryParams = {};
if (queryString) {
queryParams = Object.fromEntries(new URLSearchParams(queryString).entries());
}
const reuploadFile = async () => {
try {
const { getDownloadStream } = getStrategyFunctions(file.source);
@@ -207,7 +171,6 @@ const primeFiles = async (options, apiKey) => {
req: options.req,
stream,
filename: file.filename,
entity_id: queryParams.entity_id,
apiKey,
});
await updateFile({ file_id: file.file_id, metadata: { fileIdentifier } });
@@ -235,7 +198,7 @@ const primeFiles = async (options, apiKey) => {
}
}
return { files, toolContext };
return files;
};
module.exports = {

View File

@@ -97,7 +97,6 @@ async function encodeAndFormat(req, files, endpoint, mode) {
filepath: file.filepath,
filename: file.filename,
embedded: !!file.embedded,
metadata: file.metadata,
};
if (file.height && file.width) {

View File

@@ -20,7 +20,7 @@ const {
const { EnvVar } = require('@librechat/agents');
const { addResourceFileId, deleteResourceFileId } = require('~/server/controllers/assistants/v2');
const { convertImage, resizeAndConvert } = require('~/server/services/Files/images');
const { addAgentResourceFile, removeAgentResourceFiles } = require('~/models/Agent');
const { addAgentResourceFile, removeAgentResourceFile } = require('~/models/Agent');
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
const { createFile, updateFileUsage, deleteFiles } = require('~/models/File');
const { loadAuthValues } = require('~/app/clients/tools/util');
@@ -29,34 +29,10 @@ const { getStrategyFunctions } = require('./strategies');
const { determineFileType } = require('~/server/utils');
const { logger } = require('~/config');
/**
*
* @param {Array<MongoFile>} files
* @param {Array<string>} [fileIds]
* @returns
*/
const processFiles = async (files, fileIds) => {
const processFiles = async (files) => {
const promises = [];
const seen = new Set();
for (let file of files) {
const { file_id } = file;
if (seen.has(file_id)) {
continue;
}
seen.add(file_id);
promises.push(updateFileUsage({ file_id }));
}
if (!fileIds) {
return await Promise.all(promises);
}
for (let file_id of fileIds) {
if (seen.has(file_id)) {
continue;
}
seen.add(file_id);
promises.push(updateFileUsage({ file_id }));
}
@@ -68,7 +44,7 @@ const processFiles = async (files, fileIds) => {
* Enqueues the delete operation to the leaky bucket queue if necessary, or adds it directly to promises.
*
* @param {object} params - The passed parameters.
* @param {ServerRequest} params.req - The express request object.
* @param {Express.Request} params.req - The express request object.
* @param {MongoFile} params.file - The file object to delete.
* @param {Function} params.deleteFile - The delete file function.
* @param {Promise[]} params.promises - The array of promises to await.
@@ -115,7 +91,7 @@ function enqueueDeleteOperation({ req, file, deleteFile, promises, resolvedFileI
*
* @param {Object} params - The params object.
* @param {MongoFile[]} params.files - The file objects to delete.
* @param {ServerRequest} params.req - The express request object.
* @param {Express.Request} params.req - The express request object.
* @param {DeleteFilesBody} params.req.body - The request body.
* @param {string} [params.req.body.agent_id] - The agent ID if file uploaded is associated to an agent.
* @param {string} [params.req.body.assistant_id] - The assistant ID if file uploaded is associated to an assistant.
@@ -152,16 +128,18 @@ const processDeleteRequest = async ({ req, files }) => {
await initializeClients();
}
const agentFiles = [];
for (const file of files) {
const source = file.source ?? FileSources.local;
if (req.body.agent_id && req.body.tool_resource) {
agentFiles.push({
tool_resource: req.body.tool_resource,
file_id: file.file_id,
});
promises.push(
removeAgentResourceFile({
req,
file_id: file.file_id,
agent_id: req.body.agent_id,
tool_resource: req.body.tool_resource,
}),
);
}
if (checkOpenAIStorage(source) && !client[source]) {
@@ -205,15 +183,6 @@ const processDeleteRequest = async ({ req, files }) => {
enqueueDeleteOperation({ req, file, deleteFile, promises, resolvedFileIds, openai });
}
if (agentFiles.length > 0) {
promises.push(
removeAgentResourceFiles({
agent_id: req.body.agent_id,
files: agentFiles,
}),
);
}
await Promise.allSettled(promises);
await deleteFiles(resolvedFileIds);
};
@@ -273,14 +242,14 @@ const processFileURL = async ({ fileStrategy, userId, URL, fileName, basePath, c
* Saves file metadata to the database with an expiry TTL.
*
* @param {Object} params - The parameters object.
* @param {ServerRequest} params.req - The Express request object.
* @param {Express.Request} params.req - The Express request object.
* @param {Express.Response} [params.res] - The Express response object.
* @param {Express.Multer.File} params.file - The uploaded file.
* @param {ImageMetadata} params.metadata - Additional metadata for the file.
* @param {boolean} params.returnFile - Whether to return the file metadata or return response as normal.
* @returns {Promise<void>}
*/
const processImageFile = async ({ req, res, metadata, returnFile = false }) => {
const { file } = req;
const processImageFile = async ({ req, res, file, metadata, returnFile = false }) => {
const source = req.app.locals.fileStrategy;
const { handleImageUpload } = getStrategyFunctions(source);
const { file_id, temp_file_id, endpoint } = metadata;
@@ -320,7 +289,7 @@ const processImageFile = async ({ req, res, metadata, returnFile = false }) => {
* returns minimal file metadata, without saving to the database.
*
* @param {Object} params - The parameters object.
* @param {ServerRequest} params.req - The Express request object.
* @param {Express.Request} params.req - The Express request object.
* @param {FileContext} params.context - The context of the file (e.g., 'avatar', 'image_generation', etc.)
* @param {boolean} [params.resize=true] - Whether to resize and convert the image to target format. Default is `true`.
* @param {{ buffer: Buffer, width: number, height: number, bytes: number, filename: string, type: string, file_id: string }} [params.metadata] - Required metadata for the file if resize is false.
@@ -366,12 +335,13 @@ const uploadImageBuffer = async ({ req, context, metadata = {}, resize = true })
* Files must be deleted from the server filesystem manually.
*
* @param {Object} params - The parameters object.
* @param {ServerRequest} params.req - The Express request object.
* @param {Express.Request} params.req - The Express request object.
* @param {Express.Response} params.res - The Express response object.
* @param {Express.Multer.File} params.file - The uploaded file.
* @param {FileMetadata} params.metadata - Additional metadata for the file.
* @returns {Promise<void>}
*/
const processFileUpload = async ({ req, res, metadata }) => {
const processFileUpload = async ({ req, res, file, metadata }) => {
const isAssistantUpload = isAssistantsEndpoint(metadata.endpoint);
const assistantSource =
metadata.endpoint === EModelEndpoint.azureAssistants ? FileSources.azure : FileSources.openai;
@@ -385,7 +355,6 @@ const processFileUpload = async ({ req, res, metadata }) => {
({ openai } = await getOpenAIClient({ req }));
}
const { file } = req;
const {
id,
bytes,
@@ -453,13 +422,13 @@ const processFileUpload = async ({ req, res, metadata }) => {
* Files must be deleted from the server filesystem manually.
*
* @param {Object} params - The parameters object.
* @param {ServerRequest} params.req - The Express request object.
* @param {Express.Request} params.req - The Express request object.
* @param {Express.Response} params.res - The Express response object.
* @param {Express.Multer.File} params.file - The uploaded file.
* @param {FileMetadata} params.metadata - Additional metadata for the file.
* @returns {Promise<void>}
*/
const processAgentFileUpload = async ({ req, res, metadata }) => {
const { file } = req;
const processAgentFileUpload = async ({ req, res, file, metadata }) => {
const { agent_id, tool_resource } = metadata;
if (agent_id && !tool_resource) {
throw new Error('No tool resource provided for agent file upload');
@@ -484,7 +453,6 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
stream,
filename: file.originalname,
apiKey: result[EnvVar.CODE_API_KEY],
entity_id: messageAttachment === true ? undefined : agent_id,
});
fileInfoMetadata = { fileIdentifier };
}
@@ -608,7 +576,7 @@ const processOpenAIFile = async ({
/**
* Process OpenAI image files, convert to target format, save and return file metadata.
* @param {object} params - The params object.
* @param {ServerRequest} params.req - The Express request object.
* @param {Express.Request} params.req - The Express request object.
* @param {Buffer} params.buffer - The image buffer.
* @param {string} params.file_id - The file ID.
* @param {string} params.filename - The filename.
@@ -740,20 +708,20 @@ async function retrieveAndProcessFile({
* Filters a file based on its size and the endpoint origin.
*
* @param {Object} params - The parameters for the function.
* @param {ServerRequest} params.req - The request object from Express.
* @param {object} params.req - The request object from Express.
* @param {string} [params.req.endpoint]
* @param {string} [params.req.file_id]
* @param {number} [params.req.width]
* @param {number} [params.req.height]
* @param {number} [params.req.version]
* @param {Express.Multer.File} params.file - The file uploaded to the server via multer.
* @param {boolean} [params.image] - Whether the file expected is an image.
* @param {boolean} [params.isAvatar] - Whether the file expected is a user or entity avatar.
* @returns {void}
*
* @throws {Error} If a file exception is caught (invalid file size or type, lack of metadata).
*/
function filterFile({ req, image, isAvatar }) {
const { file } = req;
function filterFile({ req, file, image, isAvatar }) {
const { endpoint, file_id, width, height } = req.body;
if (!file_id && !isAvatar) {

View File

@@ -7,7 +7,6 @@ const { logger } = require('~/config');
*
* @param {string} userId - The unique identifier of the user for whom the plugin authentication value is to be retrieved.
* @param {string} authField - The specific authentication field (e.g., 'API_KEY', 'URL') whose value is to be retrieved and decrypted.
* @param {boolean} throwError - Whether to throw an error if the authentication value does not exist. Defaults to `true`.
* @returns {Promise<string|null>} A promise that resolves to the decrypted authentication value if found, or `null` if no such authentication value exists for the given user and field.
*
* The function throws an error if it encounters any issue during the retrieval or decryption process, or if the authentication value does not exist.
@@ -23,7 +22,7 @@ const { logger } = require('~/config');
* @throws {Error} Throws an error if there's an issue during the retrieval or decryption process, or if the authentication value does not exist.
* @async
*/
const getUserPluginAuthValue = async (userId, authField, throwError = true) => {
const getUserPluginAuthValue = async (userId, authField) => {
try {
const pluginAuth = await PluginAuth.findOne({ userId, authField }).lean();
if (!pluginAuth) {
@@ -33,9 +32,6 @@ const getUserPluginAuthValue = async (userId, authField, throwError = true) => {
const decryptedValue = await decrypt(pluginAuth.value);
return decryptedValue;
} catch (err) {
if (!throwError) {
return null;
}
logger.error('[getUserPluginAuthValue]', err);
throw err;
}

View File

@@ -1,8 +1,8 @@
const fs = require('fs');
const path = require('path');
const { zodToJsonSchema } = require('zod-to-json-schema');
const { tool: toolFn, Tool } = require('@langchain/core/tools');
const { Calculator } = require('@langchain/community/tools/calculator');
const { tool: toolFn, Tool } = require('@langchain/core/tools');
const {
Tools,
ContentTypes,
@@ -170,7 +170,7 @@ async function processRequiredActions(client, requiredActions) {
requiredActions,
);
const tools = requiredActions.map((action) => action.tool);
const { loadedTools } = await loadTools({
const loadedTools = await loadTools({
user: client.req.user.id,
model: client.req.body.model ?? 'gpt-4o-mini',
tools,
@@ -183,6 +183,7 @@ async function processRequiredActions(client, requiredActions) {
fileStrategy: client.req.app.locals.fileStrategy,
returnMetadata: true,
},
skipSpecs: true,
});
const ToolMap = loadedTools.reduce((map, tool) => {
@@ -377,21 +378,21 @@ async function loadAgentTools({ req, agent_id, tools, tool_resources, openAIApiK
if (!tools || tools.length === 0) {
return {};
}
const { loadedTools, toolContextMap } = await loadTools({
const loadedTools = await loadTools({
user: req.user.id,
// model: req.body.model ?? 'gpt-4o-mini',
tools,
functions: true,
isAgent: agent_id != null,
options: {
req,
openAIApiKey,
tool_resources,
returnMetadata: true,
processFileURL,
uploadImageBuffer,
returnMetadata: true,
fileStrategy: req.app.locals.fileStrategy,
},
skipSpecs: true,
});
const agentTools = [];
@@ -402,19 +403,16 @@ async function loadAgentTools({ req, agent_id, tools, tool_resources, openAIApiK
continue;
}
const toolDefinition = {
name: tool.name,
schema: tool.schema,
description: tool.description,
};
if (imageGenTools.has(tool.name)) {
toolDefinition.responseFormat = 'content_and_artifact';
}
const toolInstance = toolFn(async (...args) => {
return tool['_call'](...args);
}, toolDefinition);
const toolInstance = toolFn(
async (...args) => {
return tool['_call'](...args);
},
{
name: tool.name,
description: tool.description,
schema: tool.schema,
},
);
agentTools.push(toolInstance);
}
@@ -478,7 +476,6 @@ async function loadAgentTools({ req, agent_id, tools, tool_resources, openAIApiK
return {
tools: agentTools,
toolContextMap,
};
}

View File

@@ -32,20 +32,17 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
bookmarks: interfaceConfig?.bookmarks ?? defaults.bookmarks,
prompts: interfaceConfig?.prompts ?? defaults.prompts,
multiConvo: interfaceConfig?.multiConvo ?? defaults.multiConvo,
agents: interfaceConfig?.agents ?? defaults.agents,
});
await updateAccessPermissions(roleName, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: loadedInterface.bookmarks },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: loadedInterface.multiConvo },
[PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
});
await updateAccessPermissions(SystemRoles.ADMIN, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: loadedInterface.bookmarks },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: loadedInterface.multiConvo },
[PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
});
let i = 0;

View File

@@ -7,15 +7,8 @@ jest.mock('~/models/Role', () => ({
}));
describe('loadDefaultInterface', () => {
it('should call updateAccessPermissions with the correct parameters when permission types are true', async () => {
const config = {
interface: {
prompts: true,
bookmarks: true,
multiConvo: true,
agents: true,
},
};
it('should call updateAccessPermissions with the correct parameters when prompts and bookmarks are true', async () => {
const config = { interface: { prompts: true, bookmarks: true } };
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
@@ -23,20 +16,12 @@ describe('loadDefaultInterface', () => {
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
});
});
it('should call updateAccessPermissions with false when permission types are false', async () => {
const config = {
interface: {
prompts: false,
bookmarks: false,
multiConvo: false,
agents: false,
},
};
it('should call updateAccessPermissions with false when prompts and bookmarks are false', async () => {
const config = { interface: { prompts: false, bookmarks: false } };
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
@@ -44,12 +29,11 @@ describe('loadDefaultInterface', () => {
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: false },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: false },
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
});
});
it('should call updateAccessPermissions with undefined when permission types are not specified in config', async () => {
it('should call updateAccessPermissions with undefined when prompts and bookmarks are not specified in config', async () => {
const config = {};
const configDefaults = { interface: {} };
@@ -59,19 +43,11 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
});
});
it('should call updateAccessPermissions with undefined when permission types are explicitly undefined', async () => {
const config = {
interface: {
prompts: undefined,
bookmarks: undefined,
multiConvo: undefined,
agents: undefined,
},
};
it('should call updateAccessPermissions with undefined when prompts and bookmarks are explicitly undefined', async () => {
const config = { interface: { prompts: undefined, bookmarks: undefined } };
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
@@ -80,19 +56,11 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
});
});
it('should call updateAccessPermissions with mixed values for permission types', async () => {
const config = {
interface: {
prompts: true,
bookmarks: false,
multiConvo: undefined,
agents: true,
},
};
it('should call updateAccessPermissions with mixed values for prompts and bookmarks', async () => {
const config = { interface: { prompts: true, bookmarks: false } };
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
@@ -101,28 +69,19 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
});
});
it('should call updateAccessPermissions with true when config is undefined', async () => {
const config = undefined;
const configDefaults = {
interface: {
prompts: true,
bookmarks: true,
multiConvo: true,
agents: true,
},
};
const configDefaults = { interface: { prompts: true, bookmarks: true } };
await loadDefaultInterface(config, configDefaults);
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
});
});
@@ -136,7 +95,6 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
});
});
@@ -150,7 +108,6 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: false },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
});
});
@@ -164,19 +121,11 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
});
});
it('should call updateAccessPermissions with all interface options including multiConvo', async () => {
const config = {
interface: {
prompts: true,
bookmarks: false,
multiConvo: true,
agents: false,
},
};
const config = { interface: { prompts: true, bookmarks: false, multiConvo: true } };
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
@@ -185,20 +134,12 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
});
});
it('should use default values for multiConvo when config is undefined', async () => {
const config = undefined;
const configDefaults = {
interface: {
prompts: true,
bookmarks: true,
multiConvo: false,
agents: undefined,
},
};
const configDefaults = { interface: { prompts: true, bookmarks: true, multiConvo: false } };
await loadDefaultInterface(config, configDefaults);
@@ -206,7 +147,6 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: false },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
});
});
});

View File

@@ -196,11 +196,14 @@ function generateConfig(key, baseURL, endpoint) {
if (agents) {
config.capabilities = [
AgentCapabilities.execute_code,
AgentCapabilities.file_search,
AgentCapabilities.actions,
AgentCapabilities.tools,
];
if (key === 'EXPERIMENTAL_RUN_CODE') {
config.capabilities.push(AgentCapabilities.execute_code);
}
}
if (assistants && endpoint === EModelEndpoint.azureAssistants) {

View File

@@ -56,33 +56,12 @@
* @memberof typedefs
*/
/**
* @exports BaseMessage
* @typedef {import('@langchain/core/messages').BaseMessage} BaseMessage
* @memberof typedefs
*/
/**
* @exports UsageMetadata
* @typedef {import('@langchain/core/messages').UsageMetadata} UsageMetadata
* @memberof typedefs
*/
/**
* @exports GraphRunnableConfig
* @typedef {import('@langchain/core/runnables').RunnableConfig<{
* req: ServerRequest;
* thread_id: string;
* run_id: string;
* agent_id: string;
* name: string;
* agent_index: number;
* last_agent_index: number;
* hide_sequential_outputs: boolean;
* }>} GraphRunnableConfig
* @memberof typedefs
*/
/**
* @exports Ollama
* @typedef {import('ollama').Ollama} Ollama
@@ -710,12 +689,6 @@
* @memberof typedefs
*/
/**
* @exports ToolCallData
* @typedef {import('~/models/schema/toolCallSchema.js').ToolCallData} ToolCallData
* @memberof typedefs
*/
/**
* @exports MongoUser
* @typedef {import('~/models/schema/userSchema.js').MongoUser} MongoUser
@@ -830,12 +803,6 @@
* @memberof typedefs
*/
/**
* @exports AgentToolResources
* @typedef {import('librechat-data-provider').AgentToolResources} AgentToolResources
* @memberof typedefs
*/
/**
* @exports AgentCreateParams
* @typedef {import('librechat-data-provider').AgentCreateParams} AgentCreateParams

View File

@@ -49,7 +49,6 @@ const googleModels = {
/* Max I/O is combined so we subtract the amount from max response tokens for actual total */
gemini: 30720, // -2048 from max
'gemini-pro-vision': 12288, // -4096 from max
'gemini-exp': 8000,
'gemini-1.5': 1048576, // -8192 from max
'text-bison-32k': 32758, // -10 from max
'chat-bison-32k': 32758, // -10 from max
@@ -117,10 +116,6 @@ const amazonModels = {
'amazon.titan-text-lite-v1': 4000,
'amazon.titan-text-express-v1': 8000,
'amazon.titan-text-premier-v1:0': 31500, // -500 from max
// https://aws.amazon.com/ai/generative-ai/nova/
'amazon.nova-micro-v1:0': 127000, // -1000 from max,
'amazon.nova-lite-v1:0': 295000, // -5000 from max,
'amazon.nova-pro-v1:0': 295000, // -5000 from max,
};
const bedrockModels = {

View File

@@ -96,7 +96,6 @@
"remark-gfm": "^4.0.0",
"remark-math": "^6.0.0",
"remark-supersub": "^1.0.0",
"sse.js": "^2.5.0",
"tailwind-merge": "^1.9.1",
"tailwindcss-animate": "^1.0.5",
"tailwindcss-radix": "^2.8.0",

View File

@@ -1 +0,0 @@
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>C</title><path d="M16.5921 9.1962s-.354-3.298-3.627-3.39c-3.2741-.09-4.9552 2.474-4.9552 6.14 0 3.6651 1.858 6.5972 5.0451 6.5972 3.184 0 3.5381-3.665 3.5381-3.665l6.1041.365s.36 3.31-2.196 5.836c-2.552 2.5241-5.6901 2.9371-7.8762 2.9201-2.19-.017-5.2261.034-8.1602-2.97-2.938-3.0101-3.436-5.9302-3.436-8.8002 0-2.8701.556-6.6702 4.047-9.5502C7.444.72 9.849 0 12.254 0c10.0422 0 10.7172 9.2602 10.7172 9.2602z"/></svg>

Before

Width:  |  Height:  |  Size: 496 B

View File

@@ -1 +0,0 @@
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>C++</title><path d="M22.394 6c-.167-.29-.398-.543-.652-.69L12.926.22c-.509-.294-1.34-.294-1.848 0L2.26 5.31c-.508.293-.923 1.013-.923 1.6v10.18c0 .294.104.62.271.91.167.29.398.543.652.69l8.816 5.09c.508.293 1.34.293 1.848 0l8.816-5.09c.254-.147.485-.4.652-.69.167-.29.27-.616.27-.91V6.91c.003-.294-.1-.62-.268-.91zM12 19.11c-3.92 0-7.109-3.19-7.109-7.11 0-3.92 3.19-7.11 7.11-7.11a7.133 7.133 0 016.156 3.553l-3.076 1.78a3.567 3.567 0 00-3.08-1.78A3.56 3.56 0 008.444 12 3.56 3.56 0 0012 15.555a3.57 3.57 0 003.08-1.778l3.078 1.78A7.135 7.135 0 0112 19.11zm7.11-6.715h-.79v.79h-.79v-.79h-.79v-.79h.79v-.79h.79v.79h.79zm2.962 0h-.79v.79h-.79v-.79h-.79v-.79h.79v-.79h.79v.79h.79z"/></svg>

Before

Width:  |  Height:  |  Size: 764 B

View File

@@ -1 +0,0 @@
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>Fortran</title><path d="M19.536 0H4.464A4.463 4.463 0 0 0 0 4.464v15.073A4.463 4.463 0 0 0 4.464 24h15.073A4.463 4.463 0 0 0 24 19.536V4.464A4.463 4.463 0 0 0 19.536 0zm1.193 6.493v3.871l-.922-.005c-.507-.003-.981-.021-1.052-.041-.128-.036-.131-.05-.192-.839-.079-1.013-.143-1.462-.306-2.136-.352-1.457-1.096-2.25-2.309-2.463-.509-.089-2.731-.176-4.558-.177L10.13 4.7v5.82l.662-.033c.757-.038 1.353-.129 1.64-.252.306-.131.629-.462.781-.799.158-.352.262-.815.345-1.542.033-.286.07-.572.083-.636.024-.116.028-.117 1.036-.117h1.012v9.3h-2.062l-.035-.536c-.063-.971-.252-1.891-.479-2.331-.311-.601-.922-.871-2.151-.95a11.422 11.422 0 0 1-.666-.059l-.172-.027.02 2.926c.021 3.086.03 3.206.265 3.465.241.266.381.284 2.827.368.05.002.065.246.065 1.041v1.039H3.271v-1.039c0-.954.007-1.039.091-1.041.05-.001.543-.023 1.097-.049.891-.042 1.033-.061 1.244-.167a.712.712 0 0 0 .345-.328c.106-.206.107-.254.107-6.78 0-6.133-.006-6.584-.09-6.737a.938.938 0 0 0-.553-.436c-.104-.032-.65-.07-1.215-.086l-1.026-.027V2.622h17.458v3.871z"/></svg>

Before

Width:  |  Height:  |  Size: 1.1 KiB

View File

@@ -1 +0,0 @@
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>Go</title><path d="M1.811 10.231c-.047 0-.058-.023-.035-.059l.246-.315c.023-.035.081-.058.128-.058h4.172c.046 0 .058.035.035.07l-.199.303c-.023.036-.082.07-.117.07zM.047 11.306c-.047 0-.059-.023-.035-.058l.245-.316c.023-.035.082-.058.129-.058h5.328c.047 0 .07.035.058.07l-.093.28c-.012.047-.058.07-.105.07zm2.828 1.075c-.047 0-.059-.035-.035-.07l.163-.292c.023-.035.07-.07.117-.07h2.337c.047 0 .07.035.07.082l-.023.28c0 .047-.047.082-.082.082zm12.129-2.36c-.736.187-1.239.327-1.963.514-.176.046-.187.058-.34-.117-.174-.199-.303-.327-.548-.444-.737-.362-1.45-.257-2.115.175-.795.514-1.204 1.274-1.192 2.22.011.935.654 1.706 1.577 1.835.795.105 1.46-.175 1.987-.77.105-.13.198-.27.315-.434H10.47c-.245 0-.304-.152-.222-.35.152-.362.432-.97.596-1.274a.315.315 0 01.292-.187h4.253c-.023.316-.023.631-.07.947a4.983 4.983 0 01-.958 2.29c-.841 1.11-1.94 1.8-3.33 1.986-1.145.152-2.209-.07-3.143-.77-.865-.655-1.356-1.52-1.484-2.595-.152-1.274.222-2.419.993-3.424.83-1.086 1.928-1.776 3.272-2.02 1.098-.2 2.15-.07 3.096.571.62.41 1.063.97 1.356 1.648.07.105.023.164-.117.2m3.868 6.461c-1.064-.024-2.034-.328-2.852-1.029a3.665 3.665 0 01-1.262-2.255c-.21-1.32.152-2.489.947-3.529.853-1.122 1.881-1.706 3.272-1.95 1.192-.21 2.314-.095 3.33.595.923.63 1.496 1.484 1.648 2.605.198 1.578-.257 2.863-1.344 3.962-.771.783-1.718 1.273-2.805 1.495-.315.06-.63.07-.934.106zm2.78-4.72c-.011-.153-.011-.27-.034-.387-.21-1.157-1.274-1.81-2.384-1.554-1.087.245-1.788.935-2.045 2.033-.21.912.234 1.835 1.075 2.21.643.28 1.285.244 1.905-.07.923-.48 1.425-1.228 1.484-2.233z"/></svg>

Before

Width:  |  Height:  |  Size: 1.6 KiB

View File

@@ -1 +0,0 @@
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>Node.js</title><path d="M11.998,24c-0.321,0-0.641-0.084-0.922-0.247l-2.936-1.737c-0.438-0.245-0.224-0.332-0.08-0.383 c0.585-0.203,0.703-0.25,1.328-0.604c0.065-0.037,0.151-0.023,0.218,0.017l2.256,1.339c0.082,0.045,0.197,0.045,0.272,0l8.795-5.076 c0.082-0.047,0.134-0.141,0.134-0.238V6.921c0-0.099-0.053-0.192-0.137-0.242l-8.791-5.072c-0.081-0.047-0.189-0.047-0.271,0 L3.075,6.68C2.99,6.729,2.936,6.825,2.936,6.921v10.15c0,0.097,0.054,0.189,0.139,0.235l2.409,1.392 c1.307,0.654,2.108-0.116,2.108-0.89V7.787c0-0.142,0.114-0.253,0.256-0.253h1.115c0.139,0,0.255,0.112,0.255,0.253v10.021 c0,1.745-0.95,2.745-2.604,2.745c-0.508,0-0.909,0-2.026-0.551L2.28,18.675c-0.57-0.329-0.922-0.945-0.922-1.604V6.921 c0-0.659,0.353-1.275,0.922-1.603l8.795-5.082c0.557-0.315,1.296-0.315,1.848,0l8.794,5.082c0.57,0.329,0.924,0.944,0.924,1.603 v10.15c0,0.659-0.354,1.273-0.924,1.604l-8.794,5.078C12.643,23.916,12.324,24,11.998,24z M19.099,13.993 c0-1.9-1.284-2.406-3.987-2.763c-2.731-0.361-3.009-0.548-3.009-1.187c0-0.528,0.235-1.233,2.258-1.233 c1.807,0,2.473,0.389,2.747,1.607c0.024,0.115,0.129,0.199,0.247,0.199h1.141c0.071,0,0.138-0.031,0.186-0.081 c0.048-0.054,0.074-0.123,0.067-0.196c-0.177-2.098-1.571-3.076-4.388-3.076c-2.508,0-4.004,1.058-4.004,2.833 c0,1.925,1.488,2.457,3.895,2.695c2.88,0.282,3.103,0.703,3.103,1.269c0,0.983-0.789,1.402-2.642,1.402 c-2.327,0-2.839-0.584-3.011-1.742c-0.02-0.124-0.126-0.215-0.253-0.215h-1.137c-0.141,0-0.254,0.112-0.254,0.253 c0,1.482,0.806,3.248,4.655,3.248C17.501,17.007,19.099,15.91,19.099,13.993z"/></svg>

Before

Width:  |  Height:  |  Size: 1.6 KiB

View File

@@ -1 +0,0 @@
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>PHP</title><path d="M7.01 10.207h-.944l-.515 2.648h.838c.556 0 .97-.105 1.242-.314.272-.21.455-.559.55-1.049.092-.47.05-.802-.124-.995-.175-.193-.523-.29-1.047-.29zM12 5.688C5.373 5.688 0 8.514 0 12s5.373 6.313 12 6.313S24 15.486 24 12c0-3.486-5.373-6.312-12-6.312zm-3.26 7.451c-.261.25-.575.438-.917.551-.336.108-.765.164-1.285.164H5.357l-.327 1.681H3.652l1.23-6.326h2.65c.797 0 1.378.209 1.744.628.366.418.476 1.002.33 1.752a2.836 2.836 0 0 1-.305.847c-.143.255-.33.49-.561.703zm4.024.715l.543-2.799c.063-.318.039-.536-.068-.651-.107-.116-.336-.174-.687-.174H11.46l-.704 3.625H9.388l1.23-6.327h1.367l-.327 1.682h1.218c.767 0 1.295.134 1.586.401s.378.7.263 1.299l-.572 2.944h-1.389zm7.597-2.265a2.782 2.782 0 0 1-.305.847c-.143.255-.33.49-.561.703a2.44 2.44 0 0 1-.917.551c-.336.108-.765.164-1.286.164h-1.18l-.327 1.682h-1.378l1.23-6.326h2.649c.797 0 1.378.209 1.744.628.366.417.477 1.001.331 1.751zM17.766 10.207h-.943l-.516 2.648h.838c.557 0 .971-.105 1.242-.314.272-.21.455-.559.551-1.049.092-.47.049-.802-.125-.995s-.524-.29-1.047-.29z"/></svg>

Before

Width:  |  Height:  |  Size: 1.1 KiB

View File

@@ -1 +0,0 @@
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>Python</title><path d="M14.25.18l.9.2.73.26.59.3.45.32.34.34.25.34.16.33.1.3.04.26.02.2-.01.13V8.5l-.05.63-.13.55-.21.46-.26.38-.3.31-.33.25-.35.19-.35.14-.33.1-.3.07-.26.04-.21.02H8.77l-.69.05-.59.14-.5.22-.41.27-.33.32-.27.35-.2.36-.15.37-.1.35-.07.32-.04.27-.02.21v3.06H3.17l-.21-.03-.28-.07-.32-.12-.35-.18-.36-.26-.36-.36-.35-.46-.32-.59-.28-.73-.21-.88-.14-1.05-.05-1.23.06-1.22.16-1.04.24-.87.32-.71.36-.57.4-.44.42-.33.42-.24.4-.16.36-.1.32-.05.24-.01h.16l.06.01h8.16v-.83H6.18l-.01-2.75-.02-.37.05-.34.11-.31.17-.28.25-.26.31-.23.38-.2.44-.18.51-.15.58-.12.64-.1.71-.06.77-.04.84-.02 1.27.05zm-6.3 1.98l-.23.33-.08.41.08.41.23.34.33.22.41.09.41-.09.33-.22.23-.34.08-.41-.08-.41-.23-.33-.33-.22-.41-.09-.41.09zm13.09 3.95l.28.06.32.12.35.18.36.27.36.35.35.47.32.59.28.73.21.88.14 1.04.05 1.23-.06 1.23-.16 1.04-.24.86-.32.71-.36.57-.4.45-.42.33-.42.24-.4.16-.36.09-.32.05-.24.02-.16-.01h-8.22v.82h5.84l.01 2.76.02.36-.05.34-.11.31-.17.29-.25.25-.31.24-.38.2-.44.17-.51.15-.58.13-.64.09-.71.07-.77.04-.84.01-1.27-.04-1.07-.14-.9-.2-.73-.25-.59-.3-.45-.33-.34-.34-.25-.34-.16-.33-.1-.3-.04-.25-.02-.2.01-.13v-5.34l.05-.64.13-.54.21-.46.26-.38.3-.32.33-.24.35-.2.35-.14.33-.1.3-.06.26-.04.21-.02.13-.01h5.84l.69-.05.59-.14.5-.21.41-.28.33-.32.27-.35.2-.36.15-.36.1-.35.07-.32.04-.28.02-.21V6.07h2.09l.14.01zm-6.47 14.25l-.23.33-.08.41.08.41.23.33.33.23.41.08.41-.08.33-.23.23-.33.08-.41-.08-.41-.23-.33-.33-.23-.41-.08-.41.08z"/></svg>

Before

Width:  |  Height:  |  Size: 1.5 KiB

View File

@@ -1 +0,0 @@
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>Rust</title><path d="M23.8346 11.7033l-1.0073-.6236a13.7268 13.7268 0 00-.0283-.2936l.8656-.8069a.3483.3483 0 00-.1154-.578l-1.1066-.414a8.4958 8.4958 0 00-.087-.2856l.6904-.9587a.3462.3462 0 00-.2257-.5446l-1.1663-.1894a9.3574 9.3574 0 00-.1407-.2622l.49-1.0761a.3437.3437 0 00-.0274-.3361.3486.3486 0 00-.3006-.154l-1.1845.0416a6.7444 6.7444 0 00-.1873-.2268l.2723-1.153a.3472.3472 0 00-.417-.4172l-1.1532.2724a14.0183 14.0183 0 00-.2278-.1873l.0415-1.1845a.3442.3442 0 00-.49-.328l-1.076.491c-.0872-.0476-.1742-.0952-.2623-.1407l-.1903-1.1673A.3483.3483 0 0016.256.955l-.9597.6905a8.4867 8.4867 0 00-.2855-.086l-.414-1.1066a.3483.3483 0 00-.5781-.1154l-.8069.8666a9.2936 9.2936 0 00-.2936-.0284L12.2946.1683a.3462.3462 0 00-.5892 0l-.6236 1.0073a13.7383 13.7383 0 00-.2936.0284L9.9803.3374a.3462.3462 0 00-.578.1154l-.4141 1.1065c-.0962.0274-.1903.0567-.2855.086L7.744.955a.3483.3483 0 00-.5447.2258L7.009 2.348a9.3574 9.3574 0 00-.2622.1407l-1.0762-.491a.3462.3462 0 00-.49.328l.0416 1.1845a7.9826 7.9826 0 00-.2278.1873L3.8413 3.425a.3472.3472 0 00-.4171.4171l.2713 1.1531c-.0628.075-.1255.1509-.1863.2268l-1.1845-.0415a.3462.3462 0 00-.328.49l.491 1.0761a9.167 9.167 0 00-.1407.2622l-1.1662.1894a.3483.3483 0 00-.2258.5446l.6904.9587a13.303 13.303 0 00-.087.2855l-1.1065.414a.3483.3483 0 00-.1155.5781l.8656.807a9.2936 9.2936 0 00-.0283.2935l-1.0073.6236a.3442.3442 0 000 .5892l1.0073.6236c.008.0982.0182.1964.0283.2936l-.8656.8079a.3462.3462 0 00.1155.578l1.1065.4141c.0273.0962.0567.1914.087.2855l-.6904.9587a.3452.3452 0 00.2268.5447l1.1662.1893c.0456.088.0922.1751.1408.2622l-.491 1.0762a.3462.3462 0 00.328.49l1.1834-.0415c.0618.0769.1235.1528.1873.2277l-.2713 1.1541a.3462.3462 0 00.4171.4161l1.153-.2713c.075.0638.151.1255.2279.1863l-.0415 1.1845a.3442.3442 0 00.49.327l1.0761-.49c.087.0486.1741.0951.2622.1407l.1903 1.1662a.3483.3483 0 00.5447.2268l.9587-.6904a9.299 9.299 0 00.2855.087l.414 
1.1066a.3452.3452 0 00.5781.1154l.8079-.8656c.0972.0111.1954.0203.2936.0294l.6236 1.0073a.3472.3472 0 00.5892 0l.6236-1.0073c.0982-.0091.1964-.0183.2936-.0294l.8069.8656a.3483.3483 0 00.578-.1154l.4141-1.1066a8.4626 8.4626 0 00.2855-.087l.9587.6904a.3452.3452 0 00.5447-.2268l.1903-1.1662c.088-.0456.1751-.0931.2622-.1407l1.0762.49a.3472.3472 0 00.49-.327l-.0415-1.1845a6.7267 6.7267 0 00.2267-.1863l1.1531.2713a.3472.3472 0 00.4171-.416l-.2713-1.1542c.0628-.0749.1255-.1508.1863-.2278l1.1845.0415a.3442.3442 0 00.328-.49l-.49-1.076c.0475-.0872.0951-.1742.1407-.2623l1.1662-.1893a.3483.3483 0 00.2258-.5447l-.6904-.9587.087-.2855 1.1066-.414a.3462.3462 0 00.1154-.5781l-.8656-.8079c.0101-.0972.0202-.1954.0283-.2936l1.0073-.6236a.3442.3442 0 000-.5892zm-6.7413 8.3551a.7138.7138 0 01.2986-1.396.714.714 0 11-.2997 1.396zm-.3422-2.3142a.649.649 0 00-.7715.5l-.3573 1.6685c-1.1035.501-2.3285.7795-3.6193.7795a8.7368 8.7368 0 01-3.6951-.814l-.3574-1.6684a.648.648 0 00-.7714-.499l-1.473.3158a8.7216 8.7216 0 01-.7613-.898h7.1676c.081 0 .1356-.0141.1356-.088v-2.536c0-.074-.0536-.0881-.1356-.0881h-2.0966v-1.6077h2.2677c.2065 0 1.1065.0587 1.394 1.2088.0901.3533.2875 1.5044.4232 1.8729.1346.413.6833 1.2381 1.2685 1.2381h3.5716a.7492.7492 0 00.1296-.0131 8.7874 8.7874 0 01-.8119.9526zM6.8369 20.024a.714.714 0 11-.2997-1.396.714.714 0 01.2997 1.396zM4.1177 8.9972a.7137.7137 0 11-1.304.5791.7137.7137 0 011.304-.579zm-.8352 1.9813l1.5347-.6824a.65.65 0 00.33-.8585l-.3158-.7147h1.2432v5.6025H3.5669a8.7753 8.7753 0 01-.2834-3.348zm6.7343-.5437V8.7836h2.9601c.153 0 1.0792.1772 1.0792.8697 0 .575-.7107.7815-1.2948.7815zm10.7574 1.4862c0 .2187-.008.4363-.0243.651h-.9c-.09 0-.1265.0586-.1265.1477v.413c0 .973-.5487 1.1846-1.0296 1.2382-.4576.0517-.9648-.1913-1.0275-.4717-.2704-1.5186-.7198-1.8436-1.4305-2.4034.8817-.5599 1.799-1.386 1.799-2.4915 0-1.1936-.819-1.9458-1.3769-2.3153-.7825-.5163-1.6491-.6195-1.883-.6195H5.4682a8.7651 8.7651 0 014.907-2.7699l1.0974 1.151a.648.648 0 
00.9182.0213l1.227-1.1743a8.7753 8.7753 0 016.0044 4.2762l-.8403 1.8982a.652.652 0 00.33.8585l1.6178.7188c.0283.2875.0425.577.0425.8717zm-9.3006-9.5993a.7128.7128 0 11.984 1.0316.7137.7137 0 01-.984-1.0316zm8.3389 6.71a.7107.7107 0 01.9395-.3625.7137.7137 0 11-.9405.3635z"/></svg>

Before

Width:  |  Height:  |  Size: 4.1 KiB

View File

@@ -1 +0,0 @@
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>ts-node</title><path d="M11.999 0c-.196 0-.392.05-.568.153L2.026 5.58a1.135 1.135 0 00-.568.983V17.43c0 .406.216.781.568.984l5.787 3.344v-7.344H4.748v-1.943h8.342v1.943h-3.065v8.622l1.406.812c.351.203.784.203 1.136 0l2.317-1.338a3.958 3.958 0 01-1.195-1.413l1.801-1.042c.361.59.806 1.06 1.48 1.25l2.174-1.256c-.127-.568-.698-.823-1.584-1.21l-.553-.238c-1.596-.68-2.655-1.532-2.655-3.334 0-1.658 1.265-2.922 3.24-2.922 1.406 0 2.417.49 3.144 1.77l-1.723 1.105c-.379-.68-.79-.948-1.421-.948-.648 0-1.06.41-1.06.948 0 .663.412.932 1.36 1.343l.553.237c1.336.573 2.255 1.155 2.676 2.107l.853-.493c.352-.203.568-.578.568-.984V6.565c0-.406-.216-.782-.568-.984L12.567.153A1.134 1.134 0 0011.999 0z"/></svg>

Before

Width:  |  Height:  |  Size: 776 B

View File

@@ -1,34 +0,0 @@
import { createContext, useContext, ReactNode, useCallback, useRef } from 'react';
type TCodeBlockContext = {
getNextIndex: (skip: boolean) => number;
resetCounter: () => void;
// codeBlocks: Map<number, string>;
};
export const CodeBlockContext = createContext<TCodeBlockContext>({} as TCodeBlockContext);
export const useCodeBlockContext = () => useContext(CodeBlockContext);
export function CodeBlockProvider({ children }: { children: ReactNode }) {
const counterRef = useRef(0);
// const codeBlocks = useRef(new Map<number, string>()).current;
const getNextIndex = useCallback((skip: boolean) => {
if (skip) {
return counterRef.current;
}
const nextIndex = counterRef.current;
counterRef.current += 1;
return nextIndex;
}, []);
const resetCounter = useCallback(() => {
counterRef.current = 0;
}, []);
return (
<CodeBlockContext.Provider value={{ getNextIndex, resetCounter }}>
{children}
</CodeBlockContext.Provider>
);
}

View File

@@ -1,9 +0,0 @@
import { createContext, useContext } from 'react';
type MessageContext = {
messageId: string;
partIndex?: number;
conversationId?: string | null;
};
export const MessageContext = createContext<MessageContext>({} as MessageContext);
export const useMessageContext = () => useContext(MessageContext);

View File

@@ -1,21 +0,0 @@
import { createContext, useContext } from 'react';
import useToolCallsMap from '~/hooks/Plugins/useToolCallsMap';
type ToolCallsMapContextType = ReturnType<typeof useToolCallsMap>;
export const ToolCallsMapContext = createContext<ToolCallsMapContextType>(
{} as ToolCallsMapContextType,
);
export const useToolCallsMapContext = () => useContext(ToolCallsMapContext);
interface ToolCallsMapProviderProps {
children: React.ReactNode;
conversationId: string;
}
export function ToolCallsMapProvider({ children, conversationId }: ToolCallsMapProviderProps) {
const toolCallsMap = useToolCallsMap({ conversationId });
return (
<ToolCallsMapContext.Provider value={toolCallsMap}>{children}</ToolCallsMapContext.Provider>
);
}

View File

@@ -9,12 +9,9 @@ export * from './FileMapContext';
export * from './AddedChatContext';
export * from './ChatFormContext';
export * from './BookmarkContext';
export * from './MessageContext';
export * from './DashboardContext';
export * from './AssistantsContext';
export * from './AgentsContext';
export * from './AssistantsMapContext';
export * from './AnnouncerContext';
export * from './AgentsMapContext';
export * from './CodeBlockContext';
export * from './ToolCallsMapContext';

View File

@@ -11,8 +11,6 @@ export type TAgentOption = OptionWithIcon &
export type TAgentCapabilities = {
[AgentCapabilities.execute_code]: boolean;
[AgentCapabilities.file_search]: boolean;
[AgentCapabilities.end_after_tools]?: boolean;
[AgentCapabilities.hide_sequential_outputs]?: boolean;
};
export type AgentForm = {
@@ -25,5 +23,4 @@ export type AgentForm = {
model_parameters: AgentModelParameters;
tools?: string[];
provider?: AgentProvider | OptionWithIcon;
agent_ids?: string[];
} & TAgentCapabilities;

View File

@@ -1,6 +1,5 @@
export * from './a11y';
export * from './artifacts';
export * from './types';
export * from './tools';
export * from './assistants-types';
export * from './agents-types';

View File

@@ -1,6 +0,0 @@
import type { AuthType } from 'librechat-data-provider';
export type ApiKeyFormData = {
apiKey: string;
authType?: string | AuthType;
};

View File

@@ -1,21 +1,36 @@
import { RefObject } from 'react';
import React from 'react';
import { FileSources } from 'librechat-data-provider';
import type * as InputNumberPrimitive from 'rc-input-number';
import type { ColumnDef } from '@tanstack/react-table';
import type { SetterOrUpdater } from 'recoil';
import type * as t from 'librechat-data-provider';
import type {
TRole,
TUser,
Agent,
Action,
TPreset,
TPlugin,
TMessage,
Assistant,
TResPlugin,
TLoginUser,
AuthTypeEnum,
TModelsConfig,
TConversation,
TStartupConfig,
EModelEndpoint,
TEndpointsConfig,
ActionMetadata,
AssistantDocument,
AssistantsEndpoint,
TMessageContentParts,
AuthorizationTypeEnum,
TSetOption as SetOption,
TokenExchangeMethodEnum,
} from 'librechat-data-provider';
import type { UseMutationResult } from '@tanstack/react-query';
import type { LucideIcon } from 'lucide-react';
export type CodeBarProps = {
lang: string;
error?: boolean;
plugin?: boolean;
blockIndex?: number;
allowExecution?: boolean;
codeRef: RefObject<HTMLElement>;
};
export enum PromptsEditorMode {
SIMPLE = 'simple',
ADVANCED = 'advanced',
@@ -50,21 +65,21 @@ export type AudioChunk = {
export type AssistantListItem = {
id: string;
name: string;
metadata: t.Assistant['metadata'];
metadata: Assistant['metadata'];
model: string;
};
export type AgentListItem = {
id: string;
name: string;
avatar: t.Agent['avatar'];
avatar: Agent['avatar'];
};
export type TPluginMap = Record<string, t.TPlugin>;
export type TPluginMap = Record<string, TPlugin>;
export type GenericSetter<T> = (value: T | ((currentValue: T) => T)) => void;
export type LastSelectedModels = Record<t.EModelEndpoint, string>;
export type LastSelectedModels = Record<EModelEndpoint, string>;
export type LocalizeFunction = (phraseKey: string, ...values: string[]) => string;
@@ -130,11 +145,11 @@ export type FileSetter =
export type ActionAuthForm = {
/* General */
type: t.AuthTypeEnum;
type: AuthTypeEnum;
saved_auth_fields: boolean;
/* API key */
api_key: string; // not nested
authorization_type: t.AuthorizationTypeEnum;
authorization_type: AuthorizationTypeEnum;
custom_auth_header: string;
/* OAuth */
oauth_client_id: string; // not nested
@@ -142,23 +157,23 @@ export type ActionAuthForm = {
authorization_url: string;
client_url: string;
scope: string;
token_exchange_method: t.TokenExchangeMethodEnum;
token_exchange_method: TokenExchangeMethodEnum;
};
export type ActionWithNullableMetadata = Omit<t.Action, 'metadata'> & {
metadata: t.ActionMetadata | null;
export type ActionWithNullableMetadata = Omit<Action, 'metadata'> & {
metadata: ActionMetadata | null;
};
export type AssistantPanelProps = {
index?: number;
action?: ActionWithNullableMetadata;
actions?: t.Action[];
actions?: Action[];
assistant_id?: string;
activePanel?: string;
endpoint: t.AssistantsEndpoint;
endpoint: AssistantsEndpoint;
version: number | string;
documentsMap: Map<string, t.AssistantDocument> | null;
setAction: React.Dispatch<React.SetStateAction<t.Action | undefined>>;
documentsMap: Map<string, AssistantDocument> | null;
setAction: React.Dispatch<React.SetStateAction<Action | undefined>>;
setCurrentAssistantId: React.Dispatch<React.SetStateAction<string | undefined>>;
setActivePanel: React.Dispatch<React.SetStateAction<Panel>>;
};
@@ -167,11 +182,11 @@ export type AgentPanelProps = {
index?: number;
agent_id?: string;
activePanel?: string;
action?: t.Action;
actions?: t.Action[];
action?: Action;
actions?: Action[];
setActivePanel: React.Dispatch<React.SetStateAction<Panel>>;
setAction: React.Dispatch<React.SetStateAction<t.Action | undefined>>;
endpointsConfig?: t.TEndpointsConfig;
setAction: React.Dispatch<React.SetStateAction<Action | undefined>>;
endpointsConfig?: TEndpointsConfig;
setCurrentAgentId: React.Dispatch<React.SetStateAction<string | undefined>>;
};
@@ -184,7 +199,7 @@ export type AgentModelPanelProps = {
export type AugmentedColumnDef<TData, TValue> = ColumnDef<TData, TValue> & DataColumnMeta;
export type TSetOption = t.TSetOption;
export type TSetOption = SetOption;
export type TSetExample = (
i: number,
@@ -219,7 +234,7 @@ export type TShowToast = {
};
export type TBaseSettingsProps = {
conversation: t.TConversation | t.TPreset | null;
conversation: TConversation | TPreset | null;
className?: string;
isPreset?: boolean;
readonly?: boolean;
@@ -240,7 +255,7 @@ export type TModelSelectProps = TSettingsProps & TModels;
export type TEditPresetProps = {
open: boolean;
onOpenChange: React.Dispatch<React.SetStateAction<boolean>>;
preset: t.TPreset;
preset: TPreset;
title?: string;
};
@@ -251,18 +266,18 @@ export type TSetOptionsPayload = {
addExample: () => void;
removeExample: () => void;
setAgentOption: TSetOption;
// getConversation: () => t.TConversation | t.TPreset | null;
// getConversation: () => TConversation | TPreset | null;
checkPluginSelection: (value: string) => boolean;
setTools: (newValue: string, remove?: boolean) => void;
setOptions?: TSetOptions;
};
export type TPresetItemProps = {
preset: t.TPreset;
value: t.TPreset;
onSelect: (preset: t.TPreset) => void;
onChangePreset: (preset: t.TPreset) => void;
onDeletePreset: (preset: t.TPreset) => void;
preset: TPreset;
value: TPreset;
onSelect: (preset: TPreset) => void;
onChangePreset: (preset: TPreset) => void;
onDeletePreset: (preset: TPreset) => void;
};
export type TOnClick = (e: React.MouseEvent<HTMLButtonElement>) => void;
@@ -287,16 +302,16 @@ export type TOptions = {
isRegenerate?: boolean;
isContinued?: boolean;
isEdited?: boolean;
overrideMessages?: t.TMessage[];
overrideMessages?: TMessage[];
};
export type TAskFunction = (props: TAskProps, options?: TOptions) => void;
export type TMessageProps = {
conversation?: t.TConversation | null;
conversation?: TConversation | null;
messageId?: string | null;
message?: t.TMessage;
messagesTree?: t.TMessage[];
message?: TMessage;
messagesTree?: TMessage[];
currentEditId: string | number | null;
isSearchView?: boolean;
siblingIdx?: number;
@@ -315,7 +330,7 @@ export type TInitialProps = {
};
export type TAdditionalProps = {
ask: TAskFunction;
message: t.TMessage;
message: TMessage;
isCreatedByUser: boolean;
siblingIdx: number;
enterEdit: (cancel: boolean) => void;
@@ -339,7 +354,7 @@ export type TDisplayProps = TText &
export type TConfigProps = {
userKey: string;
setUserKey: React.Dispatch<React.SetStateAction<string>>;
endpoint: t.EModelEndpoint | string;
endpoint: EModelEndpoint | string;
};
export type TDangerButtonProps = {
@@ -374,18 +389,18 @@ export type TResError = {
};
export type TAuthContext = {
user: t.TUser | undefined;
user: TUser | undefined;
token: string | undefined;
isAuthenticated: boolean;
error: string | undefined;
login: (data: t.TLoginUser) => void;
login: (data: TLoginUser) => void;
logout: () => void;
setError: React.Dispatch<React.SetStateAction<string | undefined>>;
roles?: Record<string, t.TRole | null | undefined>;
roles?: Record<string, TRole | null | undefined>;
};
export type TUserContext = {
user?: t.TUser | undefined;
user?: TUser | undefined;
token: string | undefined;
isAuthenticated: boolean;
redirect?: string;
@@ -396,16 +411,16 @@ export type TAuthConfig = {
test?: boolean;
};
export type IconProps = Pick<t.TMessage, 'isCreatedByUser' | 'model'> &
Pick<t.TConversation, 'chatGptLabel' | 'modelLabel' | 'jailbreak'> & {
export type IconProps = Pick<TMessage, 'isCreatedByUser' | 'model'> &
Pick<TConversation, 'chatGptLabel' | 'modelLabel' | 'jailbreak'> & {
size?: number;
button?: boolean;
iconURL?: string;
message?: boolean;
className?: string;
iconClassName?: string;
endpoint?: t.EModelEndpoint | string | null;
endpointType?: t.EModelEndpoint | null;
endpoint?: EModelEndpoint | string | null;
endpointType?: EModelEndpoint | null;
assistantName?: string;
agentName?: string;
error?: boolean;
@@ -425,7 +440,7 @@ export type VoiceOption = {
export type TMessageAudio = {
messageId?: string;
content?: t.TMessageContentParts[] | string;
content?: TMessageContentParts[] | string;
className?: string;
isLast: boolean;
index: number;
@@ -467,12 +482,12 @@ export interface ExtendedFile {
export type ContextType = { navVisible: boolean; setNavVisible: (visible: boolean) => void };
export interface SwitcherProps {
endpoint?: t.EModelEndpoint | null;
endpoint?: EModelEndpoint | null;
endpointKeyProvided: boolean;
isCollapsed: boolean;
}
export type TLoginLayoutContext = {
startupConfig: t.TStartupConfig | null;
startupConfig: TStartupConfig | null;
startupConfigError: unknown;
isFetching: boolean;
error: string | null;
@@ -482,34 +497,34 @@ export type TLoginLayoutContext = {
};
export type NewConversationParams = {
template?: Partial<t.TConversation>;
preset?: Partial<t.TPreset>;
modelsData?: t.TModelsConfig;
template?: Partial<TConversation>;
preset?: Partial<TPreset>;
modelsData?: TModelsConfig;
buildDefault?: boolean;
keepLatestMessage?: boolean;
keepAddedConvos?: boolean;
};
export type ConvoGenerator = (params: NewConversationParams) => void | t.TConversation;
export type ConvoGenerator = (params: NewConversationParams) => void | TConversation;
export type TBaseResData = {
plugin?: t.TResPlugin;
plugin?: TResPlugin;
final?: boolean;
initial?: boolean;
previousMessages?: t.TMessage[];
conversation: t.TConversation;
previousMessages?: TMessage[];
conversation: TConversation;
conversationId?: string;
runMessages?: t.TMessage[];
runMessages?: TMessage[];
};
export type TResData = TBaseResData & {
requestMessage: t.TMessage;
responseMessage: t.TMessage;
requestMessage: TMessage;
responseMessage: TMessage;
};
export type TFinalResData = TBaseResData & {
requestMessage?: t.TMessage;
responseMessage?: t.TMessage;
requestMessage?: TMessage;
responseMessage?: TMessage;
};
export type TVectorStore = {

View File

@@ -5,6 +5,7 @@ import { useChatContext, useAddedChatContext } from '~/Providers';
import { TooltipAnchor } from '~/components';
import { mainTextareaId } from '~/common';
import { useLocalize } from '~/hooks';
import { cn } from '~/utils';
function AddMultiConvo() {
const { conversation } = useChatContext();

View File

@@ -1,100 +0,0 @@
import * as Ariakit from '@ariakit/react';
import React, { useRef, useState } from 'react';
import { FileSearch, ImageUpIcon, TerminalSquareIcon } from 'lucide-react';
import { EToolResources } from 'librechat-data-provider';
import { FileUpload, TooltipAnchor, DropdownPopup } from '~/components/ui';
import { AttachmentIcon } from '~/components/svg';
import { useLocalize } from '~/hooks';
import { cn } from '~/utils';
interface AttachFileProps {
isRTL: boolean;
disabled?: boolean | null;
handleFileChange: (event: React.ChangeEvent<HTMLInputElement>) => void;
setToolResource?: React.Dispatch<React.SetStateAction<string | undefined>>;
}
const AttachFile = ({ isRTL, disabled, setToolResource, handleFileChange }: AttachFileProps) => {
const localize = useLocalize();
const isUploadDisabled = disabled ?? false;
const inputRef = useRef<HTMLInputElement>(null);
const [isPopoverActive, setIsPopoverActive] = useState(false);
const handleUploadClick = (isImage?: boolean) => {
if (!inputRef.current) {
return;
}
inputRef.current.value = '';
inputRef.current.accept = isImage === true ? 'image/*' : '';
inputRef.current.click();
inputRef.current.accept = '';
};
const dropdownItems = [
{
label: localize('com_ui_upload_image_input'),
onClick: () => {
setToolResource?.(undefined);
handleUploadClick(true);
},
icon: <ImageUpIcon className="icon-md" />,
},
{
label: localize('com_ui_upload_file_search'),
onClick: () => {
setToolResource?.(EToolResources.file_search);
handleUploadClick();
},
icon: <FileSearch className="icon-md" />,
},
{
label: localize('com_ui_upload_code_files'),
onClick: () => {
setToolResource?.(EToolResources.execute_code);
handleUploadClick();
},
icon: <TerminalSquareIcon className="icon-md" />,
},
];
const menuTrigger = (
<TooltipAnchor
render={
<Ariakit.MenuButton
disabled={isUploadDisabled}
id="attach-file-menu-button"
aria-label="Attach File Options"
className={cn(
'absolute flex size-[35px] items-center justify-center rounded-full p-1 transition-colors hover:bg-surface-hover focus:outline-none focus:ring-2 focus:ring-primary focus:ring-opacity-50',
isRTL ? 'bottom-2 right-2' : 'bottom-2 left-1 md:left-2',
)}
>
<div className="flex w-full items-center justify-center gap-2">
<AttachmentIcon />
</div>
</Ariakit.MenuButton>
}
id="attach-file-menu-button"
description={localize('com_sidepanel_attach_files')}
disabled={isUploadDisabled}
/>
);
return (
<FileUpload ref={inputRef} handleFileChange={handleFileChange}>
<div className="relative">
<DropdownPopup
menuId="attach-file-menu"
isOpen={isPopoverActive}
setIsOpen={setIsPopoverActive}
modal={true}
trigger={menuTrigger}
items={dropdownItems}
iconClassName="mr-0"
/>
</div>
</FileUpload>
);
};
export default React.memo(AttachFile);

View File

@@ -1,14 +1,12 @@
import { memo, useMemo } from 'react';
import { memo } from 'react';
import { useRecoilValue } from 'recoil';
import {
supportsFiles,
mergeFileConfig,
isAgentsEndpoint,
EndpointFileConfig,
fileConfig as defaultFileConfig,
} from 'librechat-data-provider';
import { useGetFileConfig } from '~/data-provider';
import AttachFileMenu from './AttachFileMenu';
import { useChatContext } from '~/Providers';
import { useFileHandling } from '~/hooks';
import AttachFile from './AttachFile';
@@ -22,46 +20,23 @@ function FileFormWrapper({
disableInputs: boolean;
children?: React.ReactNode;
}) {
const { handleFileChange, abortUpload } = useFileHandling();
const chatDirection = useRecoilValue(store.chatDirection).toLowerCase();
const { files, setFiles, conversation, setFilesLoading } = useChatContext();
const { endpoint: _endpoint, endpointType } = conversation ?? { endpoint: null };
const isAgents = useMemo(() => isAgentsEndpoint(_endpoint), [_endpoint]);
const { handleFileChange, abortUpload, setToolResource } = useFileHandling();
const { data: fileConfig = defaultFileConfig } = useGetFileConfig({
select: (data) => mergeFileConfig(data),
});
const isRTL = chatDirection === 'rtl';
const { endpoint: _endpoint, endpointType } = conversation ?? { endpoint: null };
const endpointFileConfig = fileConfig.endpoints[_endpoint ?? ''] as
| EndpointFileConfig
| undefined;
const endpointSupportsFiles: boolean = supportsFiles[endpointType ?? _endpoint ?? ''] ?? false;
const isUploadDisabled = (disableInputs || endpointFileConfig?.disabled) ?? false;
const renderAttachFile = () => {
if (isAgents) {
return (
<AttachFileMenu
isRTL={isRTL}
disabled={disableInputs}
setToolResource={setToolResource}
handleFileChange={handleFileChange}
/>
);
}
if (endpointSupportsFiles && !isUploadDisabled) {
return (
<AttachFile isRTL={isRTL} disabled={disableInputs} handleFileChange={handleFileChange} />
);
}
return null;
};
return (
<>
<FileRow
@@ -75,7 +50,9 @@ function FileFormWrapper({
)}
/>
{children}
{renderAttachFile()}
{endpointSupportsFiles && !isUploadDisabled && (
<AttachFile isRTL={isRTL} disabled={disableInputs} handleFileChange={handleFileChange} />
)}
</>
);
}

View File

@@ -26,15 +26,8 @@ export default function Mention({
}) {
const localize = useLocalize();
const assistantMap = useAssistantsMapContext();
const {
options,
presets,
modelSpecs,
agentsList,
modelsConfig,
endpointsConfig,
assistantListMap,
} = useMentions({ assistantMap: assistantMap || {}, includeAssistants });
const { options, presets, modelSpecs, modelsConfig, endpointsConfig, assistantListMap } =
useMentions({ assistantMap: assistantMap || {}, includeAssistants });
const { onSelectMention } = useSelectMention({
presets,
modelSpecs,
@@ -69,23 +62,18 @@ export default function Mention({
}
};
if (mention.type === 'endpoint' && mention.value === EModelEndpoint.agents) {
if (mention.type === 'endpoint' && mention.value === EModelEndpoint.assistants) {
setSearchValue('');
setInputOptions(agentsList ?? []);
setActiveIndex(0);
inputRef.current?.focus();
} else if (mention.type === 'endpoint' && mention.value === EModelEndpoint.assistants) {
setSearchValue('');
setInputOptions(assistantListMap[EModelEndpoint.assistants] ?? []);
setInputOptions(assistantListMap[EModelEndpoint.assistants]);
setActiveIndex(0);
inputRef.current?.focus();
} else if (mention.type === 'endpoint' && mention.value === EModelEndpoint.azureAssistants) {
setSearchValue('');
setInputOptions(assistantListMap[EModelEndpoint.azureAssistants] ?? []);
setInputOptions(assistantListMap[EModelEndpoint.azureAssistants]);
setActiveIndex(0);
inputRef.current?.focus();
} else if (mention.type === 'endpoint') {
const models = (modelsConfig?.[mention.value || ''] ?? []).map((model) => ({
const models = (modelsConfig?.[mention.value ?? ''] ?? []).map((model) => ({
value: mention.value,
label: model,
type: 'model',

View File

@@ -1,57 +1,47 @@
import type { FC } from 'react';
import { Close } from '@radix-ui/react-popover';
import {
EModelEndpoint,
alternateName,
PermissionTypes,
Permissions,
} from 'librechat-data-provider';
import { EModelEndpoint, alternateName } from 'librechat-data-provider';
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
import MenuSeparator from '../UI/MenuSeparator';
import { getEndpointField } from '~/utils';
import { useHasAccess } from '~/hooks';
import MenuItem from './MenuItem';
const EndpointItems: FC<{
endpoints: Array<EModelEndpoint | undefined>;
endpoints: EModelEndpoint[];
selected: EModelEndpoint | '';
}> = ({ endpoints = [], selected }) => {
const hasAccessToAgents = useHasAccess({
permissionType: PermissionTypes.AGENTS,
permission: Permissions.USE,
});
}> = ({ endpoints, selected }) => {
const { data: endpointsConfig } = useGetEndpointsQuery();
return (
<>
{endpoints.map((endpoint, i) => {
if (!endpoint) {
return null;
} else if (!endpointsConfig?.[endpoint]) {
return null;
}
if (endpoint === EModelEndpoint.agents && !hasAccessToAgents) {
return null;
}
const userProvidesKey: boolean | null | undefined =
getEndpointField(endpointsConfig, endpoint, 'userProvide') ?? false;
return (
<Close asChild key={`endpoint-${endpoint}`}>
<div key={`endpoint-${endpoint}`}>
<MenuItem
key={`endpoint-item-${endpoint}`}
title={alternateName[endpoint] || endpoint}
value={endpoint}
selected={selected === endpoint}
data-testid={`endpoint-item-${endpoint}`}
userProvidesKey={!!userProvidesKey}
// description="With DALL·E, browsing and analysis"
/>
{i !== endpoints.length - 1 && <MenuSeparator />}
</div>
</Close>
);
})}
{endpoints &&
endpoints.map((endpoint, i) => {
if (!endpoint) {
return null;
} else if (!endpointsConfig?.[endpoint]) {
return null;
}
const userProvidesKey: boolean | null | undefined = getEndpointField(
endpointsConfig,
endpoint,
'userProvide',
);
return (
<Close asChild key={`endpoint-${endpoint}`}>
<div key={`endpoint-${endpoint}`}>
<MenuItem
key={`endpoint-item-${endpoint}`}
title={alternateName[endpoint] || endpoint}
value={endpoint}
selected={selected === endpoint}
data-testid={`endpoint-item-${endpoint}`}
userProvidesKey={!!userProvidesKey}
// description="With DALL·E, browsing and analysis"
/>
{i !== endpoints.length - 1 && <MenuSeparator />}
</div>
</Close>
);
})}
</>
);
};

View File

@@ -71,7 +71,7 @@ const MenuItem: FC<MenuItemProps> = ({
<div>
<div className="flex items-center gap-2">
{showIconInMenu && <SpecIcon currentSpec={spec} endpointsConfig={endpointsConfig} />}
<div>
<div className="break-all">
{title}
<div className="text-token-text-tertiary">{description}</div>
</div>

View File

@@ -4,14 +4,12 @@ import { ContentTypes } from 'librechat-data-provider';
import type { TMessageContentParts, TAttachment, Agents } from 'librechat-data-provider';
import EditTextPart from './Parts/EditTextPart';
import { mapAttachments } from '~/utils/map';
import { MessageContext } from '~/Providers';
import store from '~/store';
import Part from './Part';
type ContentPartsProps = {
content: Array<TMessageContentParts | undefined> | undefined;
messageId: string;
conversationId?: string | null;
attachments?: TAttachment[];
isCreatedByUser: boolean;
isLast: boolean;
@@ -29,7 +27,6 @@ const ContentParts = memo(
({
content,
messageId,
conversationId,
attachments,
isCreatedByUser,
isLast,
@@ -82,23 +79,15 @@ const ContentParts = memo(
const attachments = attachmentMap[toolCallId];
return (
<MessageContext.Provider
key={`provider-${messageId}-${idx}`}
value={{
messageId,
conversationId,
partIndex: idx,
}}
>
<Part
part={part}
attachments={attachments}
isSubmitting={isSubmitting}
key={`part-${messageId}-${idx}`}
isCreatedByUser={isCreatedByUser}
showCursor={idx === content.length - 1 && isLast}
/>
</MessageContext.Provider>
<Part
part={part}
isSubmitting={isSubmitting}
attachments={attachments}
key={`display-${messageId}-${idx}`}
showCursor={idx === content.length - 1 && isLast}
messageId={messageId}
isCreatedByUser={isCreatedByUser}
/>
);
})}
</>

View File

@@ -1,4 +1,4 @@
import React, { memo, useMemo, useRef, useEffect } from 'react';
import React, { memo, useMemo } from 'react';
import remarkGfm from 'remark-gfm';
import remarkMath from 'remark-math';
import supersub from 'remark-supersub';
@@ -10,10 +10,10 @@ import remarkDirective from 'remark-directive';
import type { Pluggable } from 'unified';
import { Artifact, artifactPlugin } from '~/components/Artifacts/Artifact';
import { langSubset, preprocessLaTeX, handleDoubleClick } from '~/utils';
import { useToastContext, CodeBlockProvider, useCodeBlockContext } from '~/Providers';
import CodeBlock from '~/components/Messages/Content/CodeBlock';
import { useFileDownload } from '~/data-provider';
import useLocalize from '~/hooks/useLocalize';
import { useToastContext } from '~/Providers';
import store from '~/store';
type TCodeProps = {
@@ -25,32 +25,6 @@ type TCodeProps = {
export const code: React.ElementType = memo(({ className, children }: TCodeProps) => {
const match = /language-(\w+)/.exec(className ?? '');
const lang = match && match[1];
const isMath = lang === 'math';
const isSingleLine = typeof children === 'string' && children.split('\n').length === 1;
const { getNextIndex, resetCounter } = useCodeBlockContext();
const blockIndex = useRef(getNextIndex(isMath || isSingleLine)).current;
useEffect(() => {
resetCounter();
}, [children, resetCounter]);
if (isMath) {
return children;
} else if (isSingleLine) {
return (
<code onDoubleClick={handleDoubleClick} className={className}>
{children}
</code>
);
} else {
return <CodeBlock lang={lang ?? 'text'} codeChildren={children} blockIndex={blockIndex} />;
}
});
export const codeNoExecution: React.ElementType = memo(({ className, children }: TCodeProps) => {
const match = /language-(\w+)/.exec(className ?? '');
const lang = match && match[1];
if (lang === 'math') {
return children;
@@ -61,7 +35,7 @@ export const codeNoExecution: React.ElementType = memo(({ className, children }:
</code>
);
} else {
return <CodeBlock lang={lang ?? 'text'} codeChildren={children} allowExecution={false} />;
return <CodeBlock lang={lang ?? 'text'} codeChildren={children} />;
}
});
@@ -71,11 +45,7 @@ export const a: React.ElementType = memo(
const { showToast } = useToastContext();
const localize = useLocalize();
const {
file_id = '',
filename = '',
filepath,
} = useMemo(() => {
const { file_id, filename, filepath } = useMemo(() => {
const pattern = new RegExp(`(?:files|outputs)/${user?.id}/([^\\s]+)`);
const match = href.match(pattern);
if (match && match[0]) {
@@ -194,27 +164,25 @@ const Markdown = memo(({ content = '', showCursor, isLatestMessage }: TContentPr
: [supersub, remarkGfm, [remarkMath, { singleDollarTextMath: true }]];
return (
<CodeBlockProvider>
<ReactMarkdown
/** @ts-ignore */
remarkPlugins={remarkPlugins}
/* @ts-ignore */
rehypePlugins={rehypePlugins}
// linkTarget="_new"
components={
{
code,
a,
p,
artifact: Artifact,
} as {
[nodeType: string]: React.ElementType;
}
<ReactMarkdown
/** @ts-ignore */
remarkPlugins={remarkPlugins}
/* @ts-ignore */
rehypePlugins={rehypePlugins}
// linkTarget="_new"
components={
{
code,
a,
p,
artifact: Artifact,
} as {
[nodeType: string]: React.ElementType;
}
>
{isLatestMessage && showCursor === true ? currentContent + cursor : currentContent}
</ReactMarkdown>
</CodeBlockProvider>
}
>
{isLatestMessage && showCursor === true ? currentContent + cursor : currentContent}
</ReactMarkdown>
);
});

View File

@@ -6,51 +6,40 @@ import supersub from 'remark-supersub';
import ReactMarkdown from 'react-markdown';
import rehypeHighlight from 'rehype-highlight';
import type { PluggableList } from 'unified';
import { code, codeNoExecution, a, p } from './Markdown';
import { CodeBlockProvider } from '~/Providers';
import { langSubset } from '~/utils';
import { code, a, p } from './Markdown';
const MarkdownLite = memo(
({ content = '', codeExecution = true }: { content?: string; codeExecution?: boolean }) => {
const rehypePlugins: PluggableList = [
[rehypeKatex, { output: 'mathml' }],
[
rehypeHighlight,
const MarkdownLite = memo(({ content = '' }: { content?: string }) => {
const rehypePlugins: PluggableList = [
[rehypeKatex, { output: 'mathml' }],
[
rehypeHighlight,
{
detect: true,
ignoreMissing: true,
subset: langSubset,
},
],
];
return (
<ReactMarkdown
remarkPlugins={[supersub, remarkGfm, [remarkMath, { singleDollarTextMath: true }]]}
rehypePlugins={rehypePlugins}
// linkTarget="_new"
components={
{
detect: true,
ignoreMissing: true,
subset: langSubset,
},
],
];
return (
<CodeBlockProvider>
<ReactMarkdown
remarkPlugins={[
/** @ts-ignore */
supersub,
remarkGfm,
[remarkMath, { singleDollarTextMath: true }],
]}
/** @ts-ignore */
rehypePlugins={rehypePlugins}
// linkTarget="_new"
components={
{
code: codeExecution ? code : codeNoExecution,
a,
p,
} as {
[nodeType: string]: React.ElementType;
}
}
>
{content}
</ReactMarkdown>
</CodeBlockProvider>
);
},
);
code,
a,
p,
} as {
[nodeType: string]: React.ElementType;
}
}
>
{content}
</ReactMarkdown>
);
});
export default MarkdownLite;

View File

@@ -21,130 +21,143 @@ type PartProps = {
part?: TMessageContentParts;
isSubmitting: boolean;
showCursor: boolean;
messageId: string;
isCreatedByUser: boolean;
attachments?: TAttachment[];
};
const Part = memo(({ part, isSubmitting, attachments, showCursor, isCreatedByUser }: PartProps) => {
if (!part) {
return null;
}
if (part.type === ContentTypes.ERROR) {
return <ErrorMessage text={part[ContentTypes.TEXT].value} className="my-2" />;
} else if (part.type === ContentTypes.TEXT) {
const text = typeof part.text === 'string' ? part.text : part.text.value;
if (typeof text !== 'string') {
return null;
}
if (part.tool_call_ids != null && !text) {
return null;
}
return (
<Container>
<Text text={text} isCreatedByUser={isCreatedByUser} showCursor={showCursor} />
</Container>
);
} else if (part.type === ContentTypes.TOOL_CALL) {
const toolCall = part[ContentTypes.TOOL_CALL];
if (!toolCall) {
const Part = memo(
({ part, isSubmitting, attachments, showCursor, messageId, isCreatedByUser }: PartProps) => {
attachments && console.log(attachments);
if (!part) {
return null;
}
const isToolCall =
'args' in toolCall && (!toolCall.type || toolCall.type === ToolCallTypes.TOOL_CALL);
if (isToolCall && toolCall.name === Tools.execute_code) {
if (part.type === ContentTypes.ERROR) {
return <ErrorMessage text={part[ContentTypes.TEXT].value} className="my-2" />;
} else if (part.type === ContentTypes.TEXT) {
const text = typeof part.text === 'string' ? part.text : part.text.value;
if (typeof text !== 'string') {
return null;
}
if (part.tool_call_ids != null && !text) {
return null;
}
return (
<ExecuteCode
args={typeof toolCall.args === 'string' ? toolCall.args : ''}
output={toolCall.output ?? ''}
initialProgress={toolCall.progress ?? 0.1}
isSubmitting={isSubmitting}
attachments={attachments}
/>
<Container>
<Text
text={text}
isCreatedByUser={isCreatedByUser}
messageId={messageId}
showCursor={showCursor}
/>
</Container>
);
} else if (isToolCall) {
return (
<ToolCall
args={toolCall.args ?? ''}
name={toolCall.name || ''}
output={toolCall.output ?? ''}
initialProgress={toolCall.progress ?? 0.1}
isSubmitting={isSubmitting}
attachments={attachments}
/>
);
} else if (toolCall.type === ToolCallTypes.CODE_INTERPRETER) {
const code_interpreter = toolCall[ToolCallTypes.CODE_INTERPRETER];
return (
<CodeAnalyze
initialProgress={toolCall.progress ?? 0.1}
code={code_interpreter.input}
outputs={code_interpreter.outputs ?? []}
isSubmitting={isSubmitting}
/>
);
} else if (
toolCall.type === ToolCallTypes.RETRIEVAL ||
toolCall.type === ToolCallTypes.FILE_SEARCH
) {
return (
<RetrievalCall initialProgress={toolCall.progress ?? 0.1} isSubmitting={isSubmitting} />
);
} else if (
toolCall.type === ToolCallTypes.FUNCTION &&
ToolCallTypes.FUNCTION in toolCall &&
imageGenTools.has(toolCall.function.name)
) {
return (
<ImageGen
initialProgress={toolCall.progress ?? 0.1}
args={toolCall.function.arguments as string}
/>
);
} else if (toolCall.type === ToolCallTypes.FUNCTION && ToolCallTypes.FUNCTION in toolCall) {
if (isImageVisionTool(toolCall)) {
if (isSubmitting && showCursor) {
return (
<Container>
<Text text={''} isCreatedByUser={isCreatedByUser} showCursor={showCursor} />
</Container>
);
}
} else if (part.type === ContentTypes.TOOL_CALL) {
const toolCall = part[ContentTypes.TOOL_CALL];
if (!toolCall) {
return null;
}
const isToolCall =
'args' in toolCall && (!toolCall.type || toolCall.type === ToolCallTypes.TOOL_CALL);
if (isToolCall && toolCall.name === Tools.execute_code) {
return (
<ExecuteCode
args={typeof toolCall.args === 'string' ? toolCall.args : ''}
output={toolCall.output ?? ''}
initialProgress={toolCall.progress ?? 0.1}
isSubmitting={isSubmitting}
attachments={attachments}
/>
);
} else if (isToolCall) {
return (
<ToolCall
args={toolCall.args ?? ''}
name={toolCall.name ?? ''}
output={toolCall.output ?? ''}
initialProgress={toolCall.progress ?? 0.1}
isSubmitting={isSubmitting}
/>
);
} else if (toolCall.type === ToolCallTypes.CODE_INTERPRETER) {
const code_interpreter = toolCall[ToolCallTypes.CODE_INTERPRETER];
return (
<CodeAnalyze
initialProgress={toolCall.progress ?? 0.1}
code={code_interpreter.input}
outputs={code_interpreter.outputs ?? []}
isSubmitting={isSubmitting}
/>
);
} else if (
toolCall.type === ToolCallTypes.RETRIEVAL ||
toolCall.type === ToolCallTypes.FILE_SEARCH
) {
return (
<RetrievalCall initialProgress={toolCall.progress ?? 0.1} isSubmitting={isSubmitting} />
);
} else if (
toolCall.type === ToolCallTypes.FUNCTION &&
ToolCallTypes.FUNCTION in toolCall &&
imageGenTools.has(toolCall.function.name)
) {
return (
<ImageGen
initialProgress={toolCall.progress ?? 0.1}
args={toolCall.function.arguments as string}
/>
);
} else if (toolCall.type === ToolCallTypes.FUNCTION && ToolCallTypes.FUNCTION in toolCall) {
if (isImageVisionTool(toolCall)) {
if (isSubmitting && showCursor) {
return (
<Container>
<Text
text={''}
isCreatedByUser={isCreatedByUser}
messageId={messageId}
showCursor={showCursor}
/>
</Container>
);
}
return null;
}
return (
<ToolCall
initialProgress={toolCall.progress ?? 0.1}
isSubmitting={isSubmitting}
args={toolCall.function.arguments as string}
name={toolCall.function.name}
output={toolCall.function.output}
/>
);
}
} else if (part.type === ContentTypes.IMAGE_FILE) {
const imageFile = part[ContentTypes.IMAGE_FILE];
const height = imageFile.height ?? 1920;
const width = imageFile.width ?? 1080;
return (
<ToolCall
initialProgress={toolCall.progress ?? 0.1}
isSubmitting={isSubmitting}
args={toolCall.function.arguments as string}
name={toolCall.function.name}
output={toolCall.function.output}
<Image
imagePath={imageFile.filepath}
height={height}
width={width}
altText={imageFile.filename ?? 'Uploaded Image'}
placeholderDimensions={{
height: height + 'px',
width: width + 'px',
}}
/>
);
}
} else if (part.type === ContentTypes.IMAGE_FILE) {
const imageFile = part[ContentTypes.IMAGE_FILE];
const height = imageFile.height ?? 1920;
const width = imageFile.width ?? 1080;
return (
<Image
imagePath={imageFile.filepath}
height={height}
width={width}
altText={imageFile.filename ?? 'Uploaded Image'}
placeholderDimensions={{
height: height + 'px',
width: width + 'px',
}}
/>
);
}
return null;
});
return null;
},
);
export default Part;

View File

@@ -1,19 +0,0 @@
import { imageExtRegex } from 'librechat-data-provider';
import type { TAttachment, TFile, TAttachmentMetadata } from 'librechat-data-provider';
import Image from '~/components/Chat/Messages/Content/Image';
export default function Attachment({ attachment }: { attachment?: TAttachment }) {
if (!attachment) {
return null;
}
const { width, height, filepath = null } = attachment as TFile & TAttachmentMetadata;
const isImage =
imageExtRegex.test(attachment.filename) && width != null && height != null && filepath != null;
if (isImage) {
return (
<Image altText={attachment.filename} imagePath={filepath} height={height} width={width} />
);
}
return null;
}

View File

@@ -1,11 +1,12 @@
import React, { useMemo, useState } from 'react';
import { useRecoilValue } from 'recoil';
import type { TAttachment } from 'librechat-data-provider';
import { CodeInProgress } from './CodeProgress';
import { imageExtRegex } from 'librechat-data-provider';
import type { TFile, TAttachment, TAttachmentMetadata } from 'librechat-data-provider';
import ProgressText from '~/components/Chat/Messages/Content/ProgressText';
import FinishedIcon from '~/components/Chat/Messages/Content/FinishedIcon';
import MarkdownLite from '~/components/Chat/Messages/Content/MarkdownLite';
import { CodeInProgress } from './CodeProgress';
import Attachment from './Attachment';
import Image from '~/components/Chat/Messages/Content/Image';
import LogContent from './LogContent';
import { useProgress } from '~/hooks';
import store from '~/store';
@@ -85,10 +86,7 @@ export default function ExecuteCode({
</div>
{showCode && (
<div className="code-analyze-block mb-3 mt-0.5 overflow-hidden rounded-xl bg-black">
<MarkdownLite
content={code ? `\`\`\`${lang}\n${code}\n\`\`\`` : ''}
codeExecution={false}
/>
<MarkdownLite content={code ? `\`\`\`${lang}\n${code}\n\`\`\`` : ''} />
{output.length > 0 && (
<div className="bg-gray-700 p-4 text-xs">
<div
@@ -105,9 +103,25 @@ export default function ExecuteCode({
)}
</div>
)}
{attachments?.map((attachment, index) => (
<Attachment attachment={attachment} key={index} />
))}
{attachments?.map((attachment, index) => {
const { width, height, filepath } = attachment as TFile & TAttachmentMetadata;
const isImage =
imageExtRegex.test(attachment.filename) &&
width != null &&
height != null &&
filepath != null;
if (isImage) {
return (
<Image
key={index}
altText={attachment.filename}
imagePath={filepath}
height={height}
width={width}
/>
);
}
})}
</>
);
}

View File

@@ -1,26 +1,17 @@
import { isAfter } from 'date-fns';
import React, { useMemo } from 'react';
import { imageExtRegex } from 'librechat-data-provider';
import type { TFile, TAttachment, TAttachmentMetadata } from 'librechat-data-provider';
import Image from '~/components/Chat/Messages/Content/Image';
import type { TAttachment } from 'librechat-data-provider';
import { useLocalize } from '~/hooks';
import LogLink from './LogLink';
interface LogContentProps {
output?: string;
renderImages?: boolean;
attachments?: TAttachment[];
}
type ImageAttachment = TFile &
TAttachmentMetadata & {
height: number;
width: number;
};
const LogContent: React.FC<LogContentProps> = ({ output = '', renderImages, attachments }) => {
const LogContent: React.FC<LogContentProps> = ({ output = '', attachments }) => {
const localize = useLocalize();
const processedContent = useMemo(() => {
if (!output) {
return '';
@@ -30,29 +21,8 @@ const LogContent: React.FC<LogContentProps> = ({ output = '', renderImages, atta
return parts[0].trim();
}, [output]);
const { imageAttachments, nonImageAttachments } = useMemo(() => {
const imageAtts: ImageAttachment[] = [];
const nonImageAtts: TAttachment[] = [];
attachments?.forEach((attachment) => {
const { width, height, filepath = null } = attachment as TFile & TAttachmentMetadata;
const isImage =
imageExtRegex.test(attachment.filename) &&
width != null &&
height != null &&
filepath != null;
if (isImage) {
imageAtts.push(attachment as ImageAttachment);
} else {
nonImageAtts.push(attachment);
}
});
return {
imageAttachments: renderImages === true ? imageAtts : null,
nonImageAttachments: nonImageAtts,
};
}, [attachments, renderImages]);
const nonImageAttachments =
attachments?.filter((file) => !imageExtRegex.test(file.filename)) || [];
const renderAttachment = (file: TAttachment) => {
const now = new Date();
@@ -89,18 +59,6 @@ const LogContent: React.FC<LogContentProps> = ({ output = '', renderImages, atta
))}
</div>
)}
{imageAttachments?.map((attachment, index) => {
const { width, height, filepath } = attachment;
return (
<Image
key={index}
altText={attachment.filename}
imagePath={filepath}
height={height}
width={width}
/>
);
})}
</>
);
};

Some files were not shown because too many files have changed in this diff Show More