Compare commits


3 Commits

206 changed files with 3672 additions and 7874 deletions

View File

@@ -39,9 +39,6 @@ jobs:
- name: Install MCP Package
run: npm run build:mcp
- name: Install Data Schemas Package
run: npm run build:data-schemas
- name: Create empty auth.json file
run: |
mkdir -p api/data

View File

@@ -1,58 +0,0 @@
name: Publish `@librechat/data-schemas` to NPM
on:
push:
branches:
- main
paths:
- 'packages/data-schemas/package.json'
workflow_dispatch:
inputs:
reason:
description: 'Reason for manual trigger'
required: false
default: 'Manual publish requested'
jobs:
build-and-publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Use Node.js
uses: actions/setup-node@v4
with:
node-version: '18.x'
- name: Install dependencies
run: cd packages/data-schemas && npm ci
- name: Build
run: cd packages/data-schemas && npm run build
- name: Set up npm authentication
run: echo "//registry.npmjs.org/:_authToken=${{ secrets.PUBLISH_NPM_TOKEN }}" > ~/.npmrc
- name: Check version change
id: check
working-directory: packages/data-schemas
run: |
PACKAGE_VERSION=$(node -p "require('./package.json').version")
PUBLISHED_VERSION=$(npm view @librechat/data-schemas version 2>/dev/null || echo "0.0.0")
if [ "$PACKAGE_VERSION" = "$PUBLISHED_VERSION" ]; then
echo "No version change, skipping publish"
echo "skip=true" >> $GITHUB_OUTPUT
else
echo "Version changed, proceeding with publish"
echo "skip=false" >> $GITHUB_OUTPUT
fi
- name: Pack package
if: steps.check.outputs.skip != 'true'
working-directory: packages/data-schemas
run: npm pack
- name: Publish
if: steps.check.outputs.skip != 'true'
working-directory: packages/data-schemas
run: npm publish *.tgz --access public
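For reference, the "Check version change" step above gates publishing on a registry comparison: it reads the local package.json version, asks npm for the latest published one, and skips the publish when they match. A minimal Node sketch of the same gate (illustrative only; paths and package name copied from the workflow):

const { execSync } = require('child_process');

function shouldPublish(pkgJsonPath, pkgName) {
  const local = require(pkgJsonPath).version;
  let published = '0.0.0';
  try {
    // `npm view <pkg> version` prints the latest version on the registry
    published = execSync(`npm view ${pkgName} version`).toString().trim();
  } catch {
    // Not yet published: treat as 0.0.0, same as the workflow's fallback
  }
  return local !== published;
}

console.log(shouldPublish('./packages/data-schemas/package.json', '@librechat/data-schemas'));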

View File

@@ -84,11 +84,11 @@ jobs:
with:
token: ${{ secrets.GITHUB_TOKEN }}
sign-commits: true
commit-message: "chore: update CHANGELOG for release ${{ github.ref_name }}"
commit-message: "chore: update CHANGELOG for release ${GITHUB_REF##*/}"
base: main
branch: "changelog/${{ github.ref_name }}"
branch: "changelog/${GITHUB_REF##*/}"
reviewers: danny-avila
title: "chore: update CHANGELOG for release ${{ github.ref_name }}"
title: "chore: update CHANGELOG for release ${GITHUB_REF##*/}"
body: |
**Description**:
- This PR updates the CHANGELOG.md by removing the "Unreleased" section and adding new release notes for release ${{ github.ref_name }} above previous releases.
- This PR updates the CHANGELOG.md by removing the "Unreleased" section and adding new release notes for release ${GITHUB_REF##*/} above previous releases.
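For reference, `${GITHUB_REF##*/}` is POSIX parameter expansion: it strips the longest prefix matching `*/`, turning a ref such as refs/tags/v0.7.7 into v0.7.7, the same short name `${{ github.ref_name }}` yields. A one-line JavaScript equivalent, purely illustrative:

const shortRef = (ref) => ref.split('/').pop();
console.log(shortRef('refs/tags/v0.7.7')); // 'v0.7.7'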

View File

@@ -1,16 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
## [Unreleased]
### ✨ New Features
- 🪄 feat: Agent Artifacts by **@danny-avila** in [#5804](https://github.com/danny-avila/LibreChat/pull/5804)
### ⚙️ Other Changes
- 🔄 chore: Enforce 18next Language Keys by **@rubentalstra** in [#5803](https://github.com/danny-avila/LibreChat/pull/5803)
- 🔃 refactor: Parent Message ID Handling on Error, Update Translations, Bump Agents by **@danny-avila** in [#5833](https://github.com/danny-avila/LibreChat/pull/5833)
---

View File

@@ -1,4 +1,4 @@
# v0.7.7
# v0.7.7-rc1
# Base node image
FROM node:20-alpine AS node
@@ -21,7 +21,7 @@ RUN \
npm config set fetch-retries 5 ; \
npm config set fetch-retry-mintimeout 15000 ; \
npm install --no-audit; \
# React client build
# Build React client
NODE_OPTIONS="--max-old-space-size=2048" npm run frontend; \
npm prune --production; \
npm cache clean --force
@@ -31,6 +31,12 @@ RUN mkdir -p /app/client/public/images /app/api/logs
# Node API setup
EXPOSE 3080
ENV HOST=0.0.0.0
# Define a health check
HEALTHCHECK --interval=10s --timeout=5s --start-period=30s --retries=3 \
CMD curl -f http://localhost:3080/health
# Start the backend service
CMD ["npm", "run", "backend"]
# Optional: for client with nginx routing
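The HEALTHCHECK above assumes the backend answers on a /health route; `curl -f` exits non-zero on HTTP errors, so anything but a success status marks the container unhealthy. A minimal sketch of such a route, assuming an Express server (hypothetical; the real route lives in the API code, not in this diff):

const express = require('express');
const app = express();

// Return 200 so `curl -f http://localhost:3080/health` succeeds
app.get('/health', (req, res) => res.status(200).send('OK'));

app.listen(3080, '0.0.0.0');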

View File

@@ -1,5 +1,5 @@
# Dockerfile.multi
# v0.7.7
# v0.7.7-rc1
# Base for all builds
FROM node:20-alpine AS base-min
@@ -11,7 +11,6 @@ RUN npm config set fetch-retry-maxtimeout 600000 && \
COPY package*.json ./
COPY packages/data-provider/package*.json ./packages/data-provider/
COPY packages/mcp/package*.json ./packages/mcp/
COPY packages/data-schemas/package*.json ./packages/data-schemas/
COPY client/package*.json ./client/
COPY api/package*.json ./api/
@@ -33,13 +32,6 @@ COPY packages/mcp ./
COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
RUN npm run build
# Build data-schemas
FROM base AS data-schemas-build
WORKDIR /app/packages/data-schemas
COPY packages/data-schemas ./
COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
RUN npm run build
# Client build
FROM base AS client-build
WORKDIR /app/client
@@ -57,9 +49,8 @@ COPY api ./api
COPY config ./config
COPY --from=data-provider-build /app/packages/data-provider/dist ./packages/data-provider/dist
COPY --from=mcp-build /app/packages/mcp/dist ./packages/mcp/dist
COPY --from=data-schemas-build /app/packages/data-schemas/dist ./packages/data-schemas/dist
COPY --from=client-build /app/client/dist ./client/dist
WORKDIR /app/api
EXPOSE 3080
ENV HOST=0.0.0.0
CMD ["node", "server/index.js"]
CMD ["node", "server/index.js"]

View File

@@ -197,6 +197,6 @@ We thank [Locize](https://locize.com) for their translation management tools tha
<p align="center">
<a href="https://locize.com" target="_blank" rel="noopener noreferrer">
<img src="https://github.com/user-attachments/assets/d6b70894-6064-475e-bb65-92a9e23e0077" alt="Locize Logo" height="50">
<img src="https://locize.com/img/locize_color.svg" alt="Locize Logo" height="50">
</a>
</p>

View File

@@ -1121,13 +1121,9 @@ class BaseClient {
return message;
}
const files = await getFiles(
{
file_id: { $in: fileIds },
},
{},
{},
);
const files = await getFiles({
file_id: { $in: fileIds },
});
await this.addImageURLs(message, files, this.visionMode);

View File

@@ -827,8 +827,7 @@ class GoogleClient extends BaseClient {
let reply = '';
const { abortController } = options;
const model =
this.options.titleModel ?? this.modelOptions.modelName ?? this.modelOptions.model ?? '';
const model = this.modelOptions.modelName ?? this.modelOptions.model ?? '';
const safetySettings = getSafetySettings(model);
if (!EXCLUDED_GENAI_MODELS.test(model) && !this.project_id) {
logger.debug('Identified titling model as GenAI version');

View File

@@ -112,12 +112,7 @@ class OpenAIClient extends BaseClient {
const { OPENAI_FORCE_PROMPT } = process.env ?? {};
const { reverseProxyUrl: reverseProxy } = this.options;
if (
!this.useOpenRouter &&
((reverseProxy && reverseProxy.includes(KnownEndpoints.openrouter)) ||
(this.options.endpoint &&
this.options.endpoint.toLowerCase().includes(KnownEndpoints.openrouter)))
) {
if (!this.useOpenRouter && reverseProxy && reverseProxy.includes(KnownEndpoints.openrouter)) {
this.useOpenRouter = true;
}
@@ -1307,12 +1302,8 @@ ${convo}
) {
delete modelOptions.stream;
delete modelOptions.stop;
} else if (
(!this.isOmni || /^o1-(mini|preview)/i.test(modelOptions.model)) &&
modelOptions.reasoning_effort != null
) {
} else if (!this.isOmni && modelOptions.reasoning_effort != null) {
delete modelOptions.reasoning_effort;
delete modelOptions.temperature;
}
let reasoningKey = 'reasoning_content';
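A small illustration of the tightened OpenRouter check above: only the reverse-proxy URL is inspected now, so an endpoint name alone no longer flips useOpenRouter. Sketch with a stand-in constant (values invented for the example):

const KnownEndpoints = { openrouter: 'openrouter' }; // stand-in for the real constant

const matchesNew = (reverseProxy) =>
  Boolean(reverseProxy && reverseProxy.includes(KnownEndpoints.openrouter));

console.log(matchesNew('https://openrouter.ai/api/v1')); // true: proxy URL match
console.log(matchesNew(undefined)); // false, even if options.endpoint were 'OpenRouter'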

View File

@@ -325,37 +325,4 @@ describe('formatAgentMessages', () => {
);
expect(result[0].content).not.toContain('Analyzing the problem...');
});
it('should exclude ERROR type content parts', () => {
const payload = [
{
role: 'assistant',
content: [
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Hello there' },
{
type: ContentTypes.ERROR,
[ContentTypes.ERROR]:
'An error occurred while processing the request: Something went wrong',
},
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Final answer' },
],
},
];
const result = formatAgentMessages(payload);
expect(result).toHaveLength(1);
expect(result[0]).toBeInstanceOf(AIMessage);
expect(result[0].content).toEqual([
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Hello there' },
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Final answer' },
]);
// Make sure no error content exists in the result
const hasErrorContent = result[0].content.some(
(item) =>
item.type === ContentTypes.ERROR || JSON.stringify(item).includes('An error occurred'),
);
expect(hasErrorContent).toBe(false);
});
});

View File

@@ -211,8 +211,6 @@ const formatAgentMessages = (payload) => {
} else if (part.type === ContentTypes.THINK) {
hasReasoning = true;
continue;
} else if (part.type === ContentTypes.ERROR) {
continue;
} else {
currentContent.push(part);
}
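The removed branch above skipped ERROR-type parts while accumulating content. The same filtering idea as a standalone function, with ContentTypes values assumed to mirror librechat-data-provider:

const ContentTypes = { TEXT: 'text', ERROR: 'error' }; // assumed values

function stripErrorParts(content) {
  // Drop every part typed as ERROR, keep the rest in order
  return content.filter((part) => part.type !== ContentTypes.ERROR);
}

console.log(
  stripErrorParts([
    { type: 'text', text: 'Hello there' },
    { type: 'error', error: 'Something went wrong' },
    { type: 'text', text: 'Final answer' },
  ]),
); // -> only the two text parts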

View File

@@ -21,7 +21,6 @@ const {
} = require('../');
const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { createMCPTool } = require('~/server/services/MCP');
const { loadSpecs } = require('./loadSpecs');
const { logger } = require('~/config');
@@ -91,6 +90,45 @@ const validateTools = async (user, tools = []) => {
}
};
const loadAuthValues = async ({ userId, authFields, throwError = true }) => {
let authValues = {};
/**
* Finds the first non-empty value for the given authentication field, supporting alternate fields.
* @param {string[]} fields Array of strings representing the authentication fields. Supports alternate fields delimited by "||".
* @returns {Promise<{ authField: string, authValue: string} | null>} An object containing the authentication field and value, or null if not found.
*/
const findAuthValue = async (fields) => {
for (const field of fields) {
let value = process.env[field];
if (value) {
return { authField: field, authValue: value };
}
try {
value = await getUserPluginAuthValue(userId, field, throwError);
} catch (err) {
if (field === fields[fields.length - 1] && !value) {
throw err;
}
}
if (value) {
return { authField: field, authValue: value };
}
}
return null;
};
for (let authField of authFields) {
const fields = authField.split('||');
const result = await findAuthValue(fields);
if (result) {
authValues[result.authField] = result.authValue;
}
}
return authValues;
};
/** @typedef {typeof import('@langchain/core/tools').Tool} ToolConstructor */
/** @typedef {import('@langchain/core/tools').Tool} Tool */
@@ -310,6 +348,7 @@ const loadTools = async ({
module.exports = {
loadToolWithAuth,
loadAuthValues,
validateTools,
loadTools,
};
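Usage sketch for the loadAuthValues moved inline above: each entry in authFields is resolved from process.env first, then from the user's stored plugin credentials, and "||" declares alternates tried left to right. Field names below are examples only:

const { loadAuthValues } = require('./handleTools');

async function example(userId) {
  // OPENAI_API_KEY is tried first (env, then user store);
  // AZURE_API_KEY is consulted only if the first yields nothing.
  const values = await loadAuthValues({
    userId,
    authFields: ['OPENAI_API_KEY||AZURE_API_KEY'],
  });
  return values; // e.g. { OPENAI_API_KEY: 'sk-...' }
}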

View File

@@ -1,8 +1,9 @@
const { validateTools, loadTools } = require('./handleTools');
const { validateTools, loadTools, loadAuthValues } = require('./handleTools');
const handleOpenAIErrors = require('./handleOpenAIErrors');
module.exports = {
handleOpenAIErrors,
loadAuthValues,
validateTools,
loadTools,
};

View File

@@ -1,4 +1,3 @@
const axios = require('axios');
const { EventSource } = require('eventsource');
const { Time, CacheKeys } = require('librechat-data-provider');
const logger = require('./winston');
@@ -48,46 +47,9 @@ const sendEvent = (res, event) => {
res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`);
};
/**
* Creates and configures an Axios instance with optional proxy settings.
*
* @typedef {import('axios').AxiosInstance} AxiosInstance
* @typedef {import('axios').AxiosProxyConfig} AxiosProxyConfig
*
* @returns {AxiosInstance} A configured Axios instance
* @throws {Error} If there's an issue creating the Axios instance or parsing the proxy URL
*/
function createAxiosInstance() {
const instance = axios.create();
if (process.env.proxy) {
try {
const url = new URL(process.env.proxy);
/** @type {AxiosProxyConfig} */
const proxyConfig = {
host: url.hostname.replace(/^\[|\]$/g, ''),
protocol: url.protocol.replace(':', ''),
};
if (url.port) {
proxyConfig.port = parseInt(url.port, 10);
}
instance.defaults.proxy = proxyConfig;
} catch (error) {
console.error('Error parsing proxy URL:', error);
throw new Error(`Invalid proxy URL: ${process.env.proxy}`);
}
}
return instance;
}
module.exports = {
logger,
sendEvent,
getMCPManager,
createAxiosInstance,
getFlowStateManager,
};
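sendEvent above frames each payload as a server-sent event: an `event: message` line, a `data:` line of JSON, then a blank line. A client-side sketch using the eventsource package this file already imports (the URL is a placeholder):

const { EventSource } = require('eventsource');

const es = new EventSource('http://localhost:3080/example-sse-route'); // placeholder URL
es.addEventListener('message', (e) => {
  // e.data is the JSON string sendEvent wrote on the data: line
  console.log('received event:', JSON.parse(e.data));
});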

View File

@@ -1,126 +0,0 @@
const axios = require('axios');
const { createAxiosInstance } = require('./index');
// Mock axios
jest.mock('axios', () => ({
interceptors: {
request: { use: jest.fn(), eject: jest.fn() },
response: { use: jest.fn(), eject: jest.fn() },
},
create: jest.fn().mockReturnValue({
defaults: {
proxy: null,
},
get: jest.fn().mockResolvedValue({ data: {} }),
post: jest.fn().mockResolvedValue({ data: {} }),
put: jest.fn().mockResolvedValue({ data: {} }),
delete: jest.fn().mockResolvedValue({ data: {} }),
}),
get: jest.fn().mockResolvedValue({ data: {} }),
post: jest.fn().mockResolvedValue({ data: {} }),
put: jest.fn().mockResolvedValue({ data: {} }),
delete: jest.fn().mockResolvedValue({ data: {} }),
reset: jest.fn().mockImplementation(function () {
this.get.mockClear();
this.post.mockClear();
this.put.mockClear();
this.delete.mockClear();
this.create.mockClear();
}),
}));
describe('createAxiosInstance', () => {
const originalEnv = process.env;
beforeEach(() => {
// Reset mocks
jest.clearAllMocks();
// Create a clean copy of process.env
process.env = { ...originalEnv };
// Default: no proxy
delete process.env.proxy;
});
afterAll(() => {
// Restore original process.env
process.env = originalEnv;
});
test('creates an axios instance without proxy when no proxy env is set', () => {
const instance = createAxiosInstance();
expect(axios.create).toHaveBeenCalledTimes(1);
expect(instance.defaults.proxy).toBeNull();
});
test('configures proxy correctly with hostname and protocol', () => {
process.env.proxy = 'http://example.com';
const instance = createAxiosInstance();
expect(axios.create).toHaveBeenCalledTimes(1);
expect(instance.defaults.proxy).toEqual({
host: 'example.com',
protocol: 'http',
});
});
test('configures proxy correctly with hostname, protocol and port', () => {
process.env.proxy = 'https://proxy.example.com:8080';
const instance = createAxiosInstance();
expect(axios.create).toHaveBeenCalledTimes(1);
expect(instance.defaults.proxy).toEqual({
host: 'proxy.example.com',
protocol: 'https',
port: 8080,
});
});
test('handles proxy URLs with authentication', () => {
process.env.proxy = 'http://user:pass@proxy.example.com:3128';
const instance = createAxiosInstance();
expect(axios.create).toHaveBeenCalledTimes(1);
expect(instance.defaults.proxy).toEqual({
host: 'proxy.example.com',
protocol: 'http',
port: 3128,
// Note: The current implementation doesn't handle auth - if needed, add this functionality
});
});
test('throws error when proxy URL is invalid', () => {
process.env.proxy = 'invalid-url';
expect(() => createAxiosInstance()).toThrow('Invalid proxy URL');
expect(axios.create).toHaveBeenCalledTimes(1);
});
// If you want to test the actual URL parsing more thoroughly
test('handles edge case proxy URLs correctly', () => {
// IPv6 address
process.env.proxy = 'http://[::1]:8080';
let instance = createAxiosInstance();
expect(instance.defaults.proxy).toEqual({
host: '::1',
protocol: 'http',
port: 8080,
});
// URL with path (which should be ignored for proxy config)
process.env.proxy = 'http://proxy.example.com:8080/some/path';
instance = createAxiosInstance();
expect(instance.defaults.proxy).toEqual({
host: 'proxy.example.com',
protocol: 'http',
port: 8080,
});
});
});

View File

@@ -1,6 +1,6 @@
const { MeiliSearch } = require('meilisearch');
const { Conversation } = require('~/models/Conversation');
const { Message } = require('~/models/Message');
const Conversation = require('~/models/schema/convoSchema');
const Message = require('~/models/schema/messageSchema');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');

View File

@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
const { actionSchema } = require('@librechat/data-schemas');
const actionSchema = require('./schema/action');
const Action = mongoose.model('action', actionSchema);

View File

@@ -9,7 +9,7 @@ const {
removeAgentFromAllProjects,
} = require('./Project');
const getLogStores = require('~/cache/getLogStores');
const { agentSchema } = require('@librechat/data-schemas');
const agentSchema = require('./schema/agent');
const Agent = mongoose.model('agent', agentSchema);

View File

@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
const { assistantSchema } = require('@librechat/data-schemas');
const assistantSchema = require('./schema/assistant');
const Assistant = mongoose.model('assistant', assistantSchema);

View File

@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
const { balanceSchema } = require('@librechat/data-schemas');
const balanceSchema = require('./schema/balance');
const { getMultiplier } = require('./tx');
const { logger } = require('~/config');

View File

@@ -1,9 +1,5 @@
const mongoose = require('mongoose');
const Banner = require('./schema/banner');
const logger = require('~/config/winston');
const { bannerSchema } = require('@librechat/data-schemas');
const Banner = mongoose.model('Banner', bannerSchema);
/**
* Retrieves the current active banner.
* @returns {Promise<Object|null>} The active banner object or null if no active banner is found.

View File

@@ -1,4 +1,5 @@
const { logger } = require('~/config');
// const { Categories } = require('./schema/categories');
const options = [
{

View File

@@ -15,6 +15,19 @@ const searchConversation = async (conversationId) => {
throw new Error('Error searching conversation');
}
};
/**
* Searches for a conversation by conversationId and returns associated file ids.
* @param {string} conversationId - The conversation's ID.
* @returns {Promise<string[] | null>}
*/
const getConvoFiles = async (conversationId) => {
try {
return (await Conversation.findOne({ conversationId }, 'files').lean())?.files ?? [];
} catch (error) {
logger.error('[getConvoFiles] Error getting conversation files', error);
throw new Error('Error getting conversation files');
}
};
/**
* Retrieves a single conversation for a given user and conversation ID.
@@ -60,46 +73,9 @@ const deleteNullOrEmptyConversations = async () => {
}
};
/**
* Retrieves files from a conversation that have either embedded=true
* or a metadata.fileIdentifier. Simplified and efficient query.
*
* @param {string} conversationId - The conversation ID
* @returns {Promise<MongoFile[]>} - Filtered array of matching file objects
*/
const getToolFiles = async (conversationId) => {
try {
const [result] = await Conversation.aggregate([
{ $match: { conversationId } },
{
$project: {
files: {
$filter: {
input: '$files',
as: 'file',
cond: {
$or: [
{ $eq: ['$$file.embedded', true] },
{ $ifNull: ['$$file.metadata.fileIdentifier', false] },
],
},
},
},
_id: 0,
},
},
]).exec();
return result?.files || [];
} catch (error) {
logger.error('[getConvoEmbeddedFiles] Error fetching embedded files:', error);
throw new Error('Error fetching embedded files');
}
};
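For clarity, the $filter stage in the removed getToolFiles keeps a file when it is embedded or carries a metadata.fileIdentifier. The same selection written against a plain lean() result, illustrative only:

function filterToolFiles(files = []) {
  return files.filter(
    (file) => file.embedded === true || file.metadata?.fileIdentifier != null,
  );
}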
module.exports = {
Conversation,
getToolFiles,
getConvoFiles,
searchConversation,
deleteNullOrEmptyConversations,
/**

View File

@@ -1,11 +1,7 @@
const mongoose = require('mongoose');
const ConversationTag = require('./schema/conversationTagSchema');
const Conversation = require('./schema/convoSchema');
const logger = require('~/config/winston');
const { conversationTagSchema } = require('@librechat/data-schemas');
const ConversationTag = mongoose.model('ConversationTag', conversationTagSchema);
/**
* Retrieves all conversation tags for a user.
* @param {string} user - The user ID.

View File

@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
const { fileSchema } = require('@librechat/data-schemas');
const fileSchema = require('./schema/fileSchema');
const File = mongoose.model('File', fileSchema);
@@ -7,7 +7,7 @@ const File = mongoose.model('File', fileSchema);
* Finds a file by its file_id with additional query options.
* @param {string} file_id - The unique identifier of the file.
* @param {object} options - Query options for filtering, projection, etc.
* @returns {Promise<IMongoFile>} A promise that resolves to the file document or null.
* @returns {Promise<MongoFile>} A promise that resolves to the file document or null.
*/
const findFileById = async (file_id, options = {}) => {
return await File.findOne({ file_id, ...options }).lean();
@@ -17,20 +17,18 @@ const findFileById = async (file_id, options = {}) => {
* Retrieves files matching a given filter, sorted by the most recently updated.
* @param {Object} filter - The filter criteria to apply.
* @param {Object} [_sortOptions] - Optional sort parameters.
* @param {Object|String} [selectFields={ text: 0 }] - Fields to include/exclude in the query results.
* Default excludes the 'text' field.
* @returns {Promise<Array<IMongoFile>>} A promise that resolves to an array of file documents.
* @returns {Promise<Array<MongoFile>>} A promise that resolves to an array of file documents.
*/
const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => {
const getFiles = async (filter, _sortOptions) => {
const sortOptions = { updatedAt: -1, ..._sortOptions };
return await File.find(filter).select(selectFields).sort(sortOptions).lean();
return await File.find(filter).sort(sortOptions).lean();
};
/**
* Creates a new file with a TTL of 1 hour.
* @param {IMongoFile} data - The file data to be created, must contain file_id.
* @param {MongoFile} data - The file data to be created, must contain file_id.
* @param {boolean} disableTTL - Whether to disable the TTL.
* @returns {Promise<IMongoFile>} A promise that resolves to the created file document.
* @returns {Promise<MongoFile>} A promise that resolves to the created file document.
*/
const createFile = async (data, disableTTL) => {
const fileData = {
@@ -50,8 +48,8 @@ const createFile = async (data, disableTTL) => {
/**
* Updates a file identified by file_id with new data and removes the TTL.
* @param {IMongoFile} data - The data to update, must contain file_id.
* @returns {Promise<IMongoFile>} A promise that resolves to the updated file document.
* @param {MongoFile} data - The data to update, must contain file_id.
* @returns {Promise<MongoFile>} A promise that resolves to the updated file document.
*/
const updateFile = async (data) => {
const { file_id, ...update } = data;
@@ -64,8 +62,8 @@ const updateFile = async (data) => {
/**
* Increments the usage of a file identified by file_id.
* @param {IMongoFile} data - The data to update, must contain file_id and the increment value for usage.
* @returns {Promise<IMongoFile>} A promise that resolves to the updated file document.
* @param {MongoFile} data - The data to update, must contain file_id and the increment value for usage.
* @returns {Promise<MongoFile>} A promise that resolves to the updated file document.
*/
const updateFileUsage = async (data) => {
const { file_id, inc = 1 } = data;
@@ -79,7 +77,7 @@ const updateFileUsage = async (data) => {
/**
* Deletes a file identified by file_id.
* @param {string} file_id - The unique identifier of the file to delete.
* @returns {Promise<IMongoFile>} A promise that resolves to the deleted file document or null.
* @returns {Promise<MongoFile>} A promise that resolves to the deleted file document or null.
*/
const deleteFile = async (file_id) => {
return await File.findOneAndDelete({ file_id }).lean();
@@ -88,7 +86,7 @@ const deleteFile = async (file_id) => {
/**
* Deletes a file identified by a filter.
* @param {object} filter - The filter criteria to apply.
* @returns {Promise<IMongoFile>} A promise that resolves to the deleted file document or null.
* @returns {Promise<MongoFile>} A promise that resolves to the deleted file document or null.
*/
const deleteFileByFilter = async (filter) => {
return await File.findOneAndDelete(filter).lean();
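Call sketch for the getFiles signature this compare lands on: with the select parameter gone, documents come back whole, including the text field the previous default projection excluded. Illustrative usage, assuming the model's exports:

const { getFiles } = require('./File');

async function filesByIds(fileIds) {
  // Sort defaults to most recently updated first
  return await getFiles({ file_id: { $in: fileIds } });
}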

View File

@@ -1,4 +1,4 @@
const mongoose = require('mongoose');
const { keySchema } = require('@librechat/data-schemas');
const keySchema = require('./schema/key');
module.exports = mongoose.model('Key', keySchema);

View File

@@ -1,6 +1,6 @@
const { model } = require('mongoose');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
const { projectSchema } = require('@librechat/data-schemas');
const projectSchema = require('~/models/schema/projectSchema');
const Project = model('Project', projectSchema);
@@ -9,7 +9,7 @@ const Project = model('Project', projectSchema);
*
* @param {string} projectId - The ID of the project to find and return as a plain object.
* @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
* @returns {Promise<IMongoProject>} A plain object representing the project document, or `null` if no project is found.
* @returns {Promise<MongoProject>} A plain object representing the project document, or `null` if no project is found.
*/
const getProjectById = async function (projectId, fieldsToSelect = null) {
const query = Project.findById(projectId);
@@ -27,7 +27,7 @@ const getProjectById = async function (projectId, fieldsToSelect = null) {
*
* @param {string} projectName - The name of the project to find or create.
* @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
* @returns {Promise<IMongoProject>} A plain object representing the project document.
* @returns {Promise<MongoProject>} A plain object representing the project document.
*/
const getProjectByName = async function (projectName, fieldsToSelect = null) {
const query = { name: projectName };
@@ -47,7 +47,7 @@ const getProjectByName = async function (projectName, fieldsToSelect = null) {
*
* @param {string} projectId - The ID of the project to update.
* @param {string[]} promptGroupIds - The array of prompt group IDs to add to the project.
* @returns {Promise<IMongoProject>} The updated project document.
* @returns {Promise<MongoProject>} The updated project document.
*/
const addGroupIdsToProject = async function (projectId, promptGroupIds) {
return await Project.findByIdAndUpdate(
@@ -62,7 +62,7 @@ const addGroupIdsToProject = async function (projectId, promptGroupIds) {
*
* @param {string} projectId - The ID of the project to update.
* @param {string[]} promptGroupIds - The array of prompt group IDs to remove from the project.
* @returns {Promise<IMongoProject>} The updated project document.
* @returns {Promise<MongoProject>} The updated project document.
*/
const removeGroupIdsFromProject = async function (projectId, promptGroupIds) {
return await Project.findByIdAndUpdate(
@@ -87,7 +87,7 @@ const removeGroupFromAllProjects = async (promptGroupId) => {
*
* @param {string} projectId - The ID of the project to update.
* @param {string[]} agentIds - The array of agent IDs to add to the project.
* @returns {Promise<IMongoProject>} The updated project document.
* @returns {Promise<MongoProject>} The updated project document.
*/
const addAgentIdsToProject = async function (projectId, agentIds) {
return await Project.findByIdAndUpdate(
@@ -102,7 +102,7 @@ const addAgentIdsToProject = async function (projectId, agentIds) {
*
* @param {string} projectId - The ID of the project to update.
* @param {string[]} agentIds - The array of agent IDs to remove from the project.
* @returns {Promise<IMongoProject>} The updated project document.
* @returns {Promise<MongoProject>} The updated project document.
*/
const removeAgentIdsFromProject = async function (projectId, agentIds) {
return await Project.findByIdAndUpdate(

View File

@@ -1,4 +1,3 @@
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { SystemRoles, SystemCategories, Constants } = require('librechat-data-provider');
const {
@@ -7,13 +6,10 @@ const {
removeGroupIdsFromProject,
removeGroupFromAllProjects,
} = require('./Project');
const { promptGroupSchema, promptSchema } = require('@librechat/data-schemas');
const { Prompt, PromptGroup } = require('./schema/promptSchema');
const { escapeRegExp } = require('~/server/utils');
const { logger } = require('~/config');
const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);
const Prompt = mongoose.model('Prompt', promptSchema);
/**
* Create a pipeline for the aggregation to get prompt groups
* @param {Object} query

View File

@@ -1,4 +1,3 @@
const mongoose = require('mongoose');
const {
CacheKeys,
SystemRoles,
@@ -13,11 +12,9 @@ const {
temporaryChatPermissionsSchema,
} = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const { roleSchema } = require('@librechat/data-schemas');
const Role = require('~/models/schema/roleSchema');
const { logger } = require('~/config');
const Role = mongoose.model('Role', roleSchema);
/**
* Retrieve a role by name and convert the found role document to a plain object.
* If the role with the given name doesn't exist and the name is a system defined role, create it and return the lean version.
@@ -171,7 +168,6 @@ const initializeRoles = async function () {
}
};
module.exports = {
Role,
getRoleByName,
initializeRoles,
updateRoleByName,

View File

@@ -8,7 +8,7 @@ const {
} = require('librechat-data-provider');
const { updateAccessPermissions, initializeRoles } = require('~/models/Role');
const getLogStores = require('~/cache/getLogStores');
const { Role } = require('~/models/Role');
const Role = require('~/models/schema/roleSchema');
// Mock the cache
jest.mock('~/cache/getLogStores', () => {

View File

@@ -1,7 +1,7 @@
const mongoose = require('mongoose');
const signPayload = require('~/server/services/signPayload');
const { hashToken } = require('~/server/utils/crypto');
const { sessionSchema } = require('@librechat/data-schemas');
const sessionSchema = require('./schema/session');
const { logger } = require('~/config');
const Session = mongoose.model('Session', sessionSchema);

View File

@@ -1,9 +1,7 @@
const mongoose = require('mongoose');
const { nanoid } = require('nanoid');
const { Constants } = require('librechat-data-provider');
const { Conversation } = require('~/models/Conversation');
const { shareSchema } = require('@librechat/data-schemas');
const SharedLink = mongoose.model('SharedLink', shareSchema);
const SharedLink = require('./schema/shareSchema');
const { getMessages } = require('./Message');
const logger = require('~/config/winston');

View File

@@ -1,6 +1,6 @@
const mongoose = require('mongoose');
const { encryptV2 } = require('~/server/utils/crypto');
const { tokenSchema } = require('@librechat/data-schemas');
const tokenSchema = require('./schema/tokenSchema');
const { logger } = require('~/config');
/**

View File

@@ -1,11 +1,9 @@
const mongoose = require('mongoose');
const { toolCallSchema } = require('@librechat/data-schemas');
const ToolCall = mongoose.model('ToolCall', toolCallSchema);
const ToolCall = require('./schema/toolCallSchema');
/**
* Create a new tool call
* @param {IToolCallData} toolCallData - The tool call data
* @returns {Promise<IToolCallData>} The created tool call document
* @param {ToolCallData} toolCallData - The tool call data
* @returns {Promise<ToolCallData>} The created tool call document
*/
async function createToolCall(toolCallData) {
try {
@@ -18,7 +16,7 @@ async function createToolCall(toolCallData) {
/**
* Get a tool call by ID
* @param {string} id - The tool call document ID
* @returns {Promise<IToolCallData|null>} The tool call document or null if not found
* @returns {Promise<ToolCallData|null>} The tool call document or null if not found
*/
async function getToolCallById(id) {
try {
@@ -46,7 +44,7 @@ async function getToolCallsByMessage(messageId, userId) {
* Get tool calls by conversation ID and user
* @param {string} conversationId - The conversation ID
* @param {string} userId - The user's ObjectId
* @returns {Promise<IToolCallData[]>} Array of tool call documents
* @returns {Promise<ToolCallData[]>} Array of tool call documents
*/
async function getToolCallsByConvo(conversationId, userId) {
try {
@@ -59,8 +57,8 @@ async function getToolCallsByConvo(conversationId, userId) {
/**
* Update a tool call
* @param {string} id - The tool call document ID
* @param {Partial<IToolCallData>} updateData - The data to update
* @returns {Promise<IToolCallData|null>} The updated tool call document or null if not found
* @param {Partial<ToolCallData>} updateData - The data to update
* @returns {Promise<ToolCallData|null>} The updated tool call document or null if not found
*/
async function updateToolCall(id, updateData) {
try {

View File

@@ -1,6 +1,6 @@
const mongoose = require('mongoose');
const { isEnabled } = require('~/server/utils/handleText');
const { transactionSchema } = require('@librechat/data-schemas');
const transactionSchema = require('./schema/transaction');
const { getMultiplier, getCacheMultiplier } = require('./tx');
const { logger } = require('~/config');
const Balance = require('./Balance');

View File

@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
const { userSchema } = require('@librechat/data-schemas');
const userSchema = require('~/models/schema/userSchema');
const User = mongoose.model('User', userSchema);

View File

@@ -4,28 +4,9 @@ const { MeiliSearch } = require('meilisearch');
const { cleanUpPrimaryKeyValue } = require('~/lib/utils/misc');
const logger = require('~/config/meiliLogger');
// Environment flags
/**
* Flag to indicate if search is enabled based on environment variables.
* @type {boolean}
*/
const searchEnabled = process.env.SEARCH && process.env.SEARCH.toLowerCase() === 'true';
/**
* Flag to indicate if MeiliSearch is enabled based on required environment variables.
* @type {boolean}
*/
const meiliEnabled = process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY && searchEnabled;
/**
* Validates the required options for configuring the mongoMeili plugin.
*
* @param {Object} options - The configuration options.
* @param {string} options.host - The MeiliSearch host.
* @param {string} options.apiKey - The MeiliSearch API key.
* @param {string} options.indexName - The name of the index.
* @throws {Error} Throws an error if any required option is missing.
*/
const validateOptions = function (options) {
const requiredKeys = ['host', 'apiKey', 'indexName'];
requiredKeys.forEach((key) => {
@@ -35,64 +16,53 @@ const validateOptions = function (options) {
});
};
/**
* Factory function to create a MeiliMongooseModel class which extends a Mongoose model.
* This class contains static and instance methods to synchronize and manage the MeiliSearch index
* corresponding to the MongoDB collection.
*
* @param {Object} config - Configuration object.
* @param {Object} config.index - The MeiliSearch index object.
* @param {Array<string>} config.attributesToIndex - List of attributes to index.
* @returns {Function} A class definition that will be loaded into the Mongoose schema.
*/
// const createMeiliMongooseModel = function ({ index, indexName, client, attributesToIndex }) {
const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
// The primary key is assumed to be the first attribute in the attributesToIndex array.
const primaryKey = attributesToIndex[0];
// MeiliMongooseModel is of type Mongoose.Model
class MeiliMongooseModel {
/**
* Synchronizes the data between the MongoDB collection and the MeiliSearch index.
* `syncWithMeili`: synchronizes the data between a MongoDB collection and a MeiliSearch index,
* only triggered if there's ever a discrepancy determined by `api\lib\db\indexSync.js`.
*
* The synchronization process involves:
* 1. Fetching all documents from the MongoDB collection and MeiliSearch index.
* 2. Comparing documents from both sources.
* 3. Deleting documents from MeiliSearch that no longer exist in MongoDB.
* 4. Adding documents to MeiliSearch that exist in MongoDB but not in the index.
* 5. Updating documents in MeiliSearch if key fields (such as `text` or `title`) differ.
* 6. Updating the `_meiliIndex` field in MongoDB to indicate the indexing status.
* 1. Fetches all documents from the MongoDB collection and the MeiliSearch index.
* 2. Compares the documents from both sources.
* 3. If a document exists in MeiliSearch but not in MongoDB, it's deleted from MeiliSearch.
* 4. If a document exists in MongoDB but not in MeiliSearch, it's added to MeiliSearch.
* 5. If a document exists in both but has different `text` or `title` fields (depending on the `primaryKey`), it's updated in MeiliSearch.
* 6. After all operations, it updates the `_meiliIndex` field in MongoDB to indicate whether the document is indexed in MeiliSearch.
*
* Note: The function processes documents in batches because MeiliSearch's
* `index.getDocuments` requires an exact limit and `index.addDocuments` does not handle
* partial failures in a batch.
* Note: This strategy does not use batch operations for Meilisearch as the `index.addDocuments` will discard
* the entire batch if there's an error with one document, and will not throw an error if there's an issue.
* Also, `index.getDocuments` needs an exact limit on the amount of documents to return, so we build the map in batches.
*
* @returns {Promise<void>} Resolves when the synchronization is complete.
* @returns {Promise} A promise that resolves when the synchronization is complete.
*
* @throws {Error} Throws an error if there's an issue with adding a document to MeiliSearch.
*/
static async syncWithMeili() {
try {
let moreDocuments = true;
// Retrieve all MongoDB documents from the collection as plain JavaScript objects.
const mongoDocuments = await this.find().lean();
const format = (doc) => _.pick(doc, attributesToIndex);
// Helper function to format a document by selecting only the attributes to index
// and omitting keys starting with '$'.
const format = (doc) =>
_.omitBy(_.pick(doc, attributesToIndex), (v, k) => k.startsWith('$'));
// Build a map of MongoDB documents for quick lookup based on the primary key.
// Prepare for comparison
const mongoMap = new Map(mongoDocuments.map((doc) => [doc[primaryKey], format(doc)]));
const indexMap = new Map();
let offset = 0;
const batchSize = 1000;
// Fetch documents from the MeiliSearch index in batches.
while (moreDocuments) {
const batch = await index.getDocuments({ limit: batchSize, offset });
if (batch.results.length === 0) {
moreDocuments = false;
}
for (const doc of batch.results) {
indexMap.set(doc[primaryKey], format(doc));
}
offset += batchSize;
}
@@ -100,12 +70,13 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
const updateOps = [];
// Process documents present in the MeiliSearch index.
// Iterate over Meili index documents
for (const [id, doc] of indexMap) {
const update = {};
update[primaryKey] = id;
if (mongoMap.has(id)) {
// If document exists in MongoDB, check for discrepancies in key fields.
// Case: Update
// If document also exists in MongoDB, would be update case
if (
(doc.text && doc.text !== mongoMap.get(id).text) ||
(doc.title && doc.title !== mongoMap.get(id).title)
@@ -121,7 +92,8 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
await index.addDocuments([doc]);
}
} else {
// If the document does not exist in MongoDB, delete it from MeiliSearch.
// Case: Delete
// If document does not exist in MongoDB, it's a delete case from the meili index
await index.deleteDocument(id);
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: false } } },
@@ -129,25 +101,24 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
}
}
// Process documents present in MongoDB.
// Iterate over MongoDB documents
for (const [id, doc] of mongoMap) {
const update = {};
update[primaryKey] = id;
// If the document is missing in the Meili index, add it.
// Case: Insert
// If document does not exist in Meili Index, it's an insert case
if (!indexMap.has(id)) {
await index.addDocuments([doc]);
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
});
} else if (doc._meiliIndex === false) {
// If the document exists but is marked as not indexed, update the flag.
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
});
}
}
// Execute bulk update operations in MongoDB to update the _meiliIndex flags.
if (updateOps.length > 0) {
await this.collection.bulkWrite(updateOps);
logger.debug(
@@ -161,47 +132,34 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
}
}
/**
* Updates settings for the MeiliSearch index.
*
* @param {Object} settings - The settings to update on the MeiliSearch index.
* @returns {Promise<Object>} Promise resolving to the update result.
*/
// Set one or more settings of the meili index
static async setMeiliIndexSettings(settings) {
return await index.updateSettings(settings);
}
/**
* Searches the MeiliSearch index and optionally populates the results with data from MongoDB.
*
* @param {string} q - The search query.
* @param {Object} params - Additional search parameters for MeiliSearch.
* @param {boolean} populate - Whether to populate search hits with full MongoDB documents.
* @returns {Promise<Object>} The search results with populated hits if requested.
*/
// Search the index
static async meiliSearch(q, params, populate) {
const data = await index.search(q, params);
// Populate hits with content from mongodb
if (populate) {
// Build a query using the primary key values from the search hits.
// Find objects into mongodb matching `objectID` from Meili search
const query = {};
// query[primaryKey] = { $in: _.map(data.hits, primaryKey) };
query[primaryKey] = _.map(data.hits, (hit) => cleanUpPrimaryKeyValue(hit[primaryKey]));
// logger.debug('query', query);
const hitsFromMongoose = await this.find(
query,
_.reduce(
this.schema.obj,
function (results, value, key) {
return { ...results, [key]: 1 };
},
{ _id: 1, __v: 1 },
),
).lean();
// Build a projection object, including only keys that do not start with '$'.
const projection = Object.keys(this.schema.obj).reduce(
(results, key) => {
if (!key.startsWith('$')) {
results[key] = 1;
}
return results;
},
{ _id: 1, __v: 1 },
);
// Retrieve the full documents from MongoDB.
const hitsFromMongoose = await this.find(query, projection).lean();
// Merge the MongoDB documents with the search hits.
// Add additional data from mongodb into Meili search hits
const populatedHits = data.hits.map(function (hit) {
const query = {};
query[primaryKey] = hit[primaryKey];
@@ -218,21 +176,10 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
return data;
}
/**
* Preprocesses the current document for indexing.
*
* This method:
* - Picks only the defined attributes to index.
* - Omits any keys starting with '$'.
* - Replaces pipe characters ('|') in `conversationId` with '--'.
* - Extracts and concatenates text from an array of content items.
*
* @returns {Object} The preprocessed object ready for indexing.
*/
preprocessObjectForIndex() {
const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
k.startsWith('$'),
);
const object = _.pick(this.toJSON(), attributesToIndex);
// NOTE: MeiliSearch does not allow | in the primary key, so we replace it with -- for Bing convoIds
// object.conversationId = object.conversationId.replace(/\|/g, '-');
if (object.conversationId && object.conversationId.includes('|')) {
object.conversationId = object.conversationId.replace(/\|/g, '--');
}
@@ -248,53 +195,32 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
return object;
}
/**
* Adds the current document to the MeiliSearch index.
*
* The method preprocesses the document, adds it to MeiliSearch, and then updates
* the MongoDB document's `_meiliIndex` flag to true.
*
* @returns {Promise<void>}
*/
// Push new document to Meili
async addObjectToMeili() {
const object = this.preprocessObjectForIndex();
try {
// logger.debug('Adding document to Meili', object);
await index.addDocuments([object]);
} catch (error) {
// Error handling can be enhanced as needed.
logger.error('[addObjectToMeili] Error adding document to Meili', error);
// logger.debug('Error adding document to Meili');
// logger.error(error);
}
await this.collection.updateMany({ _id: this._id }, { $set: { _meiliIndex: true } });
}
/**
* Updates the current document in the MeiliSearch index.
*
* @returns {Promise<void>}
*/
// Update an existing document in Meili
async updateObjectToMeili() {
const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
k.startsWith('$'),
);
const object = _.pick(this.toJSON(), attributesToIndex);
await index.updateDocuments([object]);
}
/**
* Deletes the current document from the MeiliSearch index.
*
* @returns {Promise<void>}
*/
// Delete a document from Meili
async deleteObjectFromMeili() {
await index.deleteDocument(this._id);
}
/**
* Post-save hook to synchronize the document with MeiliSearch.
*
* If the document is already indexed (i.e. `_meiliIndex` is true), it updates it;
* otherwise, it adds the document to the index.
*/
// * schema.post('save')
postSaveHook() {
if (this._meiliIndex) {
this.updateObjectToMeili();
@@ -303,24 +229,14 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
}
}
/**
* Post-update hook to update the document in MeiliSearch.
*
* This hook is triggered after a document update, ensuring that changes are
* propagated to the MeiliSearch index if the document is indexed.
*/
// * schema.post('update')
postUpdateHook() {
if (this._meiliIndex) {
this.updateObjectToMeili();
}
}
/**
* Post-remove hook to delete the document from MeiliSearch.
*
* This hook is triggered after a document is removed, ensuring that the document
* is also removed from the MeiliSearch index if it was previously indexed.
*/
// * schema.post('remove')
postRemoveHook() {
if (this._meiliIndex) {
this.deleteObjectFromMeili();
@@ -331,27 +247,11 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
return MeiliMongooseModel;
};
/**
* Mongoose plugin to synchronize MongoDB collections with a MeiliSearch index.
*
* This plugin:
* - Validates the provided options.
* - Adds a `_meiliIndex` field to the schema to track indexing status.
* - Sets up a MeiliSearch client and creates an index if it doesn't already exist.
* - Loads class methods for syncing, searching, and managing documents in MeiliSearch.
* - Registers Mongoose hooks (post-save, post-update, post-remove, etc.) to maintain index consistency.
*
* @param {mongoose.Schema} schema - The Mongoose schema to which the plugin is applied.
* @param {Object} options - Configuration options.
* @param {string} options.host - The MeiliSearch host.
* @param {string} options.apiKey - The MeiliSearch API key.
* @param {string} options.indexName - The name of the MeiliSearch index.
* @param {string} options.primaryKey - The primary key field for indexing.
*/
module.exports = function mongoMeili(schema, options) {
// Validate Options for mongoMeili
validateOptions(options);
// Add _meiliIndex field to the schema to track if a document has been indexed in MeiliSearch.
// Add meiliIndex to schema
schema.add({
_meiliIndex: {
type: Boolean,
@@ -363,77 +263,69 @@ module.exports = function mongoMeili(schema, options) {
const { host, apiKey, indexName, primaryKey } = options;
// Setup the MeiliSearch client.
// Setup MeiliSearch Client
const client = new MeiliSearch({ host, apiKey });
// Create the index asynchronously if it doesn't exist.
// Asynchronously create the index
client.createIndex(indexName, { primaryKey });
// Setup the MeiliSearch index for this schema.
// Setup the index to search for this schema
const index = client.index(indexName);
// Collect attributes from the schema that should be indexed.
const attributesToIndex = [
..._.reduce(
schema.obj,
function (results, value, key) {
return value.meiliIndex ? [...results, key] : results;
// }, []), '_id'];
},
[],
),
];
// Load the class methods into the schema.
schema.loadClass(createMeiliMongooseModel({ index, indexName, client, attributesToIndex }));
// Register Mongoose hooks to synchronize with MeiliSearch.
// Post-save: synchronize after a document is saved.
// Register hooks
schema.post('save', function (doc) {
doc.postSaveHook();
});
// Post-update: synchronize after a document is updated.
schema.post('update', function (doc) {
doc.postUpdateHook();
});
// Post-remove: synchronize after a document is removed.
schema.post('remove', function (doc) {
doc.postRemoveHook();
});
// Pre-deleteMany hook: remove corresponding documents from MeiliSearch when multiple documents are deleted.
schema.pre('deleteMany', async function (next) {
if (!meiliEnabled) {
return next();
next();
}
try {
// Check if the schema has a "messages" field to determine if it's a conversation schema.
if (Object.prototype.hasOwnProperty.call(schema.obj, 'messages')) {
const convoIndex = client.index('convos');
const deletedConvos = await mongoose.model('Conversation').find(this._conditions).lean();
const promises = deletedConvos.map((convo) =>
convoIndex.deleteDocument(convo.conversationId),
);
let promises = [];
for (const convo of deletedConvos) {
promises.push(convoIndex.deleteDocument(convo.conversationId));
}
await Promise.all(promises);
}
// Check if the schema has a "messageId" field to determine if it's a message schema.
if (Object.prototype.hasOwnProperty.call(schema.obj, 'messageId')) {
const messageIndex = client.index('messages');
const deletedMessages = await mongoose.model('Message').find(this._conditions).lean();
const promises = deletedMessages.map((message) =>
messageIndex.deleteDocument(message.messageId),
);
let promises = [];
for (const message of deletedMessages) {
promises.push(messageIndex.deleteDocument(message.messageId));
}
await Promise.all(promises);
}
return next();
} catch (error) {
if (meiliEnabled) {
logger.error(
'[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion. Next startup may be slow due to syncing.',
'[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion, next startup may be slow due to syncing',
error,
);
}
@@ -441,19 +333,17 @@ module.exports = function mongoMeili(schema, options) {
}
});
// Post-findOneAndUpdate hook: update MeiliSearch index after a document is updated via findOneAndUpdate.
schema.post('findOneAndUpdate', async function (doc) {
if (!meiliEnabled) {
return;
}
// If the document is unfinished, do not update the index.
if (doc.unfinished) {
return;
}
let meiliDoc;
// For conversation documents, try to fetch the document from the "convos" index.
// Doc is a Conversation
if (doc.messages) {
try {
meiliDoc = await client.index('convos').getDocument(doc.conversationId);
@@ -466,12 +356,10 @@ module.exports = function mongoMeili(schema, options) {
}
}
// If the MeiliSearch document exists and the title is unchanged, do nothing.
if (meiliDoc && meiliDoc.title === doc.title) {
return;
}
// Otherwise, trigger a post-save hook to synchronize the document.
doc.postSaveHook();
});
};
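Registration sketch for the plugin above: a schema opts fields into indexing with meiliIndex: true, then applies the plugin with connection options (values here are placeholders):

const mongoose = require('mongoose');
const mongoMeili = require('./plugins/mongoMeili');

const demoSchema = new mongoose.Schema({
  conversationId: { type: String, meiliIndex: true },
  title: { type: String, meiliIndex: true },
});

demoSchema.plugin(mongoMeili, {
  host: process.env.MEILI_HOST,
  apiKey: process.env.MEILI_MASTER_KEY,
  indexName: 'demo', // placeholder index name
  primaryKey: 'conversationId',
});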

View File

@@ -0,0 +1,60 @@
const mongoose = require('mongoose');
const { Schema } = mongoose;
const AuthSchema = new Schema(
{
authorization_type: String,
custom_auth_header: String,
type: {
type: String,
enum: ['service_http', 'oauth', 'none'],
},
authorization_content_type: String,
authorization_url: String,
client_url: String,
scope: String,
token_exchange_method: {
type: String,
enum: ['default_post', 'basic_auth_header', null],
},
},
{ _id: false },
);
const actionSchema = new Schema({
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
index: true,
required: true,
},
action_id: {
type: String,
index: true,
required: true,
},
type: {
type: String,
default: 'action_prototype',
},
settings: Schema.Types.Mixed,
agent_id: String,
assistant_id: String,
metadata: {
api_key: String, // private, encrypted
auth: AuthSchema,
domain: {
type: String,
required: true,
},
// json_schema: Schema.Types.Mixed,
privacy_policy_url: String,
raw_spec: String,
oauth_client_id: String, // private, encrypted
oauth_client_secret: String, // private, encrypted
},
});
// }, { minimize: false }); // Prevent removal of empty objects
module.exports = actionSchema;

View File

@@ -1,33 +1,6 @@
import { Schema, Document, Types } from 'mongoose';
export interface IAgent extends Omit<Document, 'model'> {
id: string;
name?: string;
description?: string;
instructions?: string;
avatar?: {
filepath: string;
source: string;
};
provider: string;
model: string;
model_parameters?: Record<string, unknown>;
artifacts?: string;
access_level?: number;
tools?: string[];
tool_kwargs?: Array<unknown>;
actions?: string[];
author: Types.ObjectId;
authorName?: string;
hide_sequential_outputs?: boolean;
end_after_tools?: boolean;
agent_ids?: string[];
isCollaborative?: boolean;
conversation_starters?: string[];
tool_resources?: unknown;
projectIds?: Types.ObjectId[];
}
const mongoose = require('mongoose');
const agentSchema = new Schema<IAgent>(
const agentSchema = mongoose.Schema(
{
id: {
type: String,
@@ -45,7 +18,10 @@ const agentSchema = new Schema<IAgent>(
type: String,
},
avatar: {
type: Schema.Types.Mixed,
type: {
filepath: String,
source: String,
},
default: undefined,
},
provider: {
@@ -70,14 +46,14 @@ const agentSchema = new Schema<IAgent>(
default: undefined,
},
tool_kwargs: {
type: [{ type: Schema.Types.Mixed }],
type: [{ type: mongoose.Schema.Types.Mixed }],
},
actions: {
type: [String],
default: undefined,
},
author: {
type: Schema.Types.ObjectId,
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
required: true,
},
@@ -103,11 +79,11 @@ const agentSchema = new Schema<IAgent>(
default: [],
},
tool_resources: {
type: Schema.Types.Mixed,
type: mongoose.Schema.Types.Mixed,
default: {},
},
projectIds: {
type: [Schema.Types.ObjectId],
type: [mongoose.Schema.Types.ObjectId],
ref: 'Project',
index: true,
},
@@ -117,4 +93,4 @@ const agentSchema = new Schema<IAgent>(
},
);
export default agentSchema;
module.exports = agentSchema;

View File

@@ -1,23 +1,9 @@
import { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');
export interface IAssistant extends Document {
user: Types.ObjectId;
assistant_id: string;
avatar?: {
filepath: string;
source: string;
};
conversation_starters?: string[];
access_level?: number;
file_ids?: string[];
actions?: string[];
append_current_datetime?: boolean;
}
const assistantSchema = new Schema<IAssistant>(
const assistantSchema = mongoose.Schema(
{
user: {
type: Schema.Types.ObjectId,
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
required: true,
},
@@ -27,7 +13,10 @@ const assistantSchema = new Schema<IAssistant>(
required: true,
},
avatar: {
type: Schema.Types.Mixed,
type: {
filepath: String,
source: String,
},
default: undefined,
},
conversation_starters: {
@@ -49,4 +38,4 @@ const assistantSchema = new Schema<IAssistant>(
},
);
export default assistantSchema;
module.exports = assistantSchema;

View File

@@ -0,0 +1,17 @@
const mongoose = require('mongoose');
const balanceSchema = mongoose.Schema({
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
index: true,
required: true,
},
// 1000 tokenCredits = 1 mill ($0.001 USD)
tokenCredits: {
type: Number,
default: 0,
},
});
module.exports = balanceSchema;
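As the comment notes, tokenCredits are mills: 1000 credits equal $0.001 USD, so one credit is a millionth of a dollar. A one-line conversion for illustration:

const creditsToUSD = (tokenCredits) => tokenCredits / 1_000_000;
console.log(creditsToUSD(1000)); // 0.001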

View File

@@ -1,15 +1,6 @@
import { Schema, Document } from 'mongoose';
const mongoose = require('mongoose');
export interface IBanner extends Document {
bannerId: string;
message: string;
displayFrom: Date;
displayTo?: Date;
type: 'banner' | 'popup';
isPublic: boolean;
}
const bannerSchema = new Schema<IBanner>(
const bannerSchema = mongoose.Schema(
{
bannerId: {
type: String,
@@ -37,7 +28,9 @@ const bannerSchema = new Schema<IBanner>(
default: false,
},
},
{ timestamps: true },
);
export default bannerSchema;
const Banner = mongoose.model('Banner', bannerSchema);
module.exports = Banner;

View File

@@ -0,0 +1,19 @@
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const categoriesSchema = new Schema({
label: {
type: String,
required: true,
unique: true,
},
value: {
type: String,
required: true,
unique: true,
},
});
const categories = mongoose.model('categories', categoriesSchema);
module.exports = { Categories: categories };

View File

@@ -0,0 +1,32 @@
const mongoose = require('mongoose');
const conversationTagSchema = mongoose.Schema(
{
tag: {
type: String,
index: true,
},
user: {
type: String,
index: true,
},
description: {
type: String,
index: true,
},
count: {
type: Number,
default: 0,
},
position: {
type: Number,
default: 0,
index: true,
},
},
{ timestamps: true },
);
conversationTagSchema.index({ tag: 1, user: 1 }, { unique: true });
module.exports = mongoose.model('ConversationTag', conversationTagSchema);

View File

@@ -1,7 +1,46 @@
const mongoose = require('mongoose');
const mongoMeili = require('../plugins/mongoMeili');
const { convoSchema } = require('@librechat/data-schemas');
const { conversationPreset } = require('./defaults');
const convoSchema = mongoose.Schema(
{
conversationId: {
type: String,
unique: true,
required: true,
index: true,
meiliIndex: true,
},
title: {
type: String,
default: 'New Chat',
meiliIndex: true,
},
user: {
type: String,
index: true,
},
messages: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Message' }],
agentOptions: {
type: mongoose.Schema.Types.Mixed,
},
...conversationPreset,
agent_id: {
type: String,
},
tags: {
type: [String],
default: [],
meiliIndex: true,
},
files: {
type: [String],
},
expiredAt: {
type: Date,
},
},
{ timestamps: true },
);
if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
convoSchema.plugin(mongoMeili, {
@@ -13,6 +52,10 @@ if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
});
}
convoSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
convoSchema.index({ createdAt: 1, updatedAt: 1 });
convoSchema.index({ conversationId: 1, user: 1 }, { unique: true });
const Conversation = mongoose.models.Conversation || mongoose.model('Conversation', convoSchema);
module.exports = Conversation;
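Worth noting in the indexes above: `expireAfterSeconds: 0` tells MongoDB's TTL monitor to delete a conversation at the exact time stored in `expiredAt`, rather than at a fixed offset from it. A small sketch of scheduling expiry; the require path and helper name are assumed:

const Conversation = require('./Conversation'); // hypothetical path to the model above

// With expireAfterSeconds: 0, the stored Date itself is the deletion time
// (subject to the ~60s cadence of the TTL sweep).
async function expireConversation(conversationId, user, at = new Date()) {
  return Conversation.updateOne({ conversationId, user }, { $set: { expiredAt: at } });
}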

View File

@@ -1,7 +1,6 @@
import { Schema } from 'mongoose';
const mongoose = require('mongoose');
// @ts-ignore
export const conversationPreset = {
const conversationPreset = {
// endpoint: [azureOpenAI, openAI, anthropic, chatGPTBrowser]
endpoint: {
type: String,
@@ -27,7 +26,7 @@ export const conversationPreset = {
required: false,
},
// for google only
examples: { type: [{ type: Schema.Types.Mixed }], default: undefined },
examples: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
modelLabel: {
type: String,
required: false,
@@ -57,10 +56,6 @@ export const conversationPreset = {
type: Number,
required: false,
},
maxTokens: {
type: Number,
required: false,
},
presence_penalty: {
type: Number,
required: false,
@@ -136,3 +131,7 @@ export const conversationPreset = {
type: String,
},
};
module.exports = {
conversationPreset,
};

View File

@@ -0,0 +1,111 @@
const { FileSources } = require('librechat-data-provider');
const mongoose = require('mongoose');
/**
* @typedef {Object} MongoFile
* @property {ObjectId} [_id] - MongoDB Document ID
* @property {number} [__v] - MongoDB Version Key
* @property {ObjectId} user - User ID
* @property {string} [conversationId] - Optional conversation ID
* @property {string} file_id - File identifier
* @property {string} [temp_file_id] - Temporary File identifier
* @property {number} bytes - Size of the file in bytes
* @property {string} filename - Name of the file
* @property {string} filepath - Location of the file
* @property {'file'} object - Type of object, always 'file'
* @property {string} type - Type of file
* @property {number} [usage=0] - Number of uses of the file
* @property {string} [context] - Context of the file origin
* @property {boolean} [embedded=false] - Whether or not the file is embedded in vector db
* @property {string} [model] - The model to identify the group region of the file (for Azure OpenAI hosting)
* @property {string} [source] - The source of the file (e.g., from FileSources)
* @property {number} [width] - Optional width of the file
* @property {number} [height] - Optional height of the file
* @property {Object} [metadata] - Metadata related to the file
* @property {string} [metadata.fileIdentifier] - Unique identifier for the file in metadata
* @property {Date} [expiresAt] - Optional expiration date of the file
* @property {Date} [createdAt] - Date when the file was created
* @property {Date} [updatedAt] - Date when the file was updated
*/
/** @type {MongooseSchema<MongoFile>} */
const fileSchema = mongoose.Schema(
{
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
index: true,
required: true,
},
conversationId: {
type: String,
ref: 'Conversation',
index: true,
},
file_id: {
type: String,
// required: true,
index: true,
},
temp_file_id: {
type: String,
// required: true,
},
bytes: {
type: Number,
required: true,
},
filename: {
type: String,
required: true,
},
filepath: {
type: String,
required: true,
},
object: {
type: String,
required: true,
default: 'file',
},
embedded: {
type: Boolean,
},
type: {
type: String,
required: true,
},
context: {
type: String,
// required: true,
},
usage: {
type: Number,
required: true,
default: 0,
},
source: {
type: String,
default: FileSources.local,
},
model: {
type: String,
},
width: Number,
height: Number,
metadata: {
fileIdentifier: String,
},
expiresAt: {
type: Date,
expires: 3600, // 1 hour in seconds
},
},
{
timestamps: true,
},
);
fileSchema.index({ createdAt: 1, updatedAt: 1 });
module.exports = fileSchema;
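In contrast to the conversation schema's `expireAfterSeconds: 0` index, `expiresAt` above uses `expires: 3600`, so a file document is removed roughly one hour after the stored timestamp. A hypothetical cleanup helper, assuming a File model compiled from this schema:

const mongoose = require('mongoose');
const fileSchema = require('./file'); // hypothetical path to the schema above

const File = mongoose.models.File || mongoose.model('File', fileSchema);

// The TTL index (expires: 3600) deletes the document ~1 hour after expiresAt.
async function markFileTemporary(file_id) {
  return File.updateOne({ file_id }, { $set: { expiresAt: new Date() } });
}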

View File

@@ -1,13 +1,6 @@
import mongoose, { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');
export interface IKey extends Document {
userId: Types.ObjectId;
name: string;
value: string;
expiresAt?: Date;
}
const keySchema: Schema<IKey> = new Schema({
const keySchema = mongoose.Schema({
userId: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
@@ -28,4 +21,4 @@ const keySchema: Schema<IKey> = new Schema({
keySchema.index({ expiresAt: 1 }, { expireAfterSeconds: 0 });
export default keySchema;
module.exports = keySchema;

View File

@@ -1,6 +1,145 @@
const mongoose = require('mongoose');
const mongoMeili = require('~/models/plugins/mongoMeili');
const { messageSchema } = require('@librechat/data-schemas');
const messageSchema = mongoose.Schema(
{
messageId: {
type: String,
unique: true,
required: true,
index: true,
meiliIndex: true,
},
conversationId: {
type: String,
index: true,
required: true,
meiliIndex: true,
},
user: {
type: String,
index: true,
required: true,
default: null,
},
model: {
type: String,
default: null,
},
endpoint: {
type: String,
},
conversationSignature: {
type: String,
},
clientId: {
type: String,
},
invocationId: {
type: Number,
},
parentMessageId: {
type: String,
},
tokenCount: {
type: Number,
},
summaryTokenCount: {
type: Number,
},
sender: {
type: String,
meiliIndex: true,
},
text: {
type: String,
meiliIndex: true,
},
summary: {
type: String,
},
isCreatedByUser: {
type: Boolean,
required: true,
default: false,
},
unfinished: {
type: Boolean,
default: false,
},
error: {
type: Boolean,
default: false,
},
finish_reason: {
type: String,
},
_meiliIndex: {
type: Boolean,
required: false,
select: false,
default: false,
},
files: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
plugin: {
type: {
latest: {
type: String,
required: false,
},
inputs: {
type: [mongoose.Schema.Types.Mixed],
required: false,
default: undefined,
},
outputs: {
type: String,
required: false,
},
},
default: undefined,
},
plugins: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
content: {
type: [{ type: mongoose.Schema.Types.Mixed }],
default: undefined,
meiliIndex: true,
},
thread_id: {
type: String,
},
/* frontend components */
iconURL: {
type: String,
},
attachments: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
/*
attachments: {
type: [
{
file_id: String,
filename: String,
filepath: String,
expiresAt: Date,
width: Number,
height: Number,
type: String,
conversationId: String,
messageId: {
type: String,
required: true,
},
toolCallId: String,
},
],
default: undefined,
},
*/
expiredAt: {
type: Date,
},
},
{ timestamps: true },
);
if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
messageSchema.plugin(mongoMeili, {
@@ -10,7 +149,11 @@ if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
primaryKey: 'messageId',
});
}
messageSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
messageSchema.index({ createdAt: 1 });
messageSchema.index({ messageId: 1, user: 1 }, { unique: true });
/** @type {mongoose.Model<TMessage>} */
const Message = mongoose.models.Message || mongoose.model('Message', messageSchema);
module.exports = Message;

View File

@@ -1,5 +1,25 @@
const mongoose = require('mongoose');
const { pluginAuthSchema } = require('@librechat/data-schemas');
const pluginAuthSchema = mongoose.Schema(
{
authField: {
type: String,
required: true,
},
value: {
type: String,
required: true,
},
userId: {
type: String,
required: true,
},
pluginKey: {
type: String,
},
},
{ timestamps: true },
);
const PluginAuth = mongoose.models.Plugin || mongoose.model('PluginAuth', pluginAuthSchema);

View File

@@ -1,5 +1,36 @@
const mongoose = require('mongoose');
const { presetSchema } = require('@librechat/data-schemas');
const { conversationPreset } = require('./defaults');
const presetSchema = mongoose.Schema(
{
presetId: {
type: String,
unique: true,
required: true,
index: true,
},
title: {
type: String,
default: 'New Chat',
meiliIndex: true,
},
user: {
type: String,
default: null,
},
defaultPreset: {
type: Boolean,
},
order: {
type: Number,
},
...conversationPreset,
agentOptions: {
type: mongoose.Schema.Types.Mixed,
default: null,
},
},
{ timestamps: true },
);
const Preset = mongoose.models.Preset || mongoose.model('Preset', presetSchema);

View File

@@ -0,0 +1,35 @@
const { Schema } = require('mongoose');
/**
* @typedef {Object} MongoProject
* @property {ObjectId} [_id] - MongoDB Document ID
* @property {string} name - The name of the project
* @property {ObjectId[]} promptGroupIds - Array of PromptGroup IDs associated with the project
* @property {Date} [createdAt] - Date when the project was created (added by timestamps)
* @property {Date} [updatedAt] - Date when the project was last updated (added by timestamps)
*/
const projectSchema = new Schema(
{
name: {
type: String,
required: true,
index: true,
},
promptGroupIds: {
type: [Schema.Types.ObjectId],
ref: 'PromptGroup',
default: [],
},
agentIds: {
type: [String],
ref: 'Agent',
default: [],
},
},
{
timestamps: true,
},
);
module.exports = projectSchema;

View File

@@ -0,0 +1,118 @@
const mongoose = require('mongoose');
const { Constants } = require('librechat-data-provider');
const Schema = mongoose.Schema;
/**
* @typedef {Object} MongoPromptGroup
* @property {ObjectId} [_id] - MongoDB Document ID
* @property {string} name - The name of the prompt group
* @property {ObjectId} author - The author of the prompt group
* @property {ObjectId[]} [projectIds] - The project IDs the prompt group is associated with
* @property {ObjectId} [productionId=null] - The ID of the prompt currently in production for the group
* @property {string} authorName - The name of the author of the prompt group
* @property {number} [numberOfGenerations=0] - Number of generations the prompt group has
* @property {string} [oneliner=''] - Oneliner description of the prompt group
* @property {string} [category=''] - Category of the prompt group
* @property {string} [command] - Command for the prompt group
* @property {Date} [createdAt] - Date when the prompt group was created (added by timestamps)
* @property {Date} [updatedAt] - Date when the prompt group was last updated (added by timestamps)
*/
const promptGroupSchema = new Schema(
{
name: {
type: String,
required: true,
index: true,
},
numberOfGenerations: {
type: Number,
default: 0,
},
oneliner: {
type: String,
default: '',
},
category: {
type: String,
default: '',
index: true,
},
projectIds: {
type: [Schema.Types.ObjectId],
ref: 'Project',
index: true,
},
productionId: {
type: Schema.Types.ObjectId,
ref: 'Prompt',
required: true,
index: true,
},
author: {
type: Schema.Types.ObjectId,
ref: 'User',
required: true,
index: true,
},
authorName: {
type: String,
required: true,
},
command: {
type: String,
index: true,
validate: {
validator: function (v) {
return v === undefined || v === null || v === '' || /^[a-z0-9-]+$/.test(v);
},
message: (props) =>
`${props.value} is not a valid command. Only lowercase alphanumeric characters and hyphens (-) are allowed.`,
},
maxlength: [
Constants.COMMANDS_MAX_LENGTH,
`Command cannot be longer than ${Constants.COMMANDS_MAX_LENGTH} characters`,
],
},
},
{
timestamps: true,
},
);
const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);
const promptSchema = new Schema(
{
groupId: {
type: Schema.Types.ObjectId,
ref: 'PromptGroup',
required: true,
index: true,
},
author: {
type: Schema.Types.ObjectId,
ref: 'User',
required: true,
},
prompt: {
type: String,
required: true,
},
type: {
type: String,
enum: ['text', 'chat'],
required: true,
},
},
{
timestamps: true,
},
);
const Prompt = mongoose.model('Prompt', promptSchema);
promptSchema.index({ createdAt: 1, updatedAt: 1 });
promptGroupSchema.index({ createdAt: 1, updatedAt: 1 });
module.exports = { Prompt, PromptGroup };
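For reference, the `command` validator above accepts only lowercase alphanumerics and hyphens, or an empty/missing value. A quick standalone check against the same regex; the sample values are illustrative:

const isValidCommand = (v) =>
  v === undefined || v === null || v === '' || /^[a-z0-9-]+$/.test(v);

console.log(isValidCommand('daily-summary')); // true
console.log(isValidCommand('Daily_Summary')); // false: uppercase and underscore
console.log(isValidCommand(''));              // true: empty is allowed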

View File

@@ -1,33 +1,7 @@
import { Schema, Document } from 'mongoose';
import { PermissionTypes, Permissions } from 'librechat-data-provider';
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const mongoose = require('mongoose');
export interface IRole extends Document {
name: string;
[PermissionTypes.BOOKMARKS]?: {
[Permissions.USE]?: boolean;
};
[PermissionTypes.PROMPTS]?: {
[Permissions.SHARED_GLOBAL]?: boolean;
[Permissions.USE]?: boolean;
[Permissions.CREATE]?: boolean;
};
[PermissionTypes.AGENTS]?: {
[Permissions.SHARED_GLOBAL]?: boolean;
[Permissions.USE]?: boolean;
[Permissions.CREATE]?: boolean;
};
[PermissionTypes.MULTI_CONVO]?: {
[Permissions.USE]?: boolean;
};
[PermissionTypes.TEMPORARY_CHAT]?: {
[Permissions.USE]?: boolean;
};
[PermissionTypes.RUN_CODE]?: {
[Permissions.USE]?: boolean;
};
}
const roleSchema: Schema<IRole> = new Schema({
const roleSchema = new mongoose.Schema({
name: {
type: String,
required: true,
@@ -88,4 +62,6 @@ const roleSchema: Schema<IRole> = new Schema({
},
});
export default roleSchema;
const Role = mongoose.model('Role', roleSchema);
module.exports = Role;

View File

@@ -0,0 +1,20 @@
const mongoose = require('mongoose');
const sessionSchema = mongoose.Schema({
refreshTokenHash: {
type: String,
required: true,
},
expiration: {
type: Date,
required: true,
expires: 0,
},
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
required: true,
},
});
module.exports = sessionSchema;

View File

@@ -1,17 +1,6 @@
import mongoose, { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');
export interface ISharedLink extends Document {
conversationId: string;
title?: string;
user?: string;
messages?: Types.ObjectId[];
shareId?: string;
isPublic: boolean;
createdAt?: Date;
updatedAt?: Date;
}
const shareSchema: Schema<ISharedLink> = new Schema(
const shareSchema = mongoose.Schema(
{
conversationId: {
type: String,
@@ -38,4 +27,4 @@ const shareSchema: Schema<ISharedLink> = new Schema(
{ timestamps: true },
);
export default shareSchema;
module.exports = mongoose.model('SharedLink', shareSchema);

View File

@@ -1,17 +1,7 @@
import { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
export interface IToken extends Document {
userId: Types.ObjectId;
email?: string;
type?: string;
identifier?: string;
token: string;
createdAt: Date;
expiresAt: Date;
metadata?: Map<string, unknown>;
}
const tokenSchema: Schema<IToken> = new Schema({
const tokenSchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
required: true,
@@ -20,9 +10,7 @@ const tokenSchema: Schema<IToken> = new Schema({
email: {
type: String,
},
type: {
type: String,
},
type: String,
identifier: {
type: String,
},
@@ -47,4 +35,4 @@ const tokenSchema: Schema<IToken> = new Schema({
tokenSchema.index({ expiresAt: 1 }, { expireAfterSeconds: 0 });
export default tokenSchema;
module.exports = tokenSchema;

View File

@@ -0,0 +1,54 @@
const mongoose = require('mongoose');
/**
* @typedef {Object} ToolCallData
* @property {string} conversationId - The ID of the conversation
* @property {string} messageId - The ID of the message
* @property {string} toolId - The ID of the tool
* @property {string | ObjectId} user - The user's ObjectId
* @property {unknown} [result] - Optional result data
* @property {TAttachment[]} [attachments] - Optional attachments data
* @property {number} [blockIndex] - Optional code block index
* @property {number} [partIndex] - Optional part index
*/
/** @type {MongooseSchema<ToolCallData>} */
const toolCallSchema = mongoose.Schema(
{
conversationId: {
type: String,
required: true,
},
messageId: {
type: String,
required: true,
},
toolId: {
type: String,
required: true,
},
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
required: true,
},
result: {
type: mongoose.Schema.Types.Mixed,
},
attachments: {
type: mongoose.Schema.Types.Mixed,
},
blockIndex: {
type: Number,
},
partIndex: {
type: Number,
},
},
{ timestamps: true },
);
toolCallSchema.index({ messageId: 1, user: 1 });
toolCallSchema.index({ conversationId: 1, user: 1 });
module.exports = mongoose.model('ToolCall', toolCallSchema);
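The two compound indexes above match the common lookups: all tool calls for a message and all tool calls in a conversation, always scoped by user. Illustrative queries; the require path is assumed (getToolCallsByConvo mirrors the accessor used elsewhere in this diff):

const ToolCall = require('./toolCall'); // hypothetical path to the model above

// Served by the { messageId: 1, user: 1 } index.
const getToolCallsByMessage = (messageId, user) => ToolCall.find({ messageId, user }).lean();

// Served by the { conversationId: 1, user: 1 } index.
const getToolCallsByConvo = (conversationId, user) => ToolCall.find({ conversationId, user }).lean();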

View File

@@ -1,24 +1,6 @@
import mongoose, { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');
// @ts-ignore
export interface ITransaction extends Document {
user: Types.ObjectId;
conversationId?: string;
tokenType: 'prompt' | 'completion' | 'credits';
model?: string;
context?: string;
valueKey?: string;
rate?: number;
rawAmount?: number;
tokenValue?: number;
inputTokens?: number;
writeTokens?: number;
readTokens?: number;
createdAt?: Date;
updatedAt?: Date;
}
const transactionSchema: Schema<ITransaction> = new Schema(
const transactionSchema = mongoose.Schema(
{
user: {
type: mongoose.Schema.Types.ObjectId,
@@ -57,4 +39,4 @@ const transactionSchema: Schema<ITransaction> = new Schema(
},
);
export default transactionSchema;
module.exports = transactionSchema;

View File

@@ -0,0 +1,151 @@
const mongoose = require('mongoose');
const { SystemRoles } = require('librechat-data-provider');
/**
* @typedef {Object} MongoSession
* @property {string} [refreshToken] - The refresh token
*/
/**
* @typedef {Object} MongoUser
* @property {ObjectId} [_id] - MongoDB Document ID
* @property {string} [name] - The user's name
* @property {string} [username] - The user's username, in lowercase
* @property {string} email - The user's email address
* @property {boolean} emailVerified - Whether the user's email is verified
* @property {string} [password] - The user's password, trimmed with 8-128 characters
* @property {string} [avatar] - The URL of the user's avatar
* @property {string} provider - The provider of the user's account (e.g., 'local', 'google')
* @property {string} [role='USER'] - The role of the user
* @property {string} [googleId] - Optional Google ID for the user
* @property {string} [facebookId] - Optional Facebook ID for the user
* @property {string} [openidId] - Optional OpenID ID for the user
* @property {string} [ldapId] - Optional LDAP ID for the user
* @property {string} [githubId] - Optional GitHub ID for the user
* @property {string} [discordId] - Optional Discord ID for the user
* @property {string} [appleId] - Optional Apple ID for the user
* @property {Array} [plugins=[]] - List of plugins used by the user
* @property {Array.<MongoSession>} [refreshToken] - List of sessions with refresh tokens
* @property {Date} [expiresAt] - Optional expiration date of the user record
* @property {Date} [createdAt] - Date when the user was created (added by timestamps)
* @property {Date} [updatedAt] - Date when the user was last updated (added by timestamps)
*/
/** @type {MongooseSchema<MongoSession>} */
const Session = mongoose.Schema({
refreshToken: {
type: String,
default: '',
},
});
const backupCodeSchema = mongoose.Schema({
codeHash: { type: String, required: true },
used: { type: Boolean, default: false },
usedAt: { type: Date, default: null },
});
/** @type {MongooseSchema<MongoUser>} */
const userSchema = mongoose.Schema(
{
name: {
type: String,
},
username: {
type: String,
lowercase: true,
default: '',
},
email: {
type: String,
required: [true, 'can\'t be blank'],
lowercase: true,
unique: true,
match: [/\S+@\S+\.\S+/, 'is invalid'],
index: true,
},
emailVerified: {
type: Boolean,
required: true,
default: false,
},
password: {
type: String,
trim: true,
minlength: 8,
maxlength: 128,
},
avatar: {
type: String,
required: false,
},
provider: {
type: String,
required: true,
default: 'local',
},
role: {
type: String,
default: SystemRoles.USER,
},
googleId: {
type: String,
unique: true,
sparse: true,
},
facebookId: {
type: String,
unique: true,
sparse: true,
},
openidId: {
type: String,
unique: true,
sparse: true,
},
ldapId: {
type: String,
unique: true,
sparse: true,
},
githubId: {
type: String,
unique: true,
sparse: true,
},
discordId: {
type: String,
unique: true,
sparse: true,
},
appleId: {
type: String,
unique: true,
sparse: true,
},
plugins: {
type: Array,
},
totpSecret: {
type: String,
},
backupCodes: {
type: [backupCodeSchema],
},
refreshToken: {
type: [Session],
},
expiresAt: {
type: Date,
expires: 604800, // 7 days in seconds
},
termsAccepted: {
type: Boolean,
default: false,
},
},
{ timestamps: true },
);
module.exports = userSchema;
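A note on the provider ID fields above (googleId, githubId, and the rest): each is declared `unique: true, sparse: true`, so the unique index only considers documents where the field is present, and any number of users may omit it. A minimal illustration; the model registration and sample data are assumed:

const mongoose = require('mongoose');
const userSchema = require('./user'); // hypothetical path to the schema above

const User = mongoose.models.User || mongoose.model('User', userSchema);

async function demoSparseIndex() {
  // Both inserts succeed: the sparse unique index on googleId ignores
  // documents that lack the field entirely.
  await User.create({ email: 'a@example.com', provider: 'local' });
  await User.create({ email: 'b@example.com', provider: 'local' });
  // A duplicate *present* value, e.g. two users with the same googleId,
  // would still be rejected.
}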

View File

@@ -1,6 +1,6 @@
{
"name": "@librechat/backend",
"version": "v0.7.7",
"version": "v0.7.7-rc1",
"description": "",
"scripts": {
"start": "echo 'please run this from the root directory'",
@@ -36,19 +36,18 @@
"dependencies": {
"@anthropic-ai/sdk": "^0.37.0",
"@azure/search-documents": "^12.0.0",
"@google/generative-ai": "^0.23.0",
"@google/generative-ai": "^0.21.0",
"@googleapis/youtube": "^20.0.0",
"@keyv/mongo": "^2.1.8",
"@keyv/redis": "^2.8.1",
"@langchain/community": "^0.3.34",
"@langchain/community": "^0.3.14",
"@langchain/core": "^0.3.40",
"@langchain/google-genai": "^0.1.9",
"@langchain/google-vertexai": "^0.2.0",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.2.0",
"@librechat/data-schemas": "*",
"@librechat/agents": "^2.1.7",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"axios": "^1.8.2",
"axios": "1.7.8",
"bcryptjs": "^2.4.3",
"cohere-ai": "^7.9.1",
"compression": "^1.7.4",
@@ -75,6 +74,7 @@
"keyv": "^4.5.4",
"keyv-file": "^0.2.0",
"klona": "^2.0.6",
"langchain": "^0.2.19",
"librechat-data-provider": "*",
"librechat-mcp": "*",
"lodash": "^4.17.21",
@@ -82,7 +82,7 @@
"memorystore": "^1.6.7",
"mime": "^3.0.0",
"module-alias": "^2.2.3",
"mongoose": "^8.12.1",
"mongoose": "^8.9.5",
"multer": "^1.4.5-lts.1",
"nanoid": "^3.3.7",
"nodemailer": "^6.9.15",

View File

@@ -1,7 +1,6 @@
const { CacheKeys } = require('librechat-data-provider');
const { loadDefaultModels, loadConfigModels } = require('~/server/services/Config');
const { getLogStores } = require('~/cache');
const { logger } = require('~/config');
/**
* @param {ServerRequest} req
@@ -37,13 +36,8 @@ async function loadModels(req) {
}
async function modelController(req, res) {
try {
const modelConfig = await loadModels(req);
res.send(modelConfig);
} catch (error) {
logger.error('Error fetching models:', error);
res.status(500).send({ error: error.message });
}
const modelConfig = await loadModels(req);
res.send(modelConfig);
}
module.exports = { modelController, loadModels, getModelsConfig };

View File

@@ -11,19 +11,17 @@ const { encryptV2 } = require('~/server/utils/crypto');
const enable2FAController = async (req, res) => {
const safeAppTitle = (process.env.APP_TITLE || 'LibreChat').replace(/\s+/g, '');
try {
const userId = req.user.id;
const secret = generateTOTPSecret();
const { plainCodes, codeObjects } = await generateBackupCodes();
const encryptedSecret = await encryptV2(secret);
// Set twoFactorEnabled to false until the user confirms 2FA.
const user = await updateUser(userId, {
totpSecret: encryptedSecret,
backupCodes: codeObjects,
twoFactorEnabled: false,
});
const user = await updateUser(userId, { totpSecret: encryptedSecret, backupCodes: codeObjects });
const otpauthUrl = `otpauth://totp/${safeAppTitle}:${user.email}?secret=${secret}&issuer=${safeAppTitle}`;
res.status(200).json({
otpauthUrl,
backupCodes: plainCodes,
@@ -39,7 +37,6 @@ const verify2FAController = async (req, res) => {
const userId = req.user.id;
const { token, backupCode } = req.body;
const user = await getUserById(userId);
// Ensure that 2FA is enabled for this user.
if (!user || !user.totpSecret) {
return res.status(400).json({ message: '2FA not initiated' });
}
@@ -55,6 +52,7 @@ const verify2FAController = async (req, res) => {
return res.status(200).json();
}
}
return res.status(400).json({ message: 'Invalid token.' });
} catch (err) {
logger.error('[verify2FAController]', err);
@@ -76,8 +74,6 @@ const confirm2FAController = async (req, res) => {
const secret = await getTOTPSecret(user.totpSecret);
if (await verifyTOTP(secret, token)) {
// Upon successful verification, enable 2FA.
await updateUser(userId, { twoFactorEnabled: true });
return res.status(200).json();
}
@@ -91,7 +87,7 @@ const confirm2FAController = async (req, res) => {
const disable2FAController = async (req, res) => {
try {
const userId = req.user.id;
await updateUser(userId, { totpSecret: null, backupCodes: [], twoFactorEnabled: false });
await updateUser(userId, { totpSecret: null, backupCodes: [] });
res.status(200).json();
} catch (err) {
logger.error('[disable2FAController]', err);
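The otpauth URL assembled in enable2FAController above follows the standard TOTP key-URI format that authenticator apps read from a QR code. A standalone sketch of the same shape; the secret and addresses are illustrative:

// otpauth://totp/<issuer>:<account>?secret=<base32 secret>&issuer=<issuer>
const buildOtpauthUrl = (issuer, email, secret) =>
  `otpauth://totp/${issuer}:${email}?secret=${secret}&issuer=${issuer}`;

console.log(buildOtpauthUrl('LibreChat', 'user@example.com', 'JBSWY3DPEHPK3PXP'));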

View File

@@ -10,8 +10,8 @@ const {
ChatModelStreamHandler,
} = require('@librechat/agents');
const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { saveBase64Image } = require('~/server/services/Files/process');
const { loadAuthValues } = require('~/app/clients/tools/util');
const { logger, sendEvent } = require('~/config');
/** @typedef {import('@librechat/agents').Graph} Graph */

View File

@@ -17,7 +17,7 @@ const {
KnownEndpoints,
anthropicSchema,
isAgentsEndpoint,
bedrockInputSchema,
bedrockOutputParser,
removeNullishValues,
} = require('librechat-data-provider');
const {
@@ -30,7 +30,6 @@ const {
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
const { getBufferString, HumanMessage } = require('@langchain/core/messages');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const Tokenizer = require('~/server/services/Tokenizer');
const BaseClient = require('~/app/clients/BaseClient');
const { createRun } = require('./run');
@@ -40,10 +39,10 @@ const { logger } = require('~/config');
/** @typedef {import('@langchain/core/runnables').RunnableConfig} RunnableConfig */
const providerParsers = {
[EModelEndpoint.openAI]: openAISchema.parse,
[EModelEndpoint.azureOpenAI]: openAISchema.parse,
[EModelEndpoint.anthropic]: anthropicSchema.parse,
[EModelEndpoint.bedrock]: bedrockInputSchema.parse,
[EModelEndpoint.openAI]: openAISchema,
[EModelEndpoint.azureOpenAI]: openAISchema,
[EModelEndpoint.anthropic]: anthropicSchema,
[EModelEndpoint.bedrock]: bedrockOutputParser,
};
const legacyContentEndpoints = new Set([KnownEndpoints.groq, KnownEndpoints.deepseek]);
@@ -188,14 +187,7 @@ class AgentClient extends BaseClient {
: {};
if (parseOptions) {
try {
runOptions = parseOptions(this.options.agent.model_parameters);
} catch (error) {
logger.error(
'[api/server/controllers/agents/client.js #getSaveOptions] Error parsing options',
error,
);
}
runOptions = parseOptions(this.options.agent.model_parameters);
}
return removeNullishValues(
@@ -223,23 +215,14 @@ class AgentClient extends BaseClient {
};
}
/**
*
* @param {TMessage} message
* @param {Array<MongoFile>} attachments
* @returns {Promise<Array<Partial<MongoFile>>>}
*/
async addImageURLs(message, attachments) {
const { files, text, image_urls } = await encodeAndFormat(
const { files, image_urls } = await encodeAndFormat(
this.options.req,
attachments,
this.options.agent.provider,
VisionModes.agents,
);
message.image_urls = image_urls.length ? image_urls : undefined;
if (text && text.length) {
message.ocr = text;
}
return files;
}
@@ -317,21 +300,7 @@ class AgentClient extends BaseClient {
assistantName: this.options?.modelLabel,
});
if (message.ocr && i !== orderedMessages.length - 1) {
if (typeof formattedMessage.content === 'string') {
formattedMessage.content = message.ocr + '\n' + formattedMessage.content;
} else {
const textPart = formattedMessage.content.find((part) => part.type === 'text');
textPart
? (textPart.text = message.ocr + '\n' + textPart.text)
: formattedMessage.content.unshift({ type: 'text', text: message.ocr });
}
} else if (message.ocr && i === orderedMessages.length - 1) {
systemContent = [systemContent, message.ocr].join('\n');
}
const needsTokenCount =
(this.contextStrategy && !orderedMessages[i].tokenCount) || message.ocr;
const needsTokenCount = this.contextStrategy && !orderedMessages[i].tokenCount;
/* If tokens were never counted, or, is a Vision request and the message has files, count again */
if (needsTokenCount || (this.isVisionModel && (message.image_urls || message.files))) {
@@ -835,10 +804,7 @@ class AgentClient extends BaseClient {
'[api/server/controllers/agents/client.js #sendCompletion] Unhandled error type',
err,
);
this.contentParts.push({
type: ContentTypes.ERROR,
[ContentTypes.ERROR]: `An error occurred while processing the request${err?.message ? `: ${err.message}` : ''}`,
});
throw err;
}
}
}
@@ -858,16 +824,13 @@ class AgentClient extends BaseClient {
const clientOptions = {
maxTokens: 75,
};
let endpointConfig = this.options.req.app.locals[this.options.agent.endpoint];
if (!endpointConfig) {
endpointConfig = await getCustomEndpointConfig(this.options.agent.endpoint);
}
const providerConfig = this.options.req.app.locals[this.options.agent.provider];
if (
endpointConfig &&
endpointConfig.titleModel &&
endpointConfig.titleModel !== Constants.CURRENT_MODEL
providerConfig &&
providerConfig.titleModel &&
providerConfig.titleModel !== Constants.CURRENT_MODEL
) {
clientOptions.model = endpointConfig.titleModel;
clientOptions.model = providerConfig.titleModel;
}
try {
const titleResult = await this.run.generateTitle({

View File

@@ -45,10 +45,7 @@ async function createRun({
/** @type {'reasoning_content' | 'reasoning'} */
let reasoningKey;
if (
llmConfig.configuration?.baseURL?.includes(KnownEndpoints.openrouter) ||
(agent.endpoint && agent.endpoint.toLowerCase().includes(KnownEndpoints.openrouter))
) {
if (llmConfig.configuration?.baseURL?.includes(KnownEndpoints.openrouter)) {
reasoningKey = 'reasoning';
}
if (/o1(?!-(?:mini|preview)).*$/.test(llmConfig.model)) {

View File

@@ -8,7 +8,7 @@ const loginController = async (req, res) => {
return res.status(400).json({ message: 'Invalid credentials' });
}
if (req.user.twoFactorEnabled) {
if (req.user.backupCodes != null && req.user.backupCodes.length > 0) {
const tempToken = generate2FATempToken(req.user._id);
return res.status(200).json({ twoFAPending: true, tempToken });
}

View File

@@ -1,9 +1,5 @@
const jwt = require('jsonwebtoken');
const {
verifyTOTP,
verifyBackupCode,
getTOTPSecret,
} = require('~/server/services/twoFactorService');
const { verifyTOTP, verifyBackupCode, getTOTPSecret } = require('~/server/services/twoFactorService');
const { setAuthTokens } = require('~/server/services/AuthService');
const { getUserById } = require('~/models/userMethods');
const { logger } = require('~/config');
@@ -23,12 +19,12 @@ const verify2FA = async (req, res) => {
}
const user = await getUserById(payload.userId);
// Ensure that the user exists and has 2FA enabled
if (!user || !user.twoFactorEnabled) {
// Ensure that the user exists and has backup codes (i.e. 2FA enabled)
if (!user || !(user.backupCodes && user.backupCodes.length > 0)) {
return res.status(400).json({ message: '2FA is not enabled for this user' });
}
// Retrieve (and decrypt if necessary) the TOTP secret.
// Use the new getTOTPSecret function to retrieve (and decrypt if necessary) the TOTP secret.
const secret = await getTOTPSecret(user.totpSecret);
let verified = false;
@@ -43,7 +39,9 @@ const verify2FA = async (req, res) => {
}
// Prepare user data for response.
// If the user is a plain object (from lean queries), we create a shallow copy.
const userData = user.toObject ? user.toObject() : { ...user };
// Remove sensitive fields.
delete userData.password;
delete userData.__v;
delete userData.totpSecret;

View File

@@ -10,8 +10,7 @@ const {
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { createToolCall, getToolCallsByConvo } = require('~/models/ToolCall');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { loadTools } = require('~/app/clients/tools/util');
const { loadAuthValues, loadTools } = require('~/app/clients/tools/util');
const { checkAccess } = require('~/server/middleware');
const { getMessage } = require('~/models/Message');
const { logger } = require('~/config');

View File

@@ -120,7 +120,7 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
{ promptTokens, completionTokens },
);
await saveMessage(
saveMessage(
req,
{ ...responseMessage, user },
{ context: 'api/server/middleware/abortMiddleware.js' },

View File

@@ -10,6 +10,7 @@ const openAI = require('~/server/services/Endpoints/openAI');
const agents = require('~/server/services/Endpoints/agents');
const custom = require('~/server/services/Endpoints/custom');
const google = require('~/server/services/Endpoints/google');
const { getConvoFiles } = require('~/models/Conversation');
const { handleError } = require('~/server/utils');
const buildFunction = {
@@ -86,8 +87,16 @@ async function buildEndpointOption(req, res, next) {
// TODO: use `getModelsConfig` only when necessary
const modelsConfig = await getModelsConfig(req);
const { resendFiles = true } = req.body.endpointOption;
req.body.endpointOption.modelsConfig = modelsConfig;
if (req.body.files && !isAgents) {
if (isAgents && resendFiles && req.body.conversationId) {
const fileIds = await getConvoFiles(req.body.conversationId);
const requestFiles = req.body.files ?? [];
if (requestFiles.length || fileIds.length) {
req.body.endpointOption.attachments = processFiles(requestFiles, fileIds);
}
} else if (req.body.files) {
// hold the promise
req.body.endpointOption.attachments = processFiles(req.body.files);
}
next();

View File

@@ -1,18 +1,32 @@
const passport = require('passport');
const { logger } = require('~/config');
const DebugControl = require('../../utils/debug.js');
function log({ title, parameters }) {
DebugControl.log.functionName(title);
if (parameters) {
DebugControl.log.parameters(parameters);
}
}
const requireLocalAuth = (req, res, next) => {
passport.authenticate('local', (err, user, info) => {
if (err) {
logger.error('[requireLocalAuth] Error at passport.authenticate:', err);
log({
title: '(requireLocalAuth) Error at passport.authenticate',
parameters: [{ name: 'error', value: err }],
});
return next(err);
}
if (!user) {
logger.debug('[requireLocalAuth] Error: No user');
log({
title: '(requireLocalAuth) Error: No user',
});
return res.status(404).send(info);
}
if (info && info.message) {
logger.debug('[requireLocalAuth] Error: ' + info.message);
log({
title: '(requireLocalAuth) Error: ' + info.message,
});
return res.status(422).send({ message: info.message });
}
req.user = user;

View File

@@ -47,10 +47,10 @@ router.get('/', async function (req, res) {
githubLoginEnabled: !!process.env.GITHUB_CLIENT_ID && !!process.env.GITHUB_CLIENT_SECRET,
googleLoginEnabled: !!process.env.GOOGLE_CLIENT_ID && !!process.env.GOOGLE_CLIENT_SECRET,
appleLoginEnabled:
!!process.env.APPLE_CLIENT_ID &&
!!process.env.APPLE_TEAM_ID &&
!!process.env.APPLE_KEY_ID &&
!!process.env.APPLE_PRIVATE_KEY_PATH,
!!process.env.APPLE_CLIENT_ID &&
!!process.env.APPLE_TEAM_ID &&
!!process.env.APPLE_KEY_ID &&
!!process.env.APPLE_PRIVATE_KEY_PATH,
openidLoginEnabled:
!!process.env.OPENID_CLIENT_ID &&
!!process.env.OPENID_CLIENT_SECRET &&
@@ -80,7 +80,6 @@ router.get('/', async function (req, res) {
publicSharedLinksEnabled,
analyticsGtmId: process.env.ANALYTICS_GTM_ID,
instanceProjectId: instanceProject._id.toString(),
bundlerURL: process.env.SANDPACK_BUNDLER_URL,
};
if (ldap) {

View File

@@ -16,7 +16,7 @@ const {
} = require('~/server/services/Files/process');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { loadAuthValues } = require('~/app/clients/tools/util');
const { getAgent } = require('~/models/Agent');
const { getFiles } = require('~/models/File');
const { logger } = require('~/config');

View File

@@ -1,9 +1,4 @@
const {
FileSources,
EModelEndpoint,
loadOCRConfig,
getConfigDefaults,
} = require('librechat-data-provider');
const { FileSources, EModelEndpoint, getConfigDefaults } = require('librechat-data-provider');
const { checkVariables, checkHealth, checkConfig, checkAzureVariables } = require('./start/checks');
const { azureAssistantsDefaults, assistantsConfigSetup } = require('./start/assistants');
const { initializeFirebase } = require('./Files/Firebase/initialize');
@@ -30,7 +25,6 @@ const AppService = async (app) => {
const config = (await loadCustomConfig()) ?? {};
const configDefaults = getConfigDefaults();
const ocr = loadOCRConfig(config.ocr);
const filteredTools = config.filteredTools;
const includedTools = config.includedTools;
const fileStrategy = config.fileStrategy ?? configDefaults.fileStrategy;
@@ -63,7 +57,6 @@ const AppService = async (app) => {
const interfaceConfig = await loadDefaultInterface(config, configDefaults);
const defaultLocals = {
ocr,
paths,
fileStrategy,
socialLogins,

View File

@@ -120,7 +120,6 @@ describe('AppService', () => {
},
},
paths: expect.anything(),
ocr: expect.anything(),
imageOutputType: expect.any(String),
fileConfig: undefined,
secureImageLinks: undefined,
@@ -589,33 +588,4 @@ describe('AppService updating app.locals and issuing warnings', () => {
);
});
});
it('should not parse environment variable references in OCR config', async () => {
// Mock custom configuration with env variable references in OCR config
const mockConfig = {
ocr: {
apiKey: '${OCR_API_KEY_CUSTOM_VAR_NAME}',
baseURL: '${OCR_BASEURL_CUSTOM_VAR_NAME}',
strategy: 'mistral_ocr',
mistralModel: 'mistral-medium',
},
};
require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve(mockConfig));
// Set actual environment variables with different values
process.env.OCR_API_KEY_CUSTOM_VAR_NAME = 'actual-api-key';
process.env.OCR_BASEURL_CUSTOM_VAR_NAME = 'https://actual-ocr-url.com';
// Initialize app
const app = { locals: {} };
await AppService(app);
// Verify that the raw string references were preserved and not interpolated
expect(app.locals.ocr).toBeDefined();
expect(app.locals.ocr.apiKey).toEqual('${OCR_API_KEY_CUSTOM_VAR_NAME}');
expect(app.locals.ocr.baseURL).toEqual('${OCR_BASEURL_CUSTOM_VAR_NAME}');
expect(app.locals.ocr.strategy).toEqual('mistral_ocr');
expect(app.locals.ocr.mistralModel).toEqual('mistral-medium');
});
});

View File

@@ -47,7 +47,7 @@ async function loadConfigModels(req) {
);
/**
* @type {Record<string, Promise<string[]>>}
* @type {Record<string, string[]>}
* Map for promises keyed by unique combination of baseURL and apiKey */
const fetchPromisesMap = {};
/**
@@ -102,7 +102,7 @@ async function loadConfigModels(req) {
for (const name of associatedNames) {
const endpoint = endpointsMap[name];
modelsConfig[name] = !modelData?.length ? (endpoint.models.default ?? []) : modelData;
modelsConfig[name] = !modelData?.length ? endpoint.models.default ?? [] : modelData;
}
}

View File

@@ -5,8 +5,8 @@ const {
getGoogleModels,
getBedrockModels,
getAnthropicModels,
getChatGPTBrowserModels,
} = require('~/server/services/ModelService');
const { logger } = require('~/config');
/**
* Loads the default models for the application.
@@ -15,68 +15,31 @@ const { logger } = require('~/config');
* @param {Express.Request} req - The Express request object.
*/
async function loadDefaultModels(req) {
try {
const [
openAI,
anthropic,
azureOpenAI,
gptPlugins,
assistants,
azureAssistants,
google,
bedrock,
] = await Promise.all([
getOpenAIModels({ user: req.user.id }).catch((error) => {
logger.error('Error fetching OpenAI models:', error);
return [];
}),
getAnthropicModels({ user: req.user.id }).catch((error) => {
logger.error('Error fetching Anthropic models:', error);
return [];
}),
getOpenAIModels({ user: req.user.id, azure: true }).catch((error) => {
logger.error('Error fetching Azure OpenAI models:', error);
return [];
}),
getOpenAIModels({ user: req.user.id, azure: useAzurePlugins, plugins: true }).catch(
(error) => {
logger.error('Error fetching Plugin models:', error);
return [];
},
),
getOpenAIModels({ assistants: true }).catch((error) => {
logger.error('Error fetching OpenAI Assistants API models:', error);
return [];
}),
getOpenAIModels({ azureAssistants: true }).catch((error) => {
logger.error('Error fetching Azure OpenAI Assistants API models:', error);
return [];
}),
Promise.resolve(getGoogleModels()).catch((error) => {
logger.error('Error getting Google models:', error);
return [];
}),
Promise.resolve(getBedrockModels()).catch((error) => {
logger.error('Error getting Bedrock models:', error);
return [];
}),
]);
const google = getGoogleModels();
const openAI = await getOpenAIModels({ user: req.user.id });
const anthropic = getAnthropicModels();
const chatGPTBrowser = getChatGPTBrowserModels();
const azureOpenAI = await getOpenAIModels({ user: req.user.id, azure: true });
const gptPlugins = await getOpenAIModels({
user: req.user.id,
azure: useAzurePlugins,
plugins: true,
});
const assistants = await getOpenAIModels({ assistants: true });
const azureAssistants = await getOpenAIModels({ azureAssistants: true });
return {
[EModelEndpoint.openAI]: openAI,
[EModelEndpoint.agents]: openAI,
[EModelEndpoint.google]: google,
[EModelEndpoint.anthropic]: anthropic,
[EModelEndpoint.gptPlugins]: gptPlugins,
[EModelEndpoint.azureOpenAI]: azureOpenAI,
[EModelEndpoint.assistants]: assistants,
[EModelEndpoint.azureAssistants]: azureAssistants,
[EModelEndpoint.bedrock]: bedrock,
};
} catch (error) {
logger.error('Error fetching default models:', error);
throw new Error(`Failed to load default models: ${error.message}`);
}
return {
[EModelEndpoint.openAI]: openAI,
[EModelEndpoint.agents]: openAI,
[EModelEndpoint.google]: google,
[EModelEndpoint.anthropic]: anthropic,
[EModelEndpoint.gptPlugins]: gptPlugins,
[EModelEndpoint.azureOpenAI]: azureOpenAI,
[EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
[EModelEndpoint.assistants]: assistants,
[EModelEndpoint.azureAssistants]: azureAssistants,
[EModelEndpoint.bedrock]: getBedrockModels(),
};
}
module.exports = loadDefaultModels;

View File

@@ -2,8 +2,15 @@ const { loadAgent } = require('~/models/Agent');
const { logger } = require('~/config');
const buildOptions = (req, endpoint, parsedBody) => {
const { spec, iconURL, agent_id, instructions, maxContextTokens, ...model_parameters } =
parsedBody;
const {
spec,
iconURL,
agent_id,
instructions,
maxContextTokens,
resendFiles = true,
...model_parameters
} = parsedBody;
const agentPromise = loadAgent({
req,
agent_id,
@@ -17,6 +24,7 @@ const buildOptions = (req, endpoint, parsedBody) => {
iconURL,
endpoint,
agent_id,
resendFiles,
instructions,
maxContextTokens,
model_parameters,

View File

@@ -2,7 +2,6 @@ const { createContentAggregator, Providers } = require('@librechat/agents');
const {
EModelEndpoint,
getResponseSender,
AgentCapabilities,
providerEndpointMap,
} = require('librechat-data-provider');
const {
@@ -16,17 +15,13 @@ const initCustom = require('~/server/services/Endpoints/custom/initialize');
const initGoogle = require('~/server/services/Endpoints/google/initialize');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { processFiles } = require('~/server/services/Files/process');
const { loadAgentTools } = require('~/server/services/ToolService');
const AgentClient = require('~/server/controllers/agents/client');
const { getToolFiles } = require('~/models/Conversation');
const { getModelMaxTokens } = require('~/utils');
const { getAgent } = require('~/models/Agent');
const { getFiles } = require('~/models/File');
const { logger } = require('~/config');
const providerConfigMap = {
[Providers.XAI]: initCustom,
[Providers.OLLAMA]: initCustom,
[Providers.DEEPSEEK]: initCustom,
[Providers.OPENROUTER]: initCustom,
@@ -38,38 +33,20 @@ const providerConfigMap = {
};
/**
* @param {ServerRequest} req
*
* @param {Promise<Array<MongoFile | null>> | undefined} _attachments
* @param {AgentToolResources | undefined} _tool_resources
* @returns {Promise<{ attachments: Array<MongoFile | undefined> | undefined, tool_resources: AgentToolResources | undefined }>}
*/
const primeResources = async (req, _attachments, _tool_resources) => {
const primeResources = async (_attachments, _tool_resources) => {
try {
/** @type {Array<MongoFile | undefined> | undefined} */
let attachments;
const tool_resources = _tool_resources ?? {};
const isOCREnabled = (req.app.locals?.[EModelEndpoint.agents]?.capabilities ?? []).includes(
AgentCapabilities.ocr,
);
if (tool_resources.ocr?.file_ids && isOCREnabled) {
const context = await getFiles(
{
file_id: { $in: tool_resources.ocr.file_ids },
},
{},
{},
);
attachments = (attachments ?? []).concat(context);
}
if (!_attachments) {
return { attachments, tool_resources };
return { attachments: undefined, tool_resources: _tool_resources };
}
/** @type {Array<MongoFile | undefined> | undefined} */
const files = await _attachments;
if (!attachments) {
/** @type {Array<MongoFile | undefined>} */
attachments = [];
}
const attachments = [];
const tool_resources = _tool_resources ?? {};
for (const file of files) {
if (!file) {
@@ -104,6 +81,7 @@ const primeResources = async (req, _attachments, _tool_resources) => {
* @param {ServerResponse} params.res
* @param {Agent} params.agent
* @param {object} [params.endpointOption]
* @param {AgentToolResources} [params.tool_resources]
* @param {boolean} [params.isInitialAgent]
* @returns {Promise<Agent>}
*/
@@ -112,28 +90,9 @@ const initializeAgentOptions = async ({
res,
agent,
endpointOption,
tool_resources,
isInitialAgent = false,
}) => {
let currentFiles;
const requestFiles = req.body.files ?? [];
if (
isInitialAgent &&
req.body.conversationId != null &&
agent.model_parameters?.resendFiles === true
) {
const fileIds = (await getToolFiles(req.body.conversationId)).map((f) => f.file_id);
if (requestFiles.length || fileIds.length) {
currentFiles = await processFiles(requestFiles, fileIds);
}
} else if (isInitialAgent && requestFiles.length) {
currentFiles = await processFiles(requestFiles);
}
const { attachments, tool_resources } = await primeResources(
req,
currentFiles,
agent.tool_resources,
);
const { tools, toolContextMap } = await loadAgentTools({
req,
res,
@@ -142,7 +101,6 @@ const initializeAgentOptions = async ({
});
const provider = agent.provider;
agent.endpoint = provider;
let getOptions = providerConfigMap[provider];
if (!getOptions && providerConfigMap[provider.toLowerCase()] != null) {
agent.provider = provider.toLowerCase();
@@ -154,7 +112,9 @@ const initializeAgentOptions = async ({
}
getOptions = initCustom;
agent.provider = Providers.OPENAI;
agent.endpoint = provider.toLowerCase();
}
const model_parameters = Object.assign(
{},
agent.model_parameters ?? { model: agent.model },
@@ -200,7 +160,6 @@ const initializeAgentOptions = async ({
return {
...agent,
tools,
attachments,
toolContextMap,
maxContextTokens:
agent.max_context_tokens ??
@@ -238,6 +197,11 @@ const initializeClient = async ({ req, res, endpointOption }) => {
throw new Error('Agent not found');
}
const { attachments, tool_resources } = await primeResources(
endpointOption.attachments,
primaryAgent.tool_resources,
);
const agentConfigs = new Map();
// Handle primary agent
@@ -246,6 +210,7 @@ const initializeClient = async ({ req, res, endpointOption }) => {
res,
agent: primaryAgent,
endpointOption,
tool_resources,
isInitialAgent: true,
});
@@ -275,19 +240,18 @@ const initializeClient = async ({ req, res, endpointOption }) => {
const client = new AgentClient({
req,
agent: primaryConfig,
sender,
attachments,
contentParts,
agentConfigs,
eventHandlers,
collectedUsage,
artifactPromises,
agent: primaryConfig,
spec: endpointOption.spec,
iconURL: endpointOption.iconURL,
agentConfigs,
endpoint: EModelEndpoint.agents,
attachments: primaryConfig.attachments,
maxContextTokens: primaryConfig.maxContextTokens,
resendFiles: primaryConfig.model_parameters?.resendFiles ?? true,
});
return { client };

View File

@@ -27,7 +27,6 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
if (anthropicConfig) {
clientOptions.streamRate = anthropicConfig.streamRate;
clientOptions.titleModel = anthropicConfig.titleModel;
}
/** @type {undefined | TBaseEndpoint} */

View File

@@ -1,5 +1,6 @@
const { removeNullishValues } = require('librechat-data-provider');
const { removeNullishValues, bedrockInputParser } = require('librechat-data-provider');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const { logger } = require('~/config');
const buildOptions = (endpoint, parsedBody) => {
const {
@@ -14,6 +15,12 @@ const buildOptions = (endpoint, parsedBody) => {
artifacts,
...model_parameters
} = parsedBody;
let parsedParams = model_parameters;
try {
parsedParams = bedrockInputParser.parse(model_parameters);
} catch (error) {
logger.warn('Failed to parse bedrock input', error);
}
const endpointOption = removeNullishValues({
endpoint,
name,
@@ -24,7 +31,7 @@ const buildOptions = (endpoint, parsedBody) => {
spec,
promptPrefix,
maxContextTokens,
model_parameters,
model_parameters: parsedParams,
});
if (typeof artifacts === 'string') {

View File

@@ -1,16 +1,14 @@
const { HttpsProxyAgent } = require('https-proxy-agent');
const {
AuthType,
Constants,
EModelEndpoint,
bedrockInputParser,
bedrockOutputParser,
Constants,
AuthType,
removeNullishValues,
} = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { sleep } = require('~/server/utils');
const getOptions = async ({ req, overrideModel, endpointOption }) => {
const getOptions = async ({ req, endpointOption }) => {
const {
BEDROCK_AWS_SECRET_ACCESS_KEY,
BEDROCK_AWS_ACCESS_KEY_ID,
@@ -64,44 +62,39 @@ const getOptions = async ({ req, overrideModel, endpointOption }) => {
/** @type {BedrockClientOptions} */
const requestOptions = {
model: overrideModel ?? endpointOption.model,
model: endpointOption.model,
region: BEDROCK_AWS_DEFAULT_REGION,
streaming: true,
streamUsage: true,
callbacks: [
{
handleLLMNewToken: async () => {
if (!streamRate) {
return;
}
await sleep(streamRate);
},
},
],
};
if (credentials) {
requestOptions.credentials = credentials;
}
if (BEDROCK_REVERSE_PROXY) {
requestOptions.endpointHost = BEDROCK_REVERSE_PROXY;
}
const configOptions = {};
if (PROXY) {
/** NOTE: NOT SUPPORTED BY BEDROCK */
configOptions.httpAgent = new HttpsProxyAgent(PROXY);
}
const llmConfig = bedrockOutputParser(
bedrockInputParser.parse(
removeNullishValues(Object.assign(requestOptions, endpointOption.model_parameters)),
),
);
if (credentials) {
llmConfig.credentials = credentials;
}
if (BEDROCK_REVERSE_PROXY) {
llmConfig.endpointHost = BEDROCK_REVERSE_PROXY;
}
llmConfig.callbacks = [
{
handleLLMNewToken: async () => {
if (!streamRate) {
return;
}
await sleep(streamRate);
},
},
];
return {
/** @type {BedrockClientOptions} */
llmConfig,
llmConfig: removeNullishValues(Object.assign(requestOptions, endpointOption.model_parameters)),
configOptions,
};
};

View File

@@ -141,8 +141,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
},
clientOptions,
);
clientOptions.modelOptions.user = req.user.id;
const options = getLLMConfig(apiKey, clientOptions, endpoint);
const options = getLLMConfig(apiKey, clientOptions);
if (!customOptions.streamRate) {
return options;
}

View File

@@ -5,7 +5,12 @@ const { isEnabled } = require('~/server/utils');
const { GoogleClient } = require('~/app');
const initializeClient = async ({ req, res, endpointOption, overrideModel, optionsOnly }) => {
const { GOOGLE_KEY, GOOGLE_REVERSE_PROXY, GOOGLE_AUTH_HEADER, PROXY } = process.env;
const {
GOOGLE_KEY,
GOOGLE_REVERSE_PROXY,
GOOGLE_AUTH_HEADER,
PROXY,
} = process.env;
const isUserProvided = GOOGLE_KEY === 'user_provided';
const { key: expiresAt } = req.body;
@@ -38,7 +43,6 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
if (googleConfig) {
clientOptions.streamRate = googleConfig.streamRate;
clientOptions.titleModel = googleConfig.titleModel;
}
if (allConfig) {

View File

@@ -113,7 +113,6 @@ const initializeClient = async ({
if (!isAzureOpenAI && openAIConfig) {
clientOptions.streamRate = openAIConfig.streamRate;
clientOptions.titleModel = openAIConfig.titleModel;
}
/** @type {undefined | TBaseEndpoint} */
@@ -141,7 +140,6 @@ const initializeClient = async ({
},
clientOptions,
);
clientOptions.modelOptions.user = req.user.id;
const options = getLLMConfig(apiKey, clientOptions);
if (!clientOptions.streamRate) {
return options;

View File

@@ -9,7 +9,6 @@ const { isEnabled } = require('~/server/utils');
* @param {Object} options - Additional options for configuring the LLM.
* @param {Object} [options.modelOptions] - Model-specific options.
* @param {string} [options.modelOptions.model] - The name of the model to use.
* @param {string} [options.modelOptions.user] - The user ID
* @param {number} [options.modelOptions.temperature] - Controls randomness in output generation (0-2).
* @param {number} [options.modelOptions.top_p] - Controls diversity via nucleus sampling (0-1).
* @param {number} [options.modelOptions.frequency_penalty] - Reduces repetition of token sequences (-2 to 2).
@@ -24,10 +23,9 @@ const { isEnabled } = require('~/server/utils');
* @param {boolean} [options.streaming] - Whether to use streaming mode.
* @param {Object} [options.addParams] - Additional parameters to add to the model options.
* @param {string[]} [options.dropParams] - Parameters to remove from the model options.
* @param {string|null} [endpoint=null] - The endpoint name
* @returns {Object} Configuration options for creating an LLM instance.
*/
function getLLMConfig(apiKey, options = {}, endpoint = null) {
function getLLMConfig(apiKey, options = {}) {
const {
modelOptions = {},
reverseProxyUrl,
@@ -60,10 +58,7 @@ function getLLMConfig(apiKey, options = {}, endpoint = null) {
let useOpenRouter;
/** @type {OpenAIClientOptions['configuration']} */
const configOptions = {};
if (
(reverseProxyUrl && reverseProxyUrl.includes(KnownEndpoints.openrouter)) ||
(endpoint && endpoint.toLowerCase().includes(KnownEndpoints.openrouter))
) {
if (reverseProxyUrl && reverseProxyUrl.includes(KnownEndpoints.openrouter)) {
useOpenRouter = true;
llmConfig.include_reasoning = true;
configOptions.baseURL = reverseProxyUrl;

View File

@@ -1,10 +1,9 @@
// Code Files
const axios = require('axios');
const FormData = require('form-data');
const { getCodeBaseURL } = require('@librechat/agents');
const { createAxiosInstance } = require('~/config');
const { logAxiosError } = require('~/utils');
const axios = createAxiosInstance();
const MAX_FILE_SIZE = 150 * 1024 * 1024;
/**
@@ -17,8 +16,7 @@ const MAX_FILE_SIZE = 150 * 1024 * 1024;
async function getCodeOutputDownloadStream(fileIdentifier, apiKey) {
try {
const baseURL = getCodeBaseURL();
/** @type {import('axios').AxiosRequestConfig} */
const options = {
const response = await axios({
method: 'get',
url: `${baseURL}/download/${fileIdentifier}`,
responseType: 'stream',
@@ -27,15 +25,10 @@ async function getCodeOutputDownloadStream(fileIdentifier, apiKey) {
'X-API-Key': apiKey,
},
timeout: 15000,
};
});
const response = await axios(options);
return response;
} catch (error) {
logAxiosError({
message: `Error downloading code environment file stream: ${error.message}`,
error,
});
throw new Error(`Error downloading file: ${error.message}`);
}
}
@@ -61,8 +54,7 @@ async function uploadCodeEnvFile({ req, stream, filename, apiKey, entity_id = ''
form.append('file', stream, filename);
const baseURL = getCodeBaseURL();
/** @type {import('axios').AxiosRequestConfig} */
const options = {
const response = await axios.post(`${baseURL}/upload`, form, {
headers: {
...form.getHeaders(),
'Content-Type': 'multipart/form-data',
@@ -72,9 +64,7 @@ async function uploadCodeEnvFile({ req, stream, filename, apiKey, entity_id = ''
},
maxContentLength: MAX_FILE_SIZE,
maxBodyLength: MAX_FILE_SIZE,
};
const response = await axios.post(`${baseURL}/upload`, form, options);
});
/** @type {{ message: string; session_id: string; files: Array<{ fileId: string; filename: string }> }} */
const result = response.data;

View File

@@ -1,207 +0,0 @@
// ~/server/services/Files/MistralOCR/crud.js
const fs = require('fs');
const path = require('path');
const FormData = require('form-data');
const { FileSources, envVarRegex, extractEnvVariable } = require('librechat-data-provider');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { logger, createAxiosInstance } = require('~/config');
const { logAxiosError } = require('~/utils');
const axios = createAxiosInstance();
/**
* Uploads a document to the Mistral API using file streaming to avoid loading the entire file into memory
*
* @param {Object} params Upload parameters
* @param {string} params.filePath The path to the file on disk
* @param {string} [params.fileName] Optional filename to use (defaults to the name from filePath)
* @param {string} params.apiKey Mistral API key
* @param {string} [params.baseURL=https://api.mistral.ai/v1] Mistral API base URL
* @returns {Promise<Object>} The response from Mistral API
*/
async function uploadDocumentToMistral({
filePath,
fileName = '',
apiKey,
baseURL = 'https://api.mistral.ai/v1',
}) {
const form = new FormData();
form.append('purpose', 'ocr');
const actualFileName = fileName || path.basename(filePath);
const fileStream = fs.createReadStream(filePath);
form.append('file', fileStream, { filename: actualFileName });
return axios
.post(`${baseURL}/files`, form, {
headers: {
Authorization: `Bearer ${apiKey}`,
...form.getHeaders(),
},
maxBodyLength: Infinity,
maxContentLength: Infinity,
})
.then((res) => res.data)
.catch((error) => {
logger.error('Error uploading document to Mistral:', error.message);
throw error;
});
}
async function getSignedUrl({
apiKey,
fileId,
expiry = 24,
baseURL = 'https://api.mistral.ai/v1',
}) {
return axios
.get(`${baseURL}/files/${fileId}/url?expiry=${expiry}`, {
headers: {
Authorization: `Bearer ${apiKey}`,
},
})
.then((res) => res.data)
.catch((error) => {
logger.error('Error fetching signed URL:', error.message);
throw error;
});
}
/**
* @param {Object} params
* @param {string} params.apiKey
* @param {string} params.documentUrl
* @param {string} [params.baseURL]
* @returns {Promise<OCRResult>}
*/
async function performOCR({
apiKey,
documentUrl,
model = 'mistral-ocr-latest',
baseURL = 'https://api.mistral.ai/v1',
}) {
return axios
.post(
`${baseURL}/ocr`,
{
model,
include_image_base64: false,
document: {
type: 'document_url',
document_url: documentUrl,
},
},
{
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${apiKey}`,
},
},
)
.then((res) => res.data)
.catch((error) => {
logger.error('Error performing OCR:', error.message);
throw error;
});
}
function extractVariableName(str) {
const match = str.match(envVarRegex);
return match ? match[1] : null;
}
const uploadMistralOCR = async ({ req, file, file_id, entity_id }) => {
try {
/** @type {TCustomConfig['ocr']} */
const ocrConfig = req.app.locals?.ocr;
const apiKeyConfig = ocrConfig.apiKey || '';
const baseURLConfig = ocrConfig.baseURL || '';
const isApiKeyEnvVar = envVarRegex.test(apiKeyConfig);
const isBaseURLEnvVar = envVarRegex.test(baseURLConfig);
const isApiKeyEmpty = !apiKeyConfig.trim();
const isBaseURLEmpty = !baseURLConfig.trim();
let apiKey, baseURL;
if (isApiKeyEnvVar || isBaseURLEnvVar || isApiKeyEmpty || isBaseURLEmpty) {
const apiKeyVarName = isApiKeyEnvVar ? extractVariableName(apiKeyConfig) : 'OCR_API_KEY';
const baseURLVarName = isBaseURLEnvVar ? extractVariableName(baseURLConfig) : 'OCR_BASEURL';
const authValues = await loadAuthValues({
userId: req.user.id,
authFields: [baseURLVarName, apiKeyVarName],
optional: new Set([baseURLVarName]),
});
apiKey = authValues[apiKeyVarName];
baseURL = authValues[baseURLVarName];
} else {
apiKey = apiKeyConfig;
baseURL = baseURLConfig;
}
const mistralFile = await uploadDocumentToMistral({
filePath: file.path,
fileName: file.originalname,
apiKey,
baseURL,
});
const modelConfig = ocrConfig.mistralModel || '';
const model = envVarRegex.test(modelConfig)
? extractEnvVariable(modelConfig)
: modelConfig.trim() || 'mistral-ocr-latest';
const signedUrlResponse = await getSignedUrl({
apiKey,
baseURL,
fileId: mistralFile.id,
});
const ocrResult = await performOCR({
apiKey,
baseURL,
model,
documentUrl: signedUrlResponse.url,
});
let aggregatedText = '';
const images = [];
ocrResult.pages.forEach((page, index) => {
if (ocrResult.pages.length > 1) {
aggregatedText += `# PAGE ${index + 1}\n`;
}
aggregatedText += page.markdown + '\n\n';
if (page.images && page.images.length > 0) {
page.images.forEach((image) => {
if (image.image_base64) {
images.push(image.image_base64);
}
});
}
});
return {
filename: file.originalname,
bytes: aggregatedText.length * 4,
filepath: FileSources.mistral_ocr,
text: aggregatedText,
images,
};
} catch (error) {
const message = 'Error uploading document to Mistral OCR API';
logAxiosError({ error, message });
throw new Error(message);
}
};
module.exports = {
uploadDocumentToMistral,
uploadMistralOCR,
getSignedUrl,
performOCR,
};
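
Editor's note: for reference, the deleted module chained three calls — upload the document, exchange the returned file id for a signed URL, then run OCR against that URL. A minimal sketch of that flow using the module's own exports as they existed before deletion (assumes a valid Mistral API key):

const { uploadDocumentToMistral, getSignedUrl, performOCR } = require('./crud');

async function ocrDocument(filePath, apiKey) {
  const uploaded = await uploadDocumentToMistral({ filePath, apiKey });
  const { url } = await getSignedUrl({ apiKey, fileId: uploaded.id });
  return performOCR({ apiKey, documentUrl: url });
}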

View File

@@ -1,737 +0,0 @@
const fs = require('fs');
const mockAxios = {
interceptors: {
request: { use: jest.fn(), eject: jest.fn() },
response: { use: jest.fn(), eject: jest.fn() },
},
create: jest.fn().mockReturnValue({
defaults: {
proxy: null,
},
get: jest.fn().mockResolvedValue({ data: {} }),
post: jest.fn().mockResolvedValue({ data: {} }),
put: jest.fn().mockResolvedValue({ data: {} }),
delete: jest.fn().mockResolvedValue({ data: {} }),
}),
get: jest.fn().mockResolvedValue({ data: {} }),
post: jest.fn().mockResolvedValue({ data: {} }),
put: jest.fn().mockResolvedValue({ data: {} }),
delete: jest.fn().mockResolvedValue({ data: {} }),
reset: jest.fn().mockImplementation(function () {
this.get.mockClear();
this.post.mockClear();
this.put.mockClear();
this.delete.mockClear();
this.create.mockClear();
}),
};
jest.mock('axios', () => mockAxios);
jest.mock('fs');
jest.mock('~/utils', () => ({
logAxiosError: jest.fn(),
}));
jest.mock('~/config', () => ({
logger: {
error: jest.fn(),
},
createAxiosInstance: () => mockAxios,
}));
jest.mock('~/server/services/Tools/credentials', () => ({
loadAuthValues: jest.fn(),
}));
const { uploadDocumentToMistral, uploadMistralOCR, getSignedUrl, performOCR } = require('./crud');
describe('MistralOCR Service', () => {
afterEach(() => {
mockAxios.reset();
jest.clearAllMocks();
});
describe('uploadDocumentToMistral', () => {
beforeEach(() => {
// Create a more complete mock for file streams that FormData can work with
const mockReadStream = {
on: jest.fn().mockImplementation(function (event, handler) {
// Simulate immediate 'end' event to make FormData complete processing
if (event === 'end') {
handler();
}
return this;
}),
pipe: jest.fn().mockImplementation(function () {
return this;
}),
pause: jest.fn(),
resume: jest.fn(),
emit: jest.fn(),
once: jest.fn(),
destroy: jest.fn(),
};
fs.createReadStream = jest.fn().mockReturnValue(mockReadStream);
// Mock FormData's append to avoid actual stream processing
jest.mock('form-data', () => {
const mockFormData = function () {
return {
append: jest.fn(),
getHeaders: jest
.fn()
.mockReturnValue({ 'content-type': 'multipart/form-data; boundary=---boundary' }),
getBuffer: jest.fn().mockReturnValue(Buffer.from('mock-form-data')),
getLength: jest.fn().mockReturnValue(100),
};
};
return mockFormData;
});
});
it('should upload a document to Mistral API using file streaming', async () => {
const mockResponse = { data: { id: 'file-123', purpose: 'ocr' } };
mockAxios.post.mockResolvedValueOnce(mockResponse);
const result = await uploadDocumentToMistral({
filePath: '/path/to/test.pdf',
fileName: 'test.pdf',
apiKey: 'test-api-key',
});
// Check that createReadStream was called with the correct file path
expect(fs.createReadStream).toHaveBeenCalledWith('/path/to/test.pdf');
// Since we're mocking FormData, we'll just check that axios was called correctly
expect(mockAxios.post).toHaveBeenCalledWith(
'https://api.mistral.ai/v1/files',
expect.anything(),
expect.objectContaining({
headers: expect.objectContaining({
Authorization: 'Bearer test-api-key',
}),
maxBodyLength: Infinity,
maxContentLength: Infinity,
}),
);
expect(result).toEqual(mockResponse.data);
});
it('should handle errors during document upload', async () => {
const errorMessage = 'API error';
mockAxios.post.mockRejectedValueOnce(new Error(errorMessage));
await expect(
uploadDocumentToMistral({
filePath: '/path/to/test.pdf',
fileName: 'test.pdf',
apiKey: 'test-api-key',
}),
).rejects.toThrow();
const { logger } = require('~/config');
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining('Error uploading document to Mistral:'),
expect.any(String),
);
});
});
describe('getSignedUrl', () => {
it('should fetch signed URL from Mistral API', async () => {
const mockResponse = { data: { url: 'https://document-url.com' } };
mockAxios.get.mockResolvedValueOnce(mockResponse);
const result = await getSignedUrl({
fileId: 'file-123',
apiKey: 'test-api-key',
});
expect(mockAxios.get).toHaveBeenCalledWith(
'https://api.mistral.ai/v1/files/file-123/url?expiry=24',
{
headers: {
Authorization: 'Bearer test-api-key',
},
},
);
expect(result).toEqual(mockResponse.data);
});
it('should handle errors when fetching signed URL', async () => {
const errorMessage = 'API error';
mockAxios.get.mockRejectedValueOnce(new Error(errorMessage));
await expect(
getSignedUrl({
fileId: 'file-123',
apiKey: 'test-api-key',
}),
).rejects.toThrow();
const { logger } = require('~/config');
expect(logger.error).toHaveBeenCalledWith('Error fetching signed URL:', errorMessage);
});
});
describe('performOCR', () => {
it('should perform OCR using Mistral API', async () => {
const mockResponse = {
data: {
pages: [{ markdown: 'Page 1 content' }, { markdown: 'Page 2 content' }],
},
};
mockAxios.post.mockResolvedValueOnce(mockResponse);
const result = await performOCR({
apiKey: 'test-api-key',
documentUrl: 'https://document-url.com',
model: 'mistral-ocr-latest',
});
expect(mockAxios.post).toHaveBeenCalledWith(
'https://api.mistral.ai/v1/ocr',
{
model: 'mistral-ocr-latest',
include_image_base64: false,
document: {
type: 'document_url',
document_url: 'https://document-url.com',
},
},
{
headers: {
'Content-Type': 'application/json',
Authorization: 'Bearer test-api-key',
},
},
);
expect(result).toEqual(mockResponse.data);
});
it('should handle errors during OCR processing', async () => {
const errorMessage = 'OCR processing error';
mockAxios.post.mockRejectedValueOnce(new Error(errorMessage));
await expect(
performOCR({
apiKey: 'test-api-key',
documentUrl: 'https://document-url.com',
}),
).rejects.toThrow();
const { logger } = require('~/config');
expect(logger.error).toHaveBeenCalledWith('Error performing OCR:', errorMessage);
});
});
describe('uploadMistralOCR', () => {
beforeEach(() => {
const mockReadStream = {
on: jest.fn().mockImplementation(function (event, handler) {
if (event === 'end') {
handler();
}
return this;
}),
pipe: jest.fn().mockImplementation(function () {
return this;
}),
pause: jest.fn(),
resume: jest.fn(),
emit: jest.fn(),
once: jest.fn(),
destroy: jest.fn(),
};
fs.createReadStream = jest.fn().mockReturnValue(mockReadStream);
});
it('should process OCR for a file with standard configuration', async () => {
// Setup mocks
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'test-api-key',
OCR_BASEURL: 'https://api.mistral.ai/v1',
});
// Mock file upload response
mockAxios.post.mockResolvedValueOnce({
data: { id: 'file-123', purpose: 'ocr' },
});
// Mock signed URL response
mockAxios.get.mockResolvedValueOnce({
data: { url: 'https://signed-url.com' },
});
// Mock OCR response with text and images
mockAxios.post.mockResolvedValueOnce({
data: {
pages: [
{
markdown: 'Page 1 content',
images: [{ image_base64: 'base64image1' }],
},
{
markdown: 'Page 2 content',
images: [{ image_base64: 'base64image2' }],
},
],
},
});
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
// Use environment variable syntax to ensure loadAuthValues is called
apiKey: '${OCR_API_KEY}',
baseURL: '${OCR_BASEURL}',
mistralModel: 'mistral-medium',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'document.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user123',
authFields: ['OCR_BASEURL', 'OCR_API_KEY'],
optional: expect.any(Set),
});
// Verify OCR result
expect(result).toEqual({
filename: 'document.pdf',
bytes: expect.any(Number),
filepath: 'mistral_ocr',
text: expect.stringContaining('# PAGE 1'),
images: ['base64image1', 'base64image2'],
});
});
it('should process variable references in configuration', async () => {
// Setup mocks with environment variables
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
CUSTOM_API_KEY: 'custom-api-key',
CUSTOM_BASEURL: 'https://custom-api.mistral.ai/v1',
});
// Mock API responses
mockAxios.post.mockResolvedValueOnce({
data: { id: 'file-123', purpose: 'ocr' },
});
mockAxios.get.mockResolvedValueOnce({
data: { url: 'https://signed-url.com' },
});
mockAxios.post.mockResolvedValueOnce({
data: {
pages: [{ markdown: 'Content from custom API' }],
},
});
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
apiKey: '${CUSTOM_API_KEY}',
baseURL: '${CUSTOM_BASEURL}',
mistralModel: '${CUSTOM_MODEL}',
},
},
},
};
// Set environment variable for model
process.env.CUSTOM_MODEL = 'mistral-large';
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'document.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Verify that custom environment variables were extracted and used
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user123',
authFields: ['CUSTOM_BASEURL', 'CUSTOM_API_KEY'],
optional: expect.any(Set),
});
// Check that mistral-large was used in the OCR API call
expect(mockAxios.post).toHaveBeenCalledWith(
expect.anything(),
expect.objectContaining({
model: 'mistral-large',
}),
expect.anything(),
);
expect(result.text).toEqual('Content from custom API\n\n');
});
it('should fall back to default values when variables are not properly formatted', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'default-api-key',
OCR_BASEURL: undefined, // Testing optional parameter
});
mockAxios.post.mockResolvedValueOnce({
data: { id: 'file-123', purpose: 'ocr' },
});
mockAxios.get.mockResolvedValueOnce({
data: { url: 'https://signed-url.com' },
});
mockAxios.post.mockResolvedValueOnce({
data: {
pages: [{ markdown: 'Default API result' }],
},
});
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
// Use environment variable syntax to ensure loadAuthValues is called
apiKey: '${INVALID_FORMAT}', // Using valid env var format but with an invalid name
baseURL: '${OCR_BASEURL}', // Using valid env var format
mistralModel: 'mistral-ocr-latest', // Plain string value
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'document.pdf',
};
await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Should use the default values
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user123',
authFields: ['OCR_BASEURL', 'INVALID_FORMAT'],
optional: expect.any(Set),
});
// Should use the default model when not using environment variable format
expect(mockAxios.post).toHaveBeenCalledWith(
expect.anything(),
expect.objectContaining({
model: 'mistral-ocr-latest',
}),
expect.anything(),
);
});
it('should handle API errors during OCR process', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'test-api-key',
});
// Mock file upload to fail
mockAxios.post.mockRejectedValueOnce(new Error('Upload failed'));
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
apiKey: 'OCR_API_KEY',
baseURL: 'OCR_BASEURL',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'document.pdf',
};
await expect(
uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
}),
).rejects.toThrow('Error uploading document to Mistral OCR API');
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
const { logAxiosError } = require('~/utils');
expect(logAxiosError).toHaveBeenCalled();
});
it('should handle single page documents without page numbering', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'test-api-key',
OCR_BASEURL: 'https://api.mistral.ai/v1', // Make sure this is included
});
// Clear all previous mocks
mockAxios.post.mockClear();
mockAxios.get.mockClear();
// 1. First mock: File upload response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({ data: { id: 'file-123', purpose: 'ocr' } }),
);
// 2. Second mock: Signed URL response
mockAxios.get.mockImplementationOnce(() =>
Promise.resolve({ data: { url: 'https://signed-url.com' } }),
);
// 3. Third mock: OCR response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({
data: {
pages: [{ markdown: 'Single page content' }],
},
}),
);
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
apiKey: 'OCR_API_KEY',
baseURL: 'OCR_BASEURL',
mistralModel: 'mistral-ocr-latest',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'single-page.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Verify that single page documents don't include page numbering
expect(result.text).not.toContain('# PAGE');
expect(result.text).toEqual('Single page content\n\n');
});
it('should use literal values in configuration when provided directly', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
// We'll still mock this but it should not be used for literal values
loadAuthValues.mockResolvedValue({});
// Clear all previous mocks
mockAxios.post.mockClear();
mockAxios.get.mockClear();
// 1. First mock: File upload response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({ data: { id: 'file-123', purpose: 'ocr' } }),
);
// 2. Second mock: Signed URL response
mockAxios.get.mockImplementationOnce(() =>
Promise.resolve({ data: { url: 'https://signed-url.com' } }),
);
// 3. Third mock: OCR response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({
data: {
pages: [{ markdown: 'Processed with literal config values' }],
},
}),
);
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
// Direct values that should be used as-is, without variable substitution
apiKey: 'actual-api-key-value',
baseURL: 'https://direct-api-url.mistral.ai/v1',
mistralModel: 'mistral-direct-model',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'direct-values.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Verify the correct URL was used with the direct baseURL value
expect(mockAxios.post).toHaveBeenCalledWith(
'https://direct-api-url.mistral.ai/v1/files',
expect.any(Object),
expect.objectContaining({
headers: expect.objectContaining({
Authorization: 'Bearer actual-api-key-value',
}),
}),
);
// Check the OCR call was made with the direct model value
expect(mockAxios.post).toHaveBeenCalledWith(
'https://direct-api-url.mistral.ai/v1/ocr',
expect.objectContaining({
model: 'mistral-direct-model',
}),
expect.any(Object),
);
// Verify the result
expect(result.text).toEqual('Processed with literal config values\n\n');
// Verify loadAuthValues was never called since we used direct values
expect(loadAuthValues).not.toHaveBeenCalled();
});
it('should handle empty configuration values and use defaults', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
// Set up the mock values to be returned by loadAuthValues
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'default-from-env-key',
OCR_BASEURL: 'https://default-from-env.mistral.ai/v1',
});
// Clear all previous mocks
mockAxios.post.mockClear();
mockAxios.get.mockClear();
// 1. First mock: File upload response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({ data: { id: 'file-123', purpose: 'ocr' } }),
);
// 2. Second mock: Signed URL response
mockAxios.get.mockImplementationOnce(() =>
Promise.resolve({ data: { url: 'https://signed-url.com' } }),
);
// 3. Third mock: OCR response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({
data: {
pages: [{ markdown: 'Content from default configuration' }],
},
}),
);
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
// Empty string values - should fall back to defaults
apiKey: '',
baseURL: '',
mistralModel: '',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'empty-config.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Verify loadAuthValues was called with the default variable names
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user123',
authFields: ['OCR_BASEURL', 'OCR_API_KEY'],
optional: expect.any(Set),
});
// Verify the API calls used the default values from loadAuthValues
expect(mockAxios.post).toHaveBeenCalledWith(
'https://default-from-env.mistral.ai/v1/files',
expect.any(Object),
expect.objectContaining({
headers: expect.objectContaining({
Authorization: 'Bearer default-from-env-key',
}),
}),
);
// Verify the OCR model defaulted to mistral-ocr-latest
expect(mockAxios.post).toHaveBeenCalledWith(
'https://default-from-env.mistral.ai/v1/ocr',
expect.objectContaining({
model: 'mistral-ocr-latest',
}),
expect.any(Object),
);
// Check result
expect(result.text).toEqual('Content from default configuration\n\n');
});
});
});
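
Editor's note: one detail the deleted spec leans on is Jest's FIFO consumption of mockResolvedValueOnce queues — the upload, signed-URL, and OCR responses must be enqueued in exactly the order the code awaits them. A minimal sketch of that ordering contract (runs inside a Jest test):

const post = jest.fn();
post.mockResolvedValueOnce({ data: { id: 'file-123' } }); // consumed by the 1st call
post.mockResolvedValueOnce({ data: { pages: [] } });      // consumed by the 2nd call

post().then((r) => console.log(r.data.id));    // file-123
post().then((r) => console.log(r.data.pages)); // []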

View File

@@ -1,5 +0,0 @@
const crud = require('./crud');
module.exports = {
...crud,
};

View File

@@ -49,7 +49,6 @@ async function encodeAndFormat(req, files, endpoint, mode) {
const promises = [];
const encodingMethods = {};
const result = {
text: '',
files: [],
image_urls: [],
};
@@ -60,9 +59,6 @@ async function encodeAndFormat(req, files, endpoint, mode) {
for (let file of files) {
const source = file.source ?? FileSources.local;
if (source === FileSources.text && file.text) {
result.text += `${!result.text ? 'Attached document(s):\n```md' : '\n\n---\n\n'}# "${file.filename}"\n${file.text}\n`;
}
if (!file.height) {
promises.push([file, null]);
@@ -89,10 +85,6 @@ async function encodeAndFormat(req, files, endpoint, mode) {
promises.push(preparePayload(req, file));
}
if (result.text) {
result.text += '\n```';
}
const detail = req.body.imageDetail ?? ImageDetail.auto;
/** @type {Array<[MongoFile, string]>} */
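
Editor's note: the removed lines above had aggregated attached text documents into one fenced markdown block, separated by horizontal rules. A self-contained sketch of that removed formatting, reconstructed from the diff for reference:

function aggregateAttachedText(files) {
  let text = '';
  for (const file of files) {
    if (!file.text) continue;
    // The first document opens the fenced block; later ones add a separator.
    text += `${!text ? 'Attached document(s):\n```md' : '\n\n---\n\n'}# "${file.filename}"\n${file.text}\n`;
  }
  if (text) {
    text += '\n```';
  }
  return text;
}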

View File

@@ -28,8 +28,8 @@ const { addResourceFileId, deleteResourceFileId } = require('~/server/controller
const { addAgentResourceFile, removeAgentResourceFiles } = require('~/models/Agent');
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
const { createFile, updateFileUsage, deleteFiles } = require('~/models/File');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { getEndpointsConfig } = require('~/server/services/Config');
const { loadAuthValues } = require('~/app/clients/tools/util');
const { LB_QueueAsyncCall } = require('~/server/utils/queue');
const { getStrategyFunctions } = require('./strategies');
const { determineFileType } = require('~/server/utils');
@@ -162,6 +162,7 @@ const processDeleteRequest = async ({ req, files }) => {
for (const file of files) {
const source = file.source ?? FileSources.local;
if (req.body.agent_id && req.body.tool_resource) {
agentFiles.push({
tool_resource: req.body.tool_resource,
@@ -169,11 +170,6 @@ const processDeleteRequest = async ({ req, files }) => {
});
}
if (source === FileSources.text) {
resolvedFileIds.push(file.file_id);
continue;
}
if (checkOpenAIStorage(source) && !client[source]) {
await initializeClients();
}
@@ -525,52 +521,6 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
if (!isFileSearchEnabled) {
throw new Error('File search is not enabled for Agents');
}
} else if (tool_resource === EToolResources.ocr) {
const isOCREnabled = await checkCapability(req, AgentCapabilities.ocr);
if (!isOCREnabled) {
throw new Error('OCR capability is not enabled for Agents');
}
const { handleFileUpload } = getStrategyFunctions(
req.app.locals?.ocr?.strategy ?? FileSources.mistral_ocr,
);
const { file_id, temp_file_id } = metadata;
const {
text,
bytes,
// TODO: OCR images support?
images,
filename,
filepath: ocrFileURL,
} = await handleFileUpload({ req, file, file_id, entity_id: agent_id });
const fileInfo = removeNullishValues({
text,
bytes,
file_id,
temp_file_id,
user: req.user.id,
type: file.mimetype,
filepath: ocrFileURL,
source: FileSources.text,
filename: filename ?? file.originalname,
model: messageAttachment ? undefined : req.body.model,
context: messageAttachment ? FileContext.message_attachment : FileContext.agents,
});
if (!messageAttachment && tool_resource) {
await addAgentResourceFile({
req,
file_id,
agent_id,
tool_resource,
});
}
const result = await createFile(fileInfo, true);
return res
.status(200)
.json({ message: 'Agent file uploaded and processed successfully', ...result });
}
const source =
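
Editor's note: the removed branch gated OCR uploads on the agent capability, resolved the upload handler from the configured OCR strategy, and persisted the extracted text as a FileSources.text record. A condensed sketch — names mirror the diff (checkCapability, getStrategyFunctions, createFile come from the modules required at the top of the file), but this is illustrative, not the actual controller:

async function handleOcrUpload({ req, file, metadata, agent_id }) {
  if (!(await checkCapability(req, AgentCapabilities.ocr))) {
    throw new Error('OCR capability is not enabled for Agents');
  }
  const { handleFileUpload } = getStrategyFunctions(
    req.app.locals?.ocr?.strategy ?? FileSources.mistral_ocr,
  );
  const { text, bytes, filename, filepath } = await handleFileUpload({
    req,
    file,
    file_id: metadata.file_id,
    entity_id: agent_id,
  });
  return createFile({ text, bytes, filename, filepath, source: FileSources.text }, true);
}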

View File

@@ -24,7 +24,6 @@ const {
const { uploadOpenAIFile, deleteOpenAIFile, getOpenAIFileStream } = require('./OpenAI');
const { getCodeOutputDownloadStream, uploadCodeEnvFile } = require('./Code');
const { uploadVectors, deleteVectors } = require('./VectorDB');
const { uploadMistralOCR } = require('./MistralOCR');
/**
* Firebase Storage Strategy Functions
@@ -128,26 +127,6 @@ const codeOutputStrategy = () => ({
getDownloadStream: getCodeOutputDownloadStream,
});
const mistralOCRStrategy = () => ({
/** @type {typeof saveFileFromURL | null} */
saveURL: null,
/** @type {typeof getLocalFileURL | null} */
getFileURL: null,
/** @type {typeof saveLocalBuffer | null} */
saveBuffer: null,
/** @type {typeof processLocalAvatar | null} */
processAvatar: null,
/** @type {typeof uploadLocalImage | null} */
handleImageUpload: null,
/** @type {typeof prepareImagesLocal | null} */
prepareImagePayload: null,
/** @type {typeof deleteLocalFile | null} */
deleteFile: null,
/** @type {typeof getLocalFileStream | null} */
getDownloadStream: null,
handleFileUpload: uploadMistralOCR,
});
// Strategy Selector
const getStrategyFunctions = (fileSource) => {
if (fileSource === FileSources.firebase) {
@@ -162,8 +141,6 @@ const getStrategyFunctions = (fileSource) => {
return vectorStrategy();
} else if (fileSource === FileSources.execute_code) {
return codeOutputStrategy();
} else if (fileSource === FileSources.mistral_ocr) {
return mistralOCRStrategy();
} else {
throw new Error('Invalid file source');
}

View File

@@ -4,9 +4,7 @@ const { HttpsProxyAgent } = require('https-proxy-agent');
const { EModelEndpoint, defaultModels, CacheKeys } = require('librechat-data-provider');
const { inputSchema, logAxiosError, extractBaseURL, processModelData } = require('~/utils');
const { OllamaClient } = require('~/app/clients/OllamaClient');
const { isUserProvided } = require('~/server/utils');
const getLogStores = require('~/cache/getLogStores');
const { logger } = require('~/config');
/**
* Splits a string by commas and trims each resulting value.
@@ -44,7 +42,7 @@ const fetchModels = async ({
user,
apiKey,
baseURL,
name = EModelEndpoint.openAI,
name = 'OpenAI',
azure = false,
userIdQuery = false,
createTokenConfig = true,
@@ -66,19 +64,12 @@ const fetchModels = async ({
try {
const options = {
headers: {},
headers: {
Authorization: `Bearer ${apiKey}`,
},
timeout: 5000,
};
if (name === EModelEndpoint.anthropic) {
options.headers = {
'x-api-key': apiKey,
'anthropic-version': process.env.ANTHROPIC_VERSION || '2023-06-01',
};
} else {
options.headers.Authorization = `Bearer ${apiKey}`;
}
if (process.env.PROXY) {
options.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
}
@@ -157,7 +148,7 @@ const fetchOpenAIModels = async (opts, _models = []) => {
baseURL,
azure: opts.azure,
user: opts.user,
name: EModelEndpoint.openAI,
name: baseURL,
});
}
@@ -166,7 +157,7 @@ const fetchOpenAIModels = async (opts, _models = []) => {
}
if (baseURL === openaiBaseURL) {
const regex = /(text-davinci-003|gpt-|o\d+)/;
const regex = /(text-davinci-003|gpt-|o\d+-)/;
const excludeRegex = /audio|realtime/;
models = models.filter((model) => regex.test(model) && !excludeRegex.test(model));
const instructModels = models.filter((model) => model.includes('instruct'));
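
Editor's note: the two regex lines above differ only in whether a hyphen must follow the o-series prefix; with the hyphen required, a bare id like 'o1' no longer matches while 'o1-mini' still does:

const withoutHyphen = /(text-davinci-003|gpt-|o\d+)/;
const withHyphen = /(text-davinci-003|gpt-|o\d+-)/;

console.log(withoutHyphen.test('o1'));   // true
console.log(withHyphen.test('o1'));      // false: the hyphen is required
console.log(withHyphen.test('o1-mini')); // true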
@@ -240,71 +231,13 @@ const getChatGPTBrowserModels = () => {
return models;
};
/**
* Fetches models from the Anthropic API.
* @async
* @function
* @param {object} opts - The options for fetching the models.
* @param {string} opts.user - The user ID to send to the API.
* @param {string[]} [_models=[]] - The models to use as a fallback.
*/
const fetchAnthropicModels = async (opts, _models = []) => {
let models = _models.slice() ?? [];
let apiKey = process.env.ANTHROPIC_API_KEY;
const anthropicBaseURL = 'https://api.anthropic.com/v1';
let baseURL = anthropicBaseURL;
let reverseProxyUrl = process.env.ANTHROPIC_REVERSE_PROXY;
if (reverseProxyUrl) {
baseURL = extractBaseURL(reverseProxyUrl);
}
if (!apiKey) {
return models;
}
const modelsCache = getLogStores(CacheKeys.MODEL_QUERIES);
const cachedModels = await modelsCache.get(baseURL);
if (cachedModels) {
return cachedModels;
}
if (baseURL) {
models = await fetchModels({
apiKey,
baseURL,
user: opts.user,
name: EModelEndpoint.anthropic,
tokenKey: EModelEndpoint.anthropic,
});
}
if (models.length === 0) {
return _models;
}
await modelsCache.set(baseURL, models);
return models;
};
const getAnthropicModels = async (opts = {}) => {
const getAnthropicModels = () => {
let models = defaultModels[EModelEndpoint.anthropic];
if (process.env.ANTHROPIC_MODELS) {
models = splitAndTrim(process.env.ANTHROPIC_MODELS);
return models;
}
if (isUserProvided(process.env.ANTHROPIC_API_KEY)) {
return models;
}
try {
return await fetchAnthropicModels(opts, models);
} catch (error) {
logger.error('Error fetching Anthropic models:', error);
return models;
}
return models;
};
const getGoogleModels = () => {
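
Editor's note: the removed fetchAnthropicModels followed the same cache-aside pattern as the other fetchers — return the cached list when present, otherwise fetch, fall back to defaults on an empty result, and cache the rest. A minimal sketch with an in-memory Map standing in for getLogStores:

const modelsCache = new Map();

async function fetchModelsCached(baseURL, fetchFn, fallback = []) {
  const cached = modelsCache.get(baseURL);
  if (cached) {
    return cached;
  }
  const models = await fetchFn(baseURL);
  if (models.length === 0) {
    return fallback;
  }
  modelsCache.set(baseURL, models);
  return models;
}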

View File

@@ -352,15 +352,15 @@ describe('splitAndTrim', () => {
});
describe('getAnthropicModels', () => {
it('returns default models when ANTHROPIC_MODELS is not set', async () => {
it('returns default models when ANTHROPIC_MODELS is not set', () => {
delete process.env.ANTHROPIC_MODELS;
const models = await getAnthropicModels();
const models = getAnthropicModels();
expect(models).toEqual(defaultModels[EModelEndpoint.anthropic]);
});
it('returns models from ANTHROPIC_MODELS when set', async () => {
it('returns models from ANTHROPIC_MODELS when set', () => {
process.env.ANTHROPIC_MODELS = 'claude-1, claude-2 ';
const models = await getAnthropicModels();
const models = getAnthropicModels();
expect(models).toEqual(['claude-1', 'claude-2']);
});
});

View File

@@ -1,56 +0,0 @@
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
/**
*
* @param {Object} params
* @param {string} params.userId
* @param {string[]} params.authFields
* @param {Set<string>} [params.optional]
* @param {boolean} [params.throwError]
* @returns {Promise<Object>} An object mapping each authentication field to its resolved value.
*/
const loadAuthValues = async ({ userId, authFields, optional, throwError = true }) => {
let authValues = {};
/**
* Finds the first non-empty value for the given authentication field, supporting alternate fields.
* @param {string[]} fields Array of strings representing the authentication fields. Supports alternate fields delimited by "||".
* @returns {Promise<{ authField: string, authValue: string} | null>} An object containing the authentication field and value, or null if not found.
*/
const findAuthValue = async (fields) => {
for (const field of fields) {
let value = process.env[field];
if (value) {
return { authField: field, authValue: value };
}
try {
value = await getUserPluginAuthValue(userId, field, throwError);
} catch (err) {
if (optional && optional.has(field)) {
return { authField: field, authValue: undefined };
}
if (field === fields[fields.length - 1] && !value) {
throw err;
}
}
if (value) {
return { authField: field, authValue: value };
}
}
return null;
};
for (let authField of authFields) {
const fields = authField.split('||');
const result = await findAuthValue(fields);
if (result) {
authValues[result.authField] = result.authValue;
}
}
return authValues;
};
module.exports = {
loadAuthValues,
};
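
Editor's note: usage sketch for the helper above — "||" declares alternate fields tried in order, and the `optional` set suppresses errors for values that may legitimately be absent. Assumes it runs in an async context with OCR_API_KEY available in the environment or user plugin auth:

async function example(userId) {
  const values = await loadAuthValues({
    userId,
    authFields: ['OCR_API_KEY||MISTRAL_API_KEY', 'OCR_BASEURL'],
    optional: new Set(['OCR_BASEURL']),
  });
  // e.g. { OCR_API_KEY: '...', OCR_BASEURL: undefined }
  return values;
}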

View File

@@ -203,7 +203,6 @@ function generateConfig(key, baseURL, endpoint) {
AgentCapabilities.artifacts,
AgentCapabilities.actions,
AgentCapabilities.tools,
AgentCapabilities.ocr,
];
}

View File

@@ -39,10 +39,7 @@ jest.mock('winston-daily-rotate-file', () => {
});
jest.mock('~/config', () => {
const actualModule = jest.requireActual('~/config');
return {
sendEvent: actualModule.sendEvent,
createAxiosInstance: actualModule.createAxiosInstance,
logger: {
info: jest.fn(),
warn: jest.fn(),

Some files were not shown because too many files have changed in this diff.