Compare commits


24 Commits

Author SHA1 Message Date
Ruben Talstra
47dff7d387 feat: Add Markdown heading and list components for enhanced formatting 2025-03-12 10:42:08 +01:00
Danny Avila
cf03731cc8 🔧 fix: Axios Proxy Usage And Bump mongoose (#6298)
* fix: bump mongoose to fix nested schema errors

* fix: Enhance Axios instance creation with improved proxy handling and error logging

* fix: Refactor Axios instance creation and remove proxy handling from file upload functions

* fix: Update proxy configuration in Axios instance creation and add unit tests
2025-03-11 14:44:54 -04:00
Marco Beretta
cbd5bd2405 🎨 a11y: Update Model Spec Description Text (#6294) 2025-03-11 13:18:27 -04:00
Danny Avila
ded3cd8876 🔍 feat: Mistral OCR API / Upload Files as Text (#6274)
* refactor: move `loadAuthValues` to `~/services/Tools/credentials`

* feat: add createAxiosInstance function to configure axios with proxy support

* WIP: First pass mistral ocr

* refactor: replace getConvoFiles with getToolFiles for improved file retrieval logic

* refactor: improve document formatting in encodeAndFormat function

* refactor: remove unused resendFiles parameter from buildOptions function (this option comes from the agent config)

* fix: update getFiles call to include files with `text` property as well

* refactor: move file handling to `initializeAgentOptions`

* refactor: enhance addImageURLs method to handle OCR text and improve message formatting

* refactor: update message formatting to handle OCR text in various content types

* refactor: remove unused resendFiles property from compactAgentsSchema

* fix: add error handling for Mistral OCR document upload and logging

* refactor: integrate OCR capability into file upload options and configuration

* refactor: skip processing for text source files in delete request, as they are directly tied to database

* feat: add metadata field to ExtendedFile type and update PanelColumns and PanelTable components for localization and metadata handling

* fix: source icon styling

* wip: first pass, frontend file context agent resources

* refactor: add hover card with contextual information for File Context (OCR) in FileContext component

* feat: enhance file processing by integrating file retrieval for OCR resources in agent initialization

* feat: implement OCR config; fix: agent resource deletion for ocr files

* feat: enhance agent initialization by adding OCR capability check in resource priming

* ci: fix `~/config` module mock

* ci: add OCR property expectation in AppService tests

* refactor: simplify OCR config loading by removing environment variable extraction, to be done when OCR is actually performed

* ci: add unit test to ensure environment variable references are not parsed in OCR config

* refactor: disable base64 image inclusion in OCR request

* refactor: enhance OCR configuration handling by validating environment variables and providing defaults

* refactor: use file stream from disk for mistral ocr api
2025-03-10 17:23:46 -04:00
github-actions[bot]
9db00edfc4 🌍 i18n: Update translation.json with latest translations (#6241)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-03-09 18:19:50 -04:00
sh4shii
a53638c481 🐛 fix: Await saveMessage in abortMiddleware to ensure proper execution (#6248) 2025-03-09 18:07:31 -04:00
Danny Avila
d6ab769b80 ⚠️ refactor: Use Error Content Part Instead Of Throwing Error for Agents (#6262) 2025-03-09 18:06:34 -04:00
Ruben Talstra
3e3dfe5bad 🔏 fix: Enhance Two-Factor Authentication (#6247)
* 🌟 feat: Implement Two-Factor Authentication (2FA) functionality

* fix: Two-Factor Authentication Logic and State Management

* 🌟 feat: Add LICENSE file and update package version to 0.0.2 with MIT license
2025-03-08 15:28:27 -05:00
Ruben Talstra
cc661c95ee 🔧 fix: MeiliSearch Field Error and Patch Incorrect Import by #6210 (#6245)
* 📦 refactor: Update MeiliSearch integration and improve schema handling

* Update indexSync.js

* 📦 refactor: Update Conversation model import path in indexSync.js

* 📦 refactor: Update import paths for Conversation and Message models in indexSync.js
2025-03-08 14:37:33 -05:00
github-actions[bot]
6ea88e09a2 🌍 i18n: Update translation.json with latest translations (#6240)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-03-08 11:18:04 -05:00
Danny Avila
a846e898a2 🐛 fix: Avatar Type Definitions in Agent/Assistant Schemas (#6235)
* fix: Simplify avatar type definition in agent and assistant schemas

* fix: Update regex to correctly match OpenAI model identifiers
2025-03-08 10:55:06 -05:00
Danny Avila
dc8d5dee6a 📦 chore: Patch axios to address CVE-2025-27152 (#6222)
* 📦 chore: remove `langchain` (no longer used)

* chore: patch `axios` to address CVE-2025-27152
2025-03-07 12:45:31 -05:00
github-actions[bot]
f04ae65a75 🌍 i18n: Update translation.json with latest translations (#6220)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-03-07 12:43:40 -05:00
Danny Avila
0a4da06fe1 📦 ci: Update npm authentication token for publishing in workflow 2025-03-07 12:20:56 -05:00
Danny Avila
932474c44e 📦 ci: Refactor workflow to combine build and publish steps with version check for @librechat/data-schemas 2025-03-07 12:18:03 -05:00
Danny Avila
a2b7812033 📦 ci: Update workflow to publish @librechat/data-schemas to NPM with manual trigger option 2025-03-07 12:12:30 -05:00
Danny Avila
88d2920b06 📦 ci: npm publish access to public for data-schemas 2025-03-07 12:10:32 -05:00
Ruben Talstra
c5e012abc0 🌍 i18n: Add Thai Language Support and Update Translations (#6219)
* 🌍 i18n: Add Thai Language Support and Update Translations

* 📝 docs: Update Locize Logo in README.md
2025-03-07 11:57:57 -05:00
Ruben Talstra
b51cd21b3c 📦 refactor: Move DB Models to @librechat/data-schemas (#6210)
* 🚀 feat: Introduce data schemas and refactor models to use @librechat/data-schemas

* 🚀 feat: Add installation step for Data Schemas Package in backend review workflow

* chore: Add `data-schemas` package to update/rebuild packages scripts

* chore: Update Dockerfile to include data-schemas package build process

* fix: add missing @rollup/plugin-typescript package

* chore: Add GitHub Actions workflow for publishing data-schemas package

---------

Co-authored-by: Danny Avila <danny@librechat.ai>
2025-03-07 11:55:44 -05:00
Danny Avila
4d04904af3 v0.7.7 (#6206)
* v0.7.7

* chore: Bump librechat-mcp version to 1.1.0

* action: update Unreleased changelog

* Update CHANGELOG.md

---------

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Ruben Talstra <RubenTalstra1211@outlook.com>
2025-03-06 14:33:33 -05:00
Danny Avila
8cb7f34f86 🚀 feat: Add Code API Proxy Support and Update MCP SDK (#6203)
* chore: bump mcp sdk

* feat: Add proxy support for file download and upload in Code Environment CRUD operations

* chore: remove unused files

* chore: change output format from CommonJS to ES module in server rollup config
2025-03-06 12:47:59 -05:00
Kaushik Iska
780fdf743a 🕒 feat: Add Configurable MCP Server Timeouts (#6199) 2025-03-06 12:02:43 -05:00
Danny Avila
c8f7588164 🪄 feat: Customize Sandpack bundlerURL for Artifacts (#6191) 2025-03-05 16:03:54 -05:00
Danny Avila
00b2d026c1 🚀 feat: Enhance Model Handling, Logging & xAI Agent Support (#6182)
* chore: update @librechat/agents to version 2.1.9

* feat: xAI standalone provider for agents

* chore: bump librechat-data-provider version to 0.7.6997

* fix: reorder import statements and enhance user listing output

* fix: Update Docker Compose commands to support v2 syntax with fallback

* 🔧 fix: drop `reasoning_effort` for o1-preview/mini models

* chore: requireLocalAuth logging

* fix: edge case artifact message editing logic to handle `new` conversation IDs

* fix: remove `temperature` from model options in OpenAIClient if o1-mini/preview

* fix: update type annotation for fetchPromisesMap to use Promise<string[]> instead of string[]

* feat: anthropic model fetching

* fix: update model name to use EModelEndpoint.openAI in fetchModels and fetchOpenAIModels

* fix: add error handling to modelController for loadModels

* fix: add error handling and logging for model fetching in loadDefaultModels

* ci: update getAnthropicModels tests to be asynchronous

* feat: add user ID to model options in OpenAI and custom endpoint initialization

---------

Co-authored-by: Andrei Berceanu <andreicberceanu@gmail.com>
Co-authored-by: KiGamji <maloyh44@gmail.com>
2025-03-05 12:04:26 -05:00
210 changed files with 8030 additions and 6246 deletions

View File

@@ -39,6 +39,9 @@ jobs:
- name: Install MCP Package
run: npm run build:mcp
- name: Install Data Schemas Package
run: npm run build:data-schemas
- name: Create empty auth.json file
run: |
mkdir -p api/data

.github/workflows/data-schemas.yml (new file, 58 lines)

@@ -0,0 +1,58 @@
name: Publish `@librechat/data-schemas` to NPM
on:
  push:
    branches:
      - main
    paths:
      - 'packages/data-schemas/package.json'
  workflow_dispatch:
    inputs:
      reason:
        description: 'Reason for manual trigger'
        required: false
        default: 'Manual publish requested'
jobs:
  build-and-publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Use Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18.x'
      - name: Install dependencies
        run: cd packages/data-schemas && npm ci
      - name: Build
        run: cd packages/data-schemas && npm run build
      - name: Set up npm authentication
        run: echo "//registry.npmjs.org/:_authToken=${{ secrets.PUBLISH_NPM_TOKEN }}" > ~/.npmrc
      - name: Check version change
        id: check
        working-directory: packages/data-schemas
        run: |
          PACKAGE_VERSION=$(node -p "require('./package.json').version")
          PUBLISHED_VERSION=$(npm view @librechat/data-schemas version 2>/dev/null || echo "0.0.0")
          if [ "$PACKAGE_VERSION" = "$PUBLISHED_VERSION" ]; then
            echo "No version change, skipping publish"
            echo "skip=true" >> $GITHUB_OUTPUT
          else
            echo "Version changed, proceeding with publish"
            echo "skip=false" >> $GITHUB_OUTPUT
          fi
      - name: Pack package
        if: steps.check.outputs.skip != 'true'
        working-directory: packages/data-schemas
        run: npm pack
      - name: Publish
        if: steps.check.outputs.skip != 'true'
        working-directory: packages/data-schemas
        run: npm publish *.tgz --access public
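The "Check version change" step above is the publish gate: it compares the local package.json version against the version already on the npm registry and only publishes when they differ. A minimal local sketch of the same gate in Node (not part of the repo; assumes node and npm are on PATH and the script runs from the repo root):

const { execSync } = require('node:child_process');

const local = require('./packages/data-schemas/package.json').version;
let published = '0.0.0';
try {
  published = execSync('npm view @librechat/data-schemas version', { encoding: 'utf8' }).trim();
} catch {
  // Not yet published: fall back to 0.0.0 so the first publish proceeds.
}
console.log(local === published ? 'skip publish' : 'publish new version');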

View File

@@ -84,11 +84,11 @@ jobs:
      with:
        token: ${{ secrets.GITHUB_TOKEN }}
        sign-commits: true
-       commit-message: "chore: update CHANGELOG for release ${GITHUB_REF##*/}"
+       commit-message: "chore: update CHANGELOG for release ${{ github.ref_name }}"
        base: main
-       branch: "changelog/${GITHUB_REF##*/}"
+       branch: "changelog/${{ github.ref_name }}"
        reviewers: danny-avila
-       title: "chore: update CHANGELOG for release ${GITHUB_REF##*/}"
+       title: "chore: update CHANGELOG for release ${{ github.ref_name }}"
        body: |
          **Description**:
-         - This PR updates the CHANGELOG.md by removing the "Unreleased" section and adding new release notes for release ${GITHUB_REF##*/} above previous releases.
+         - This PR updates the CHANGELOG.md by removing the "Unreleased" section and adding new release notes for release ${{ github.ref_name }} above previous releases.

CHANGELOG.md (new file, 16 lines)

@@ -0,0 +1,16 @@
# Changelog
All notable changes to this project will be documented in this file.
## [Unreleased]
### ✨ New Features
- 🪄 feat: Agent Artifacts by **@danny-avila** in [#5804](https://github.com/danny-avila/LibreChat/pull/5804)
### ⚙️ Other Changes
- 🔄 chore: Enforce 18next Language Keys by **@rubentalstra** in [#5803](https://github.com/danny-avila/LibreChat/pull/5803)
- 🔃 refactor: Parent Message ID Handling on Error, Update Translations, Bump Agents by **@danny-avila** in [#5833](https://github.com/danny-avila/LibreChat/pull/5833)
---

View File

@@ -1,4 +1,4 @@
-# v0.7.7-rc1
+# v0.7.7
# Base node image
FROM node:20-alpine AS node

View File

@@ -1,5 +1,5 @@
# Dockerfile.multi
-# v0.7.7-rc1
+# v0.7.7
# Base for all builds
FROM node:20-alpine AS base-min
@@ -11,6 +11,7 @@ RUN npm config set fetch-retry-maxtimeout 600000 && \
COPY package*.json ./
COPY packages/data-provider/package*.json ./packages/data-provider/
COPY packages/mcp/package*.json ./packages/mcp/
COPY packages/data-schemas/package*.json ./packages/data-schemas/
COPY client/package*.json ./client/
COPY api/package*.json ./api/
@@ -32,6 +33,13 @@ COPY packages/mcp ./
COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
RUN npm run build
# Build data-schemas
FROM base AS data-schemas-build
WORKDIR /app/packages/data-schemas
COPY packages/data-schemas ./
COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
RUN npm run build
# Client build
FROM base AS client-build
WORKDIR /app/client
@@ -49,8 +57,9 @@ COPY api ./api
COPY config ./config
COPY --from=data-provider-build /app/packages/data-provider/dist ./packages/data-provider/dist
COPY --from=mcp-build /app/packages/mcp/dist ./packages/mcp/dist
COPY --from=data-schemas-build /app/packages/data-schemas/dist ./packages/data-schemas/dist
COPY --from=client-build /app/client/dist ./client/dist
WORKDIR /app/api
EXPOSE 3080
ENV HOST=0.0.0.0
CMD ["node", "server/index.js"]
CMD ["node", "server/index.js"]

View File

@@ -197,6 +197,6 @@ We thank [Locize](https://locize.com) for their translation management tools tha
<p align="center">
<a href="https://locize.com" target="_blank" rel="noopener noreferrer">
<img src="https://locize.com/img/locize_color.svg" alt="Locize Logo" height="50">
<img src="https://github.com/user-attachments/assets/d6b70894-6064-475e-bb65-92a9e23e0077" alt="Locize Logo" height="50">
</a>
</p>

View File

@@ -1,3 +1,4 @@
const crypto = require('crypto');
const fetch = require('node-fetch');
const {
supportsBalanceCheck,
@@ -8,7 +9,7 @@ const {
ErrorTypes,
Constants,
} = require('librechat-data-provider');
-const { getMessages, saveMessage, updateMessage, saveConvo, getConvo, getUserById } = require('~/models');
+const { getMessages, saveMessage, updateMessage, saveConvo, getConvo } = require('~/models');
const { addSpaceIfNeeded, isEnabled } = require('~/server/utils');
const { truncateToolCallOutputs } = require('./prompts');
const checkBalance = require('~/models/checkBalance');
@@ -16,48 +17,6 @@ const { getFiles } = require('~/models/File');
const TextStream = require('./TextStream');
const { logger } = require('~/config');
-let crypto;
-try {
-  crypto = require('crypto');
-} catch (err) {
-  logger.error('[AskController] crypto support is disabled!', err);
-}
-/**
- * Helper function to encrypt plaintext using AES-256-GCM and then RSA-encrypt the AES key.
- * @param {string} plainText - The plaintext to encrypt.
- * @param {string} pemPublicKey - The RSA public key in PEM format.
- * @returns {Object} An object containing the ciphertext, iv, authTag, and encryptedKey.
- */
-function encryptText(plainText, pemPublicKey) {
-  // Generate a random 256-bit AES key and a 12-byte IV.
-  const aesKey = crypto.randomBytes(32);
-  const iv = crypto.randomBytes(12);
-  // Encrypt the plaintext using AES-256-GCM.
-  const cipher = crypto.createCipheriv('aes-256-gcm', aesKey, iv);
-  let ciphertext = cipher.update(plainText, 'utf8', 'base64');
-  ciphertext += cipher.final('base64');
-  const authTag = cipher.getAuthTag().toString('base64');
-  // Encrypt the AES key using the user's RSA public key.
-  const encryptedKey = crypto.publicEncrypt(
-    {
-      key: pemPublicKey,
-      padding: crypto.constants.RSA_PKCS1_OAEP_PADDING,
-      oaepHash: 'sha256',
-    },
-    aesKey,
-  ).toString('base64');
-  return {
-    ciphertext,
-    iv: iv.toString('base64'),
-    authTag,
-    encryptedKey,
-  };
-}
class BaseClient {
constructor(apiKey, options = {}) {
this.apiKey = apiKey;
@@ -890,64 +849,18 @@ class BaseClient {
* @param {string | null} user
*/
async saveMessageToDatabase(message, endpointOptions, user = null) {
-    // Normalize the user information:
-    // If "user" is an object, use it; otherwise, if a string is passed use req.user (if available)
-    const currentUser =
-      user && typeof user === 'object'
-        ? user
-        : (this.options.req && this.options.req.user
-          ? this.options.req.user
-          : { id: user });
-    const currentUserId = currentUser.id || currentUser;
-    // Check if the clients stored user matches the current user.
-    // (this.user might have been set earlier in setMessageOptions)
-    const storedUserId =
-      this.user && typeof this.user === 'object' ? this.user.id : this.user;
-    if (storedUserId && currentUserId && storedUserId !== currentUserId) {
+    if (this.user && user !== this.user) {
throw new Error('User mismatch.');
}
-    // console.log('User ID:', currentUserId);
-    const dbUser = await getUserById(currentUserId, 'encryptionPublicKey');
-    // --- NEW ENCRYPTION BLOCK: Encrypt AI response if encryptionPublicKey exists ---
-    if (dbUser.encryptionPublicKey && message && message.text) {
-      try {
-        // Rebuild the PEM format if necessary.
-        const pemPublicKey = `-----BEGIN PUBLIC KEY-----\n${dbUser.encryptionPublicKey
-          .match(/.{1,64}/g)
-          .join('\n')}\n-----END PUBLIC KEY-----`;
-        const { ciphertext, iv, authTag, encryptedKey } = encryptText(
-          message.text,
-          pemPublicKey,
-        );
-        message.text = ciphertext;
-        message.iv = iv;
-        message.authTag = authTag;
-        message.encryptedKey = encryptedKey;
-        logger.debug('[BaseClient.saveMessageToDatabase] Encrypted message text');
-      } catch (err) {
-        logger.error('[BaseClient.saveMessageToDatabase] Error encrypting message text', err);
-      }
-    }
-    // --- End Encryption Block ---
-    // Build update parameters including encryption fields.
-    const updateParams = {
-      ...message,
-      endpoint: this.options.endpoint,
-      unfinished: false,
-      user: currentUserId, // store the user id (ensured to be a string)
-      iv: message.iv ?? null,
-      authTag: message.authTag ?? null,
-      encryptedKey: message.encryptedKey ?? null,
-    };
const savedMessage = await saveMessage(
this.options.req,
-      updateParams,
+      {
+        ...message,
+        endpoint: this.options.endpoint,
+        unfinished: false,
+        user,
+      },
{ context: 'api/app/clients/BaseClient.js - saveMessageToDatabase #saveMessage' },
);
@@ -1208,9 +1121,13 @@ class BaseClient {
return message;
}
-    const files = await getFiles({
-      file_id: { $in: fileIds },
-    });
+    const files = await getFiles(
+      {
+        file_id: { $in: fileIds },
+      },
+      {},
+      {},
+    );
await this.addImageURLs(message, files, this.visionMode);
@@ -1236,4 +1153,4 @@ class BaseClient {
}
}
module.exports = BaseClient;

View File

@@ -1307,8 +1307,12 @@ ${convo}
) {
delete modelOptions.stream;
delete modelOptions.stop;
-  } else if (!this.isOmni && modelOptions.reasoning_effort != null) {
+  } else if (
+    (!this.isOmni || /^o1-(mini|preview)/i.test(modelOptions.model)) &&
+    modelOptions.reasoning_effort != null
+  ) {
     delete modelOptions.reasoning_effort;
+    delete modelOptions.temperature;
   }
let reasoningKey = 'reasoning_content';

View File

@@ -325,4 +325,37 @@ describe('formatAgentMessages', () => {
);
expect(result[0].content).not.toContain('Analyzing the problem...');
});
it('should exclude ERROR type content parts', () => {
const payload = [
{
role: 'assistant',
content: [
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Hello there' },
{
type: ContentTypes.ERROR,
[ContentTypes.ERROR]:
'An error occurred while processing the request: Something went wrong',
},
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Final answer' },
],
},
];
const result = formatAgentMessages(payload);
expect(result).toHaveLength(1);
expect(result[0]).toBeInstanceOf(AIMessage);
expect(result[0].content).toEqual([
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Hello there' },
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Final answer' },
]);
// Make sure no error content exists in the result
const hasErrorContent = result[0].content.some(
(item) =>
item.type === ContentTypes.ERROR || JSON.stringify(item).includes('An error occurred'),
);
expect(hasErrorContent).toBe(false);
});
});

View File

@@ -211,6 +211,8 @@ const formatAgentMessages = (payload) => {
} else if (part.type === ContentTypes.THINK) {
hasReasoning = true;
continue;
} else if (part.type === ContentTypes.ERROR) {
continue;
} else {
currentContent.push(part);
}

View File

@@ -21,6 +21,7 @@ const {
} = require('../');
const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { createMCPTool } = require('~/server/services/MCP');
const { loadSpecs } = require('./loadSpecs');
const { logger } = require('~/config');
@@ -90,45 +91,6 @@ const validateTools = async (user, tools = []) => {
}
};
-const loadAuthValues = async ({ userId, authFields, throwError = true }) => {
-  let authValues = {};
-  /**
-   * Finds the first non-empty value for the given authentication field, supporting alternate fields.
-   * @param {string[]} fields Array of strings representing the authentication fields. Supports alternate fields delimited by "||".
-   * @returns {Promise<{ authField: string, authValue: string} | null>} An object containing the authentication field and value, or null if not found.
-   */
-  const findAuthValue = async (fields) => {
-    for (const field of fields) {
-      let value = process.env[field];
-      if (value) {
-        return { authField: field, authValue: value };
-      }
-      try {
-        value = await getUserPluginAuthValue(userId, field, throwError);
-      } catch (err) {
-        if (field === fields[fields.length - 1] && !value) {
-          throw err;
-        }
-      }
-      if (value) {
-        return { authField: field, authValue: value };
-      }
-    }
-    return null;
-  };
-  for (let authField of authFields) {
-    const fields = authField.split('||');
-    const result = await findAuthValue(fields);
-    if (result) {
-      authValues[result.authField] = result.authValue;
-    }
-  }
-  return authValues;
-};
/** @typedef {typeof import('@langchain/core/tools').Tool} ToolConstructor */
/** @typedef {import('@langchain/core/tools').Tool} Tool */
@@ -348,7 +310,6 @@ const loadTools = async ({
module.exports = {
loadToolWithAuth,
-  loadAuthValues,
validateTools,
loadTools,
};
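For reference, a hedged usage sketch of the relocated helper (not from the diff): loadAuthValues, now imported from ~/server/services/Tools/credentials, resolves each field from process.env first and then from per-user plugin auth, honoring "||"-delimited alternate field names as documented in the removed JSDoc above.

const { loadAuthValues } = require('~/server/services/Tools/credentials');

async function getOcrCredentials(userId) {
  // 'OCR_API_KEY||MISTRAL_API_KEY' is a hypothetical alternate pair for illustration.
  return loadAuthValues({ userId, authFields: ['OCR_API_KEY||MISTRAL_API_KEY'] });
}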

View File

@@ -1,9 +1,8 @@
-const { validateTools, loadTools, loadAuthValues } = require('./handleTools');
+const { validateTools, loadTools } = require('./handleTools');
 const handleOpenAIErrors = require('./handleOpenAIErrors');
 module.exports = {
   handleOpenAIErrors,
-  loadAuthValues,
   validateTools,
   loadTools,
 };

View File

@@ -1,3 +1,4 @@
const axios = require('axios');
const { EventSource } = require('eventsource');
const { Time, CacheKeys } = require('librechat-data-provider');
const logger = require('./winston');
@@ -47,9 +48,46 @@ const sendEvent = (res, event) => {
res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`);
};
/**
* Creates and configures an Axios instance with optional proxy settings.
*
* @typedef {import('axios').AxiosInstance} AxiosInstance
* @typedef {import('axios').AxiosProxyConfig} AxiosProxyConfig
*
* @returns {AxiosInstance} A configured Axios instance
* @throws {Error} If there's an issue creating the Axios instance or parsing the proxy URL
*/
function createAxiosInstance() {
const instance = axios.create();
if (process.env.proxy) {
try {
const url = new URL(process.env.proxy);
/** @type {AxiosProxyConfig} */
const proxyConfig = {
host: url.hostname.replace(/^\[|\]$/g, ''),
protocol: url.protocol.replace(':', ''),
};
if (url.port) {
proxyConfig.port = parseInt(url.port, 10);
}
instance.defaults.proxy = proxyConfig;
} catch (error) {
console.error('Error parsing proxy URL:', error);
throw new Error(`Invalid proxy URL: ${process.env.proxy}`);
}
}
return instance;
}
module.exports = {
logger,
sendEvent,
getMCPManager,
createAxiosInstance,
getFlowStateManager,
};
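A short usage sketch of the new factory (based on the unit tests that follow; the require path is an assumption matching the repo's path aliasing): createAxiosInstance reads the lowercase `proxy` environment variable and mirrors it into the instance defaults.

const { createAxiosInstance } = require('~/config');

process.env.proxy = 'http://proxy.example.com:8080'; // hypothetical proxy URL
const axiosInstance = createAxiosInstance();
// axiosInstance.defaults.proxy -> { host: 'proxy.example.com', protocol: 'http', port: 8080 }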

api/config/index.spec.js (new file, 126 lines)

@@ -0,0 +1,126 @@
const axios = require('axios');
const { createAxiosInstance } = require('./index');
// Mock axios
jest.mock('axios', () => ({
interceptors: {
request: { use: jest.fn(), eject: jest.fn() },
response: { use: jest.fn(), eject: jest.fn() },
},
create: jest.fn().mockReturnValue({
defaults: {
proxy: null,
},
get: jest.fn().mockResolvedValue({ data: {} }),
post: jest.fn().mockResolvedValue({ data: {} }),
put: jest.fn().mockResolvedValue({ data: {} }),
delete: jest.fn().mockResolvedValue({ data: {} }),
}),
get: jest.fn().mockResolvedValue({ data: {} }),
post: jest.fn().mockResolvedValue({ data: {} }),
put: jest.fn().mockResolvedValue({ data: {} }),
delete: jest.fn().mockResolvedValue({ data: {} }),
reset: jest.fn().mockImplementation(function () {
this.get.mockClear();
this.post.mockClear();
this.put.mockClear();
this.delete.mockClear();
this.create.mockClear();
}),
}));
describe('createAxiosInstance', () => {
const originalEnv = process.env;
beforeEach(() => {
// Reset mocks
jest.clearAllMocks();
// Create a clean copy of process.env
process.env = { ...originalEnv };
// Default: no proxy
delete process.env.proxy;
});
afterAll(() => {
// Restore original process.env
process.env = originalEnv;
});
test('creates an axios instance without proxy when no proxy env is set', () => {
const instance = createAxiosInstance();
expect(axios.create).toHaveBeenCalledTimes(1);
expect(instance.defaults.proxy).toBeNull();
});
test('configures proxy correctly with hostname and protocol', () => {
process.env.proxy = 'http://example.com';
const instance = createAxiosInstance();
expect(axios.create).toHaveBeenCalledTimes(1);
expect(instance.defaults.proxy).toEqual({
host: 'example.com',
protocol: 'http',
});
});
test('configures proxy correctly with hostname, protocol and port', () => {
process.env.proxy = 'https://proxy.example.com:8080';
const instance = createAxiosInstance();
expect(axios.create).toHaveBeenCalledTimes(1);
expect(instance.defaults.proxy).toEqual({
host: 'proxy.example.com',
protocol: 'https',
port: 8080,
});
});
test('handles proxy URLs with authentication', () => {
process.env.proxy = 'http://user:pass@proxy.example.com:3128';
const instance = createAxiosInstance();
expect(axios.create).toHaveBeenCalledTimes(1);
expect(instance.defaults.proxy).toEqual({
host: 'proxy.example.com',
protocol: 'http',
port: 3128,
// Note: The current implementation doesn't handle auth - if needed, add this functionality
});
});
test('throws error when proxy URL is invalid', () => {
process.env.proxy = 'invalid-url';
expect(() => createAxiosInstance()).toThrow('Invalid proxy URL');
expect(axios.create).toHaveBeenCalledTimes(1);
});
// If you want to test the actual URL parsing more thoroughly
test('handles edge case proxy URLs correctly', () => {
// IPv6 address
process.env.proxy = 'http://[::1]:8080';
let instance = createAxiosInstance();
expect(instance.defaults.proxy).toEqual({
host: '::1',
protocol: 'http',
port: 8080,
});
// URL with path (which should be ignored for proxy config)
process.env.proxy = 'http://proxy.example.com:8080/some/path';
instance = createAxiosInstance();
expect(instance.defaults.proxy).toEqual({
host: 'proxy.example.com',
protocol: 'http',
port: 8080,
});
});
});
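A note on the bracket-stripping in createAxiosInstance (a Node behavior note, not code from the diff): the WHATWG URL parser keeps brackets on IPv6 hostnames, while axios expects a bare address in proxy.host.

const u = new URL('http://[::1]:8080');
console.log(u.hostname); // '[::1]'
console.log(u.hostname.replace(/^\[|\]$/g, '')); // '::1' — the form axios expects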

View File

@@ -1,6 +1,6 @@
const { MeiliSearch } = require('meilisearch');
-const Conversation = require('~/models/schema/convoSchema');
-const Message = require('~/models/schema/messageSchema');
+const { Conversation } = require('~/models/Conversation');
+const { Message } = require('~/models/Message');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');

View File

@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
-const actionSchema = require('./schema/action');
+const { actionSchema } = require('@librechat/data-schemas');
const Action = mongoose.model('action', actionSchema);

View File

@@ -9,7 +9,7 @@ const {
removeAgentFromAllProjects,
} = require('./Project');
const getLogStores = require('~/cache/getLogStores');
-const agentSchema = require('./schema/agent');
+const { agentSchema } = require('@librechat/data-schemas');
const Agent = mongoose.model('agent', agentSchema);

View File

@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
-const assistantSchema = require('./schema/assistant');
+const { assistantSchema } = require('@librechat/data-schemas');
const Assistant = mongoose.model('assistant', assistantSchema);

View File

@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
-const balanceSchema = require('./schema/balance');
+const { balanceSchema } = require('@librechat/data-schemas');
const { getMultiplier } = require('./tx');
const { logger } = require('~/config');

View File

@@ -1,5 +1,9 @@
-const Banner = require('./schema/banner');
+const mongoose = require('mongoose');
 const logger = require('~/config/winston');
+const { bannerSchema } = require('@librechat/data-schemas');
+const Banner = mongoose.model('Banner', bannerSchema);
/**
* Retrieves the current active banner.
* @returns {Promise<Object|null>} The active banner object or null if no active banner is found.

View File

@@ -1,5 +1,4 @@
const { logger } = require('~/config');
-// const { Categories } = require('./schema/categories');
const options = [
{

View File

@@ -15,19 +15,6 @@ const searchConversation = async (conversationId) => {
throw new Error('Error searching conversation');
}
};
-/**
- * Searches for a conversation by conversationId and returns associated file ids.
- * @param {string} conversationId - The conversation's ID.
- * @returns {Promise<string[] | null>}
- */
-const getConvoFiles = async (conversationId) => {
-  try {
-    return (await Conversation.findOne({ conversationId }, 'files').lean())?.files ?? [];
-  } catch (error) {
-    logger.error('[getConvoFiles] Error getting conversation files', error);
-    throw new Error('Error getting conversation files');
-  }
-};
/**
* Retrieves a single conversation for a given user and conversation ID.
@@ -73,9 +60,46 @@ const deleteNullOrEmptyConversations = async () => {
}
};
/**
* Retrieves files from a conversation that have either embedded=true
* or a metadata.fileIdentifier. Simplified and efficient query.
*
* @param {string} conversationId - The conversation ID
* @returns {Promise<MongoFile[]>} - Filtered array of matching file objects
*/
const getToolFiles = async (conversationId) => {
try {
const [result] = await Conversation.aggregate([
{ $match: { conversationId } },
{
$project: {
files: {
$filter: {
input: '$files',
as: 'file',
cond: {
$or: [
{ $eq: ['$$file.embedded', true] },
{ $ifNull: ['$$file.metadata.fileIdentifier', false] },
],
},
},
},
_id: 0,
},
},
]).exec();
return result?.files || [];
} catch (error) {
logger.error('[getToolFiles] Error fetching embedded files:', error);
throw new Error('Error fetching embedded files');
}
};
module.exports = {
Conversation,
-  getConvoFiles,
+  getToolFiles,
searchConversation,
deleteNullOrEmptyConversations,
/**
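A hedged usage sketch of the new helper (not from the diff): getToolFiles returns only the conversation files that are embedded or carry metadata.fileIdentifier, i.e. the subset relevant to tools.

const { getToolFiles } = require('~/models/Conversation');

async function primeToolResources(conversationId) {
  const toolFiles = await getToolFiles(conversationId);
  return toolFiles.map((file) => file.file_id);
}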

View File

@@ -1,7 +1,11 @@
-const ConversationTag = require('./schema/conversationTagSchema');
+const mongoose = require('mongoose');
 const Conversation = require('./schema/convoSchema');
 const logger = require('~/config/winston');
+const { conversationTagSchema } = require('@librechat/data-schemas');
+const ConversationTag = mongoose.model('ConversationTag', conversationTagSchema);
/**
* Retrieves all conversation tags for a user.
* @param {string} user - The user ID.

View File

@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
-const fileSchema = require('./schema/fileSchema');
+const { fileSchema } = require('@librechat/data-schemas');
const File = mongoose.model('File', fileSchema);
@@ -7,7 +7,7 @@ const File = mongoose.model('File', fileSchema);
* Finds a file by its file_id with additional query options.
* @param {string} file_id - The unique identifier of the file.
* @param {object} options - Query options for filtering, projection, etc.
- * @returns {Promise<MongoFile>} A promise that resolves to the file document or null.
+ * @returns {Promise<IMongoFile>} A promise that resolves to the file document or null.
*/
const findFileById = async (file_id, options = {}) => {
return await File.findOne({ file_id, ...options }).lean();
@@ -17,18 +17,20 @@ const findFileById = async (file_id, options = {}) => {
* Retrieves files matching a given filter, sorted by the most recently updated.
* @param {Object} filter - The filter criteria to apply.
* @param {Object} [_sortOptions] - Optional sort parameters.
- * @returns {Promise<Array<MongoFile>>} A promise that resolves to an array of file documents.
+ * @param {Object|String} [selectFields={ text: 0 }] - Fields to include/exclude in the query results.
+ *   Default excludes the 'text' field.
+ * @returns {Promise<Array<IMongoFile>>} A promise that resolves to an array of file documents.
  */
-const getFiles = async (filter, _sortOptions) => {
+const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => {
   const sortOptions = { updatedAt: -1, ..._sortOptions };
-  return await File.find(filter).sort(sortOptions).lean();
+  return await File.find(filter).select(selectFields).sort(sortOptions).lean();
};
/**
* Creates a new file with a TTL of 1 hour.
- * @param {MongoFile} data - The file data to be created, must contain file_id.
+ * @param {IMongoFile} data - The file data to be created, must contain file_id.
  * @param {boolean} disableTTL - Whether to disable the TTL.
- * @returns {Promise<MongoFile>} A promise that resolves to the created file document.
+ * @returns {Promise<IMongoFile>} A promise that resolves to the created file document.
*/
const createFile = async (data, disableTTL) => {
const fileData = {
@@ -48,8 +50,8 @@ const createFile = async (data, disableTTL) => {
/**
* Updates a file identified by file_id with new data and removes the TTL.
- * @param {MongoFile} data - The data to update, must contain file_id.
- * @returns {Promise<MongoFile>} A promise that resolves to the updated file document.
+ * @param {IMongoFile} data - The data to update, must contain file_id.
+ * @returns {Promise<IMongoFile>} A promise that resolves to the updated file document.
*/
const updateFile = async (data) => {
const { file_id, ...update } = data;
@@ -62,8 +64,8 @@ const updateFile = async (data) => {
/**
* Increments the usage of a file identified by file_id.
- * @param {MongoFile} data - The data to update, must contain file_id and the increment value for usage.
- * @returns {Promise<MongoFile>} A promise that resolves to the updated file document.
+ * @param {IMongoFile} data - The data to update, must contain file_id and the increment value for usage.
+ * @returns {Promise<IMongoFile>} A promise that resolves to the updated file document.
*/
const updateFileUsage = async (data) => {
const { file_id, inc = 1 } = data;
@@ -77,7 +79,7 @@ const updateFileUsage = async (data) => {
/**
* Deletes a file identified by file_id.
* @param {string} file_id - The unique identifier of the file to delete.
- * @returns {Promise<MongoFile>} A promise that resolves to the deleted file document or null.
+ * @returns {Promise<IMongoFile>} A promise that resolves to the deleted file document or null.
*/
const deleteFile = async (file_id) => {
return await File.findOneAndDelete({ file_id }).lean();
@@ -86,7 +88,7 @@ const deleteFile = async (file_id) => {
/**
* Deletes a file identified by a filter.
* @param {object} filter - The filter criteria to apply.
- * @returns {Promise<MongoFile>} A promise that resolves to the deleted file document or null.
+ * @returns {Promise<IMongoFile>} A promise that resolves to the deleted file document or null.
*/
const deleteFileByFilter = async (filter) => {
return await File.findOneAndDelete(filter).lean();

View File

@@ -1,4 +1,4 @@
const mongoose = require('mongoose');
-const keySchema = require('./schema/key');
+const { keySchema } = require('@librechat/data-schemas');
module.exports = mongoose.model('Key', keySchema);

View File

@@ -2,7 +2,6 @@ const { z } = require('zod');
const Message = require('./schema/messageSchema');
const { logger } = require('~/config');
// Validate conversation ID as a UUID (if your conversation IDs follow UUID format)
const idSchema = z.string().uuid();
/**
@@ -29,11 +28,8 @@ const idSchema = z.string().uuid();
* @param {string} [params.plugin] - Plugin associated with the message.
* @param {string[]} [params.plugins] - An array of plugins associated with the message.
* @param {string} [params.model] - The model used to generate the message.
- * @param {string} [params.iv] - (Optional) Base64-encoded initialization vector for encryption.
- * @param {string} [params.authTag] - (Optional) Base64-encoded authentication tag from AES-GCM.
- * @param {string} [params.encryptedKey] - (Optional) Base64-encoded AES key encrypted with RSA.
- * @param {Object} [metadata] - Additional metadata for this operation.
- * @param {string} [metadata.context] - The context of the operation.
+ * @param {Object} [metadata] - Additional metadata for this operation
+ * @param {string} [metadata.context] - The context of the operation
* @returns {Promise<TMessage>} The updated or newly inserted message document.
* @throws {Error} If there is an error in saving the message.
*/
@@ -55,9 +51,6 @@ async function saveMessage(req, params, metadata) {
...params,
user: req.user.id,
messageId: params.newMessageId || params.messageId,
-    iv: params.iv ?? null,
-    authTag: params.authTag ?? null,
-    encryptedKey: params.encryptedKey ?? null,
};
if (req?.body?.isTemporary) {
@@ -97,12 +90,7 @@ async function bulkSaveMessages(messages, overrideTimestamp = false) {
const bulkOps = messages.map((message) => ({
updateOne: {
filter: { messageId: message.messageId },
-      update: {
-        ...message,
-        iv: message.iv ?? null,
-        authTag: message.authTag ?? null,
-        encryptedKey: message.encryptedKey ?? null,
-      },
+      update: message,
timestamps: !overrideTimestamp,
upsert: true,
},
@@ -131,7 +119,14 @@ async function bulkSaveMessages(messages, overrideTimestamp = false) {
* @returns {Promise<Object>} The updated or newly inserted message document.
* @throws {Error} If there is an error in saving the message.
*/
-async function recordMessage({ user, endpoint, messageId, conversationId, parentMessageId, ...rest }) {
+async function recordMessage({
+  user,
+  endpoint,
+  messageId,
+  conversationId,
+  parentMessageId,
+  ...rest
+}) {
try {
// No parsing of convoId as may use threadId
const message = {
@@ -141,9 +136,6 @@ async function recordMessage({ user, endpoint, messageId, conversationId, parent
conversationId,
parentMessageId,
...rest,
-      iv: rest.iv ?? null,
-      authTag: rest.authTag ?? null,
-      encryptedKey: rest.encryptedKey ?? null,
};
return await Message.findOneAndUpdate({ user, messageId }, message, {
@@ -198,15 +190,12 @@ async function updateMessageText(req, { messageId, text }) {
async function updateMessage(req, message, metadata) {
try {
const { messageId, ...update } = message;
-    // Ensure encryption fields are explicitly updated (if provided)
-    update.iv = update.iv ?? null;
-    update.authTag = update.authTag ?? null;
-    update.encryptedKey = update.encryptedKey ?? null;
const updatedMessage = await Message.findOneAndUpdate(
{ messageId, user: req.user.id },
update,
-      { new: true },
+      {
+        new: true,
+      },
);
if (!updatedMessage) {
@@ -236,11 +225,11 @@ async function updateMessage(req, message, metadata) {
*
* @async
* @function deleteMessagesSince
- * @param {Object} req - The request object.
  * @param {Object} params - The parameters object.
+ * @param {Object} req - The request object.
  * @param {string} params.messageId - The unique identifier for the message.
  * @param {string} params.conversationId - The identifier of the conversation.
- * @returns {Promise<number>} The number of deleted messages.
+ * @returns {Promise<Number>} The number of deleted messages.
* @throws {Error} If there is an error in deleting messages.
*/
async function deleteMessagesSince(req, { messageId, conversationId }) {
@@ -274,6 +263,7 @@ async function getMessages(filter, select) {
if (select) {
return await Message.find(filter).select(select).sort({ createdAt: 1 }).lean();
}
return await Message.find(filter).sort({ createdAt: 1 }).lean();
} catch (err) {
logger.error('Error getting messages:', err);
@@ -291,7 +281,10 @@ async function getMessages(filter, select) {
*/
async function getMessage({ user, messageId }) {
try {
-    return await Message.findOne({ user, messageId }).lean();
+    return await Message.findOne({
+      user,
+      messageId,
+    }).lean();
} catch (err) {
logger.error('Error getting message:', err);
throw err;

View File

@@ -1,6 +1,6 @@
const { model } = require('mongoose');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
-const projectSchema = require('~/models/schema/projectSchema');
+const { projectSchema } = require('@librechat/data-schemas');
const Project = model('Project', projectSchema);
@@ -9,7 +9,7 @@ const Project = model('Project', projectSchema);
*
* @param {string} projectId - The ID of the project to find and return as a plain object.
* @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
- * @returns {Promise<MongoProject>} A plain object representing the project document, or `null` if no project is found.
+ * @returns {Promise<IMongoProject>} A plain object representing the project document, or `null` if no project is found.
*/
const getProjectById = async function (projectId, fieldsToSelect = null) {
const query = Project.findById(projectId);
@@ -27,7 +27,7 @@ const getProjectById = async function (projectId, fieldsToSelect = null) {
*
* @param {string} projectName - The name of the project to find or create.
* @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
- * @returns {Promise<MongoProject>} A plain object representing the project document.
+ * @returns {Promise<IMongoProject>} A plain object representing the project document.
*/
const getProjectByName = async function (projectName, fieldsToSelect = null) {
const query = { name: projectName };
@@ -47,7 +47,7 @@ const getProjectByName = async function (projectName, fieldsToSelect = null) {
*
* @param {string} projectId - The ID of the project to update.
* @param {string[]} promptGroupIds - The array of prompt group IDs to add to the project.
- * @returns {Promise<MongoProject>} The updated project document.
+ * @returns {Promise<IMongoProject>} The updated project document.
*/
const addGroupIdsToProject = async function (projectId, promptGroupIds) {
return await Project.findByIdAndUpdate(
@@ -62,7 +62,7 @@ const addGroupIdsToProject = async function (projectId, promptGroupIds) {
*
* @param {string} projectId - The ID of the project to update.
* @param {string[]} promptGroupIds - The array of prompt group IDs to remove from the project.
- * @returns {Promise<MongoProject>} The updated project document.
+ * @returns {Promise<IMongoProject>} The updated project document.
*/
const removeGroupIdsFromProject = async function (projectId, promptGroupIds) {
return await Project.findByIdAndUpdate(
@@ -87,7 +87,7 @@ const removeGroupFromAllProjects = async (promptGroupId) => {
*
* @param {string} projectId - The ID of the project to update.
* @param {string[]} agentIds - The array of agent IDs to add to the project.
- * @returns {Promise<MongoProject>} The updated project document.
+ * @returns {Promise<IMongoProject>} The updated project document.
*/
const addAgentIdsToProject = async function (projectId, agentIds) {
return await Project.findByIdAndUpdate(
@@ -102,7 +102,7 @@ const addAgentIdsToProject = async function (projectId, agentIds) {
*
* @param {string} projectId - The ID of the project to update.
* @param {string[]} agentIds - The array of agent IDs to remove from the project.
- * @returns {Promise<MongoProject>} The updated project document.
+ * @returns {Promise<IMongoProject>} The updated project document.
*/
const removeAgentIdsFromProject = async function (projectId, agentIds) {
return await Project.findByIdAndUpdate(

View File

@@ -1,3 +1,4 @@
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { SystemRoles, SystemCategories, Constants } = require('librechat-data-provider');
const {
@@ -6,10 +7,13 @@ const {
removeGroupIdsFromProject,
removeGroupFromAllProjects,
} = require('./Project');
-const { Prompt, PromptGroup } = require('./schema/promptSchema');
+const { promptGroupSchema, promptSchema } = require('@librechat/data-schemas');
 const { escapeRegExp } = require('~/server/utils');
 const { logger } = require('~/config');
+const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);
+const Prompt = mongoose.model('Prompt', promptSchema);
/**
* Create a pipeline for the aggregation to get prompt groups
* @param {Object} query

View File

@@ -1,3 +1,4 @@
const mongoose = require('mongoose');
const {
CacheKeys,
SystemRoles,
@@ -12,9 +13,11 @@ const {
temporaryChatPermissionsSchema,
} = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
-const Role = require('~/models/schema/roleSchema');
+const { roleSchema } = require('@librechat/data-schemas');
 const { logger } = require('~/config');
+const Role = mongoose.model('Role', roleSchema);
/**
* Retrieve a role by name and convert the found role document to a plain object.
* If the role with the given name doesn't exist and the name is a system defined role, create it and return the lean version.
@@ -168,6 +171,7 @@ const initializeRoles = async function () {
}
};
module.exports = {
Role,
getRoleByName,
initializeRoles,
updateRoleByName,

View File

@@ -8,7 +8,7 @@ const {
} = require('librechat-data-provider');
const { updateAccessPermissions, initializeRoles } = require('~/models/Role');
const getLogStores = require('~/cache/getLogStores');
-const Role = require('~/models/schema/roleSchema');
+const { Role } = require('~/models/Role');
// Mock the cache
jest.mock('~/cache/getLogStores', () => {

View File

@@ -1,7 +1,7 @@
const mongoose = require('mongoose');
const signPayload = require('~/server/services/signPayload');
const { hashToken } = require('~/server/utils/crypto');
-const sessionSchema = require('./schema/session');
+const { sessionSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');
const Session = mongoose.model('Session', sessionSchema);

View File

@@ -1,7 +1,9 @@
const mongoose = require('mongoose');
const { nanoid } = require('nanoid');
const { Constants } = require('librechat-data-provider');
const { Conversation } = require('~/models/Conversation');
-const SharedLink = require('./schema/shareSchema');
+const { shareSchema } = require('@librechat/data-schemas');
+const SharedLink = mongoose.model('SharedLink', shareSchema);
const { getMessages } = require('./Message');
const logger = require('~/config/winston');

View File

@@ -1,6 +1,6 @@
const mongoose = require('mongoose');
const { encryptV2 } = require('~/server/utils/crypto');
-const tokenSchema = require('./schema/tokenSchema');
+const { tokenSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');
/**

View File

@@ -1,9 +1,11 @@
-const ToolCall = require('./schema/toolCallSchema');
+const mongoose = require('mongoose');
+const { toolCallSchema } = require('@librechat/data-schemas');
+const ToolCall = mongoose.model('ToolCall', toolCallSchema);
 /**
  * Create a new tool call
- * @param {ToolCallData} toolCallData - The tool call data
- * @returns {Promise<ToolCallData>} The created tool call document
+ * @param {IToolCallData} toolCallData - The tool call data
+ * @returns {Promise<IToolCallData>} The created tool call document
  */
*/
async function createToolCall(toolCallData) {
try {
@@ -16,7 +18,7 @@ async function createToolCall(toolCallData) {
/**
* Get a tool call by ID
* @param {string} id - The tool call document ID
- * @returns {Promise<ToolCallData|null>} The tool call document or null if not found
+ * @returns {Promise<IToolCallData|null>} The tool call document or null if not found
*/
async function getToolCallById(id) {
try {
@@ -44,7 +46,7 @@ async function getToolCallsByMessage(messageId, userId) {
* Get tool calls by conversation ID and user
* @param {string} conversationId - The conversation ID
* @param {string} userId - The user's ObjectId
- * @returns {Promise<ToolCallData[]>} Array of tool call documents
+ * @returns {Promise<IToolCallData[]>} Array of tool call documents
*/
async function getToolCallsByConvo(conversationId, userId) {
try {
@@ -57,8 +59,8 @@ async function getToolCallsByConvo(conversationId, userId) {
/**
* Update a tool call
* @param {string} id - The tool call document ID
- * @param {Partial<ToolCallData>} updateData - The data to update
- * @returns {Promise<ToolCallData|null>} The updated tool call document or null if not found
+ * @param {Partial<IToolCallData>} updateData - The data to update
+ * @returns {Promise<IToolCallData|null>} The updated tool call document or null if not found
*/
async function updateToolCall(id, updateData) {
try {

View File

@@ -1,6 +1,6 @@
const mongoose = require('mongoose');
const { isEnabled } = require('~/server/utils/handleText');
-const transactionSchema = require('./schema/transaction');
+const { transactionSchema } = require('@librechat/data-schemas');
const { getMultiplier, getCacheMultiplier } = require('./tx');
const { logger } = require('~/config');
const Balance = require('./Balance');

View File

@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
-const userSchema = require('~/models/schema/userSchema');
+const { userSchema } = require('@librechat/data-schemas');
const User = mongoose.model('User', userSchema);

View File

@@ -4,9 +4,28 @@ const { MeiliSearch } = require('meilisearch');
const { cleanUpPrimaryKeyValue } = require('~/lib/utils/misc');
const logger = require('~/config/meiliLogger');
-// Environment flags
/**
* Flag to indicate if search is enabled based on environment variables.
* @type {boolean}
*/
const searchEnabled = process.env.SEARCH && process.env.SEARCH.toLowerCase() === 'true';
/**
* Flag to indicate if MeiliSearch is enabled based on required environment variables.
* @type {boolean}
*/
const meiliEnabled = process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY && searchEnabled;
/**
* Validates the required options for configuring the mongoMeili plugin.
*
* @param {Object} options - The configuration options.
* @param {string} options.host - The MeiliSearch host.
* @param {string} options.apiKey - The MeiliSearch API key.
* @param {string} options.indexName - The name of the index.
* @throws {Error} Throws an error if any required option is missing.
*/
const validateOptions = function (options) {
const requiredKeys = ['host', 'apiKey', 'indexName'];
requiredKeys.forEach((key) => {
@@ -16,53 +35,64 @@ const validateOptions = function (options) {
});
};
-// const createMeiliMongooseModel = function ({ index, indexName, client, attributesToIndex }) {
/**
* Factory function to create a MeiliMongooseModel class which extends a Mongoose model.
* This class contains static and instance methods to synchronize and manage the MeiliSearch index
* corresponding to the MongoDB collection.
*
* @param {Object} config - Configuration object.
* @param {Object} config.index - The MeiliSearch index object.
* @param {Array<string>} config.attributesToIndex - List of attributes to index.
* @returns {Function} A class definition that will be loaded into the Mongoose schema.
*/
const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
// The primary key is assumed to be the first attribute in the attributesToIndex array.
const primaryKey = attributesToIndex[0];
// MeiliMongooseModel is of type Mongoose.Model
class MeiliMongooseModel {
/**
-     * `syncWithMeili`: synchronizes the data between a MongoDB collection and a MeiliSearch index,
-     * only triggered if there's ever a discrepancy determined by `api\lib\db\indexSync.js`.
+     * Synchronizes the data between the MongoDB collection and the MeiliSearch index.
      *
-     * 1. Fetches all documents from the MongoDB collection and the MeiliSearch index.
-     * 2. Compares the documents from both sources.
-     * 3. If a document exists in MeiliSearch but not in MongoDB, it's deleted from MeiliSearch.
-     * 4. If a document exists in MongoDB but not in MeiliSearch, it's added to MeiliSearch.
-     * 5. If a document exists in both but has different `text` or `title` fields (depending on the `primaryKey`), it's updated in MeiliSearch.
-     * 6. After all operations, it updates the `_meiliIndex` field in MongoDB to indicate whether the document is indexed in MeiliSearch.
+     * The synchronization process involves:
+     * 1. Fetching all documents from the MongoDB collection and MeiliSearch index.
+     * 2. Comparing documents from both sources.
+     * 3. Deleting documents from MeiliSearch that no longer exist in MongoDB.
+     * 4. Adding documents to MeiliSearch that exist in MongoDB but not in the index.
+     * 5. Updating documents in MeiliSearch if key fields (such as `text` or `title`) differ.
+     * 6. Updating the `_meiliIndex` field in MongoDB to indicate the indexing status.
      *
-     * Note: This strategy does not use batch operations for Meilisearch as the `index.addDocuments` will discard
-     * the entire batch if there's an error with one document, and will not throw an error if there's an issue.
-     * Also, `index.getDocuments` needs an exact limit on the amount of documents to return, so we build the map in batches.
+     * Note: The function processes documents in batches because MeiliSearch's
+     * `index.getDocuments` requires an exact limit and `index.addDocuments` does not handle
+     * partial failures in a batch.
      *
-     * @returns {Promise} A promise that resolves when the synchronization is complete.
-     *
-     * @throws {Error} Throws an error if there's an issue with adding a document to MeiliSearch.
+     * @returns {Promise<void>} Resolves when the synchronization is complete.
*/
static async syncWithMeili() {
try {
let moreDocuments = true;
// Retrieve all MongoDB documents from the collection as plain JavaScript objects.
const mongoDocuments = await this.find().lean();
-        const format = (doc) => _.pick(doc, attributesToIndex);
-        // Prepare for comparison
+        // Helper function to format a document by selecting only the attributes to index
+        // and omitting keys starting with '$'.
+        const format = (doc) =>
+          _.omitBy(_.pick(doc, attributesToIndex), (v, k) => k.startsWith('$'));
// Build a map of MongoDB documents for quick lookup based on the primary key.
const mongoMap = new Map(mongoDocuments.map((doc) => [doc[primaryKey], format(doc)]));
const indexMap = new Map();
let offset = 0;
const batchSize = 1000;
// Fetch documents from the MeiliSearch index in batches.
while (moreDocuments) {
const batch = await index.getDocuments({ limit: batchSize, offset });
if (batch.results.length === 0) {
moreDocuments = false;
}
for (const doc of batch.results) {
indexMap.set(doc[primaryKey], format(doc));
}
offset += batchSize;
}
@@ -70,13 +100,12 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
const updateOps = [];
-      // Iterate over Meili index documents
+      // Process documents present in the MeiliSearch index.
for (const [id, doc] of indexMap) {
const update = {};
update[primaryKey] = id;
if (mongoMap.has(id)) {
-          // Case: Update
-          // If document also exists in MongoDB, would be update case
+          // If document exists in MongoDB, check for discrepancies in key fields.
if (
(doc.text && doc.text !== mongoMap.get(id).text) ||
(doc.title && doc.title !== mongoMap.get(id).title)
@@ -92,8 +121,7 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
await index.addDocuments([doc]);
}
} else {
-          // Case: Delete
-          // If document does not exist in MongoDB, its a delete case from meili index
+          // If the document does not exist in MongoDB, delete it from MeiliSearch.
await index.deleteDocument(id);
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: false } } },
@@ -101,24 +129,25 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
}
}
-      // Iterate over MongoDB documents
+      // Process documents present in MongoDB.
for (const [id, doc] of mongoMap) {
const update = {};
update[primaryKey] = id;
-        // Case: Insert
-        // If document does not exist in Meili Index, Its an insert case
+        // If the document is missing in the Meili index, add it.
if (!indexMap.has(id)) {
await index.addDocuments([doc]);
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
});
} else if (doc._meiliIndex === false) {
// If the document exists but is marked as not indexed, update the flag.
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
});
}
}
// Execute bulk update operations in MongoDB to update the _meiliIndex flags.
if (updateOps.length > 0) {
await this.collection.bulkWrite(updateOps);
logger.debug(
@@ -132,34 +161,47 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
}
}
-    // Set one or more settings of the meili index
+    /**
+     * Updates settings for the MeiliSearch index.
+     *
+     * @param {Object} settings - The settings to update on the MeiliSearch index.
+     * @returns {Promise<Object>} Promise resolving to the update result.
+     */
static async setMeiliIndexSettings(settings) {
return await index.updateSettings(settings);
}
-    // Search the index
+    /**
+     * Searches the MeiliSearch index and optionally populates the results with data from MongoDB.
+     *
+     * @param {string} q - The search query.
+     * @param {Object} params - Additional search parameters for MeiliSearch.
+     * @param {boolean} populate - Whether to populate search hits with full MongoDB documents.
+     * @returns {Promise<Object>} The search results with populated hits if requested.
+     */
static async meiliSearch(q, params, populate) {
const data = await index.search(q, params);
if (populate) {
// Build a query using the primary key values from the search hits.
const query = {};
query[primaryKey] = _.map(data.hits, (hit) => cleanUpPrimaryKeyValue(hit[primaryKey]));
// Build a projection object, including only keys that do not start with '$'.
const projection = Object.keys(this.schema.obj).reduce(
(results, key) => {
if (!key.startsWith('$')) {
results[key] = 1;
}
return results;
},
{ _id: 1, __v: 1 },
);
// Retrieve the full documents from MongoDB.
const hitsFromMongoose = await this.find(query, projection).lean();
// Merge the MongoDB documents with the search hits.
const populatedHits = data.hits.map(function (hit) {
const query = {};
query[primaryKey] = hit[primaryKey];
@@ -176,10 +218,21 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
return data;
}
/**
* Preprocesses the current document for indexing.
*
* This method:
* - Picks only the defined attributes to index.
* - Omits any keys starting with '$'.
* - Replaces pipe characters ('|') in `conversationId` with '--'.
* - Extracts and concatenates text from an array of content items.
*
* @returns {Object} The preprocessed object ready for indexing.
*/
preprocessObjectForIndex() {
const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
k.startsWith('$'),
);
// NOTE: MeiliSearch does not allow '|' in the primary key, so replace it with '--'.
if (object.conversationId && object.conversationId.includes('|')) {
object.conversationId = object.conversationId.replace(/\|/g, '--');
}
@@ -195,32 +248,53 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
return object;
}
/**
* Adds the current document to the MeiliSearch index.
*
* The method preprocesses the document, adds it to MeiliSearch, and then updates
* the MongoDB document's `_meiliIndex` flag to true.
*
* @returns {Promise<void>}
*/
async addObjectToMeili() {
const object = this.preprocessObjectForIndex();
try {
await index.addDocuments([object]);
} catch (error) {
logger.error('[addObjectToMeili] Error adding document to Meili', error);
}
await this.collection.updateMany({ _id: this._id }, { $set: { _meiliIndex: true } });
}
/**
* Updates the current document in the MeiliSearch index.
*
* @returns {Promise<void>}
*/
async updateObjectToMeili() {
const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
k.startsWith('$'),
);
await index.updateDocuments([object]);
}
/**
* Deletes the current document from the MeiliSearch index.
*
* @returns {Promise<void>}
*/
async deleteObjectFromMeili() {
await index.deleteDocument(this._id);
}
/**
* Post-save hook to synchronize the document with MeiliSearch.
*
* If the document is already indexed (i.e. `_meiliIndex` is true), it updates it;
* otherwise, it adds the document to the index.
*/
postSaveHook() {
if (this._meiliIndex) {
this.updateObjectToMeili();
@@ -229,14 +303,24 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
}
}
/**
* Post-update hook to update the document in MeiliSearch.
*
* This hook is triggered after a document update, ensuring that changes are
* propagated to the MeiliSearch index if the document is indexed.
*/
postUpdateHook() {
if (this._meiliIndex) {
this.updateObjectToMeili();
}
}
/**
* Post-remove hook to delete the document from MeiliSearch.
*
* This hook is triggered after a document is removed, ensuring that the document
* is also removed from the MeiliSearch index if it was previously indexed.
*/
postRemoveHook() {
if (this._meiliIndex) {
this.deleteObjectFromMeili();
@@ -247,11 +331,27 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
return MeiliMongooseModel;
};
/**
* Mongoose plugin to synchronize MongoDB collections with a MeiliSearch index.
*
* This plugin:
* - Validates the provided options.
* - Adds a `_meiliIndex` field to the schema to track indexing status.
* - Sets up a MeiliSearch client and creates an index if it doesn't already exist.
* - Loads class methods for syncing, searching, and managing documents in MeiliSearch.
* - Registers Mongoose hooks (post-save, post-update, post-remove, etc.) to maintain index consistency.
*
* @param {mongoose.Schema} schema - The Mongoose schema to which the plugin is applied.
* @param {Object} options - Configuration options.
* @param {string} options.host - The MeiliSearch host.
* @param {string} options.apiKey - The MeiliSearch API key.
* @param {string} options.indexName - The name of the MeiliSearch index.
* @param {string} options.primaryKey - The primary key field for indexing.
*/
module.exports = function mongoMeili(schema, options) {
// Validate options for mongoMeili.
validateOptions(options);
// Add _meiliIndex field to the schema to track if a document has been indexed in MeiliSearch.
schema.add({
_meiliIndex: {
type: Boolean,
@@ -263,69 +363,77 @@ module.exports = function mongoMeili(schema, options) {
const { host, apiKey, indexName, primaryKey } = options;
// Setup the MeiliSearch client.
const client = new MeiliSearch({ host, apiKey });
// Create the index asynchronously if it doesn't exist.
client.createIndex(indexName, { primaryKey });
// Setup the MeiliSearch index for this schema.
const index = client.index(indexName);
// Collect attributes from the schema that should be indexed.
const attributesToIndex = [
..._.reduce(
schema.obj,
function (results, value, key) {
return value.meiliIndex ? [...results, key] : results;
},
[],
),
];
// Load the class methods into the schema.
schema.loadClass(createMeiliMongooseModel({ index, indexName, client, attributesToIndex }));
// Register Mongoose hooks to synchronize with MeiliSearch.
// Post-save: synchronize after a document is saved.
schema.post('save', function (doc) {
doc.postSaveHook();
});
// Post-update: synchronize after a document is updated.
schema.post('update', function (doc) {
doc.postUpdateHook();
});
// Post-remove: synchronize after a document is removed.
schema.post('remove', function (doc) {
doc.postRemoveHook();
});
// Pre-deleteMany hook: remove corresponding documents from MeiliSearch when multiple documents are deleted.
schema.pre('deleteMany', async function (next) {
if (!meiliEnabled) {
return next();
}
try {
// Check if the schema has a "messages" field to determine if it's a conversation schema.
if (Object.prototype.hasOwnProperty.call(schema.obj, 'messages')) {
const convoIndex = client.index('convos');
const deletedConvos = await mongoose.model('Conversation').find(this._conditions).lean();
const promises = deletedConvos.map((convo) =>
convoIndex.deleteDocument(convo.conversationId),
);
await Promise.all(promises);
}
// Check if the schema has a "messageId" field to determine if it's a message schema.
if (Object.prototype.hasOwnProperty.call(schema.obj, 'messageId')) {
const messageIndex = client.index('messages');
const deletedMessages = await mongoose.model('Message').find(this._conditions).lean();
const promises = deletedMessages.map((message) =>
messageIndex.deleteDocument(message.messageId),
);
await Promise.all(promises);
}
return next();
} catch (error) {
if (meiliEnabled) {
logger.error(
'[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion. Next startup may be slow due to syncing.',
error,
);
}
@@ -333,17 +441,19 @@ module.exports = function mongoMeili(schema, options) {
}
});
// Post-findOneAndUpdate hook: update MeiliSearch index after a document is updated via findOneAndUpdate.
schema.post('findOneAndUpdate', async function (doc) {
if (!meiliEnabled) {
return;
}
// If the document is unfinished, do not update the index.
if (doc.unfinished) {
return;
}
let meiliDoc;
// For conversation documents, try to fetch the document from the "convos" index.
if (doc.messages) {
try {
meiliDoc = await client.index('convos').getDocument(doc.conversationId);
@@ -356,10 +466,12 @@ module.exports = function mongoMeili(schema, options) {
}
}
// If the MeiliSearch document exists and the title is unchanged, do nothing.
if (meiliDoc && meiliDoc.title === doc.title) {
return;
}
// Otherwise, trigger a post-save hook to synchronize the document.
doc.postSaveHook();
});
};
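For reference, a minimal sketch of how a model opts into this plugin; the schema, field names, and index name below are illustrative, while the option keys (host, apiKey, indexName, primaryKey) match the plugin's validated options and the real wiring shown in the convo and message models later in this changeset:

const mongoose = require('mongoose');
const mongoMeili = require('~/models/plugins/mongoMeili');

const noteSchema = mongoose.Schema({
  noteId: { type: String, unique: true, required: true, meiliIndex: true },
  title: { type: String, meiliIndex: true }, // only fields flagged meiliIndex: true are synced
  body: String, // not synced to MeiliSearch
});

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  noteSchema.plugin(mongoMeili, {
    host: process.env.MEILI_HOST,
    apiKey: process.env.MEILI_MASTER_KEY,
    indexName: 'notes', // illustrative index name
    primaryKey: 'noteId',
  });
}

module.exports = mongoose.model('Note', noteSchema);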

View File

@@ -1,60 +0,0 @@
const mongoose = require('mongoose');
const { Schema } = mongoose;
const AuthSchema = new Schema(
{
authorization_type: String,
custom_auth_header: String,
type: {
type: String,
enum: ['service_http', 'oauth', 'none'],
},
authorization_content_type: String,
authorization_url: String,
client_url: String,
scope: String,
token_exchange_method: {
type: String,
enum: ['default_post', 'basic_auth_header', null],
},
},
{ _id: false },
);
const actionSchema = new Schema({
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
index: true,
required: true,
},
action_id: {
type: String,
index: true,
required: true,
},
type: {
type: String,
default: 'action_prototype',
},
settings: Schema.Types.Mixed,
agent_id: String,
assistant_id: String,
metadata: {
api_key: String, // private, encrypted
auth: AuthSchema,
domain: {
type: String,
required: true,
},
// json_schema: Schema.Types.Mixed,
privacy_policy_url: String,
raw_spec: String,
oauth_client_id: String, // private, encrypted
oauth_client_secret: String, // private, encrypted
},
});
// }, { minimize: false }); // Prevent removal of empty objects
module.exports = actionSchema;

View File

@@ -1,17 +0,0 @@
const mongoose = require('mongoose');
const balanceSchema = mongoose.Schema({
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
index: true,
required: true,
},
// 1000 tokenCredits = 1 mill ($0.001 USD)
tokenCredits: {
type: Number,
default: 0,
},
});
module.exports = balanceSchema;
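To make the unit conversion in the schema comment concrete (simple arithmetic from the comment above; the helper name is hypothetical):

// 1000 tokenCredits = 1 mill = $0.001 USD, so $1.00 = 1,000,000 tokenCredits.
const CREDITS_PER_USD = 1000000;
const usdToTokenCredits = (usd) => Math.round(usd * CREDITS_PER_USD); // hypothetical helper
// usdToTokenCredits(0.001) === 1000
// usdToTokenCredits(5) === 5000000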

View File

@@ -1,19 +0,0 @@
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const categoriesSchema = new Schema({
label: {
type: String,
required: true,
unique: true,
},
value: {
type: String,
required: true,
unique: true,
},
});
const categories = mongoose.model('categories', categoriesSchema);
module.exports = { Categories: categories };

View File

@@ -1,32 +0,0 @@
const mongoose = require('mongoose');
const conversationTagSchema = mongoose.Schema(
{
tag: {
type: String,
index: true,
},
user: {
type: String,
index: true,
},
description: {
type: String,
index: true,
},
count: {
type: Number,
default: 0,
},
position: {
type: Number,
default: 0,
index: true,
},
},
{ timestamps: true },
);
conversationTagSchema.index({ tag: 1, user: 1 }, { unique: true });
module.exports = mongoose.model('ConversationTag', conversationTagSchema);

View File

@@ -1,46 +1,7 @@
const mongoose = require('mongoose');
const mongoMeili = require('../plugins/mongoMeili');
const { conversationPreset } = require('./defaults');
const convoSchema = mongoose.Schema(
{
conversationId: {
type: String,
unique: true,
required: true,
index: true,
meiliIndex: true,
},
title: {
type: String,
default: 'New Chat',
meiliIndex: true,
},
user: {
type: String,
index: true,
},
messages: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Message' }],
agentOptions: {
type: mongoose.Schema.Types.Mixed,
},
...conversationPreset,
agent_id: {
type: String,
},
tags: {
type: [String],
default: [],
meiliIndex: true,
},
files: {
type: [String],
},
expiredAt: {
type: Date,
},
},
{ timestamps: true },
);
const { convoSchema } = require('@librechat/data-schemas');
if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
convoSchema.plugin(mongoMeili, {
@@ -52,10 +13,6 @@ if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
});
}
convoSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
convoSchema.index({ createdAt: 1, updatedAt: 1 });
convoSchema.index({ conversationId: 1, user: 1 }, { unique: true });
const Conversation = mongoose.models.Conversation || mongoose.model('Conversation', convoSchema);
module.exports = Conversation;

View File

@@ -1,111 +0,0 @@
const { FileSources } = require('librechat-data-provider');
const mongoose = require('mongoose');
/**
* @typedef {Object} MongoFile
* @property {ObjectId} [_id] - MongoDB Document ID
* @property {number} [__v] - MongoDB Version Key
* @property {ObjectId} user - User ID
* @property {string} [conversationId] - Optional conversation ID
* @property {string} file_id - File identifier
* @property {string} [temp_file_id] - Temporary File identifier
* @property {number} bytes - Size of the file in bytes
* @property {string} filename - Name of the file
* @property {string} filepath - Location of the file
* @property {'file'} object - Type of object, always 'file'
* @property {string} type - Type of file
* @property {number} [usage=0] - Number of uses of the file
* @property {string} [context] - Context of the file origin
* @property {boolean} [embedded=false] - Whether or not the file is embedded in vector db
* @property {string} [model] - The model to identify the group region of the file (for Azure OpenAI hosting)
* @property {string} [source] - The source of the file (e.g., from FileSources)
* @property {number} [width] - Optional width of the file
* @property {number} [height] - Optional height of the file
* @property {Object} [metadata] - Metadata related to the file
* @property {string} [metadata.fileIdentifier] - Unique identifier for the file in metadata
* @property {Date} [expiresAt] - Optional expiration date of the file
* @property {Date} [createdAt] - Date when the file was created
* @property {Date} [updatedAt] - Date when the file was updated
*/
/** @type {MongooseSchema<MongoFile>} */
const fileSchema = mongoose.Schema(
{
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
index: true,
required: true,
},
conversationId: {
type: String,
ref: 'Conversation',
index: true,
},
file_id: {
type: String,
// required: true,
index: true,
},
temp_file_id: {
type: String,
// required: true,
},
bytes: {
type: Number,
required: true,
},
filename: {
type: String,
required: true,
},
filepath: {
type: String,
required: true,
},
object: {
type: String,
required: true,
default: 'file',
},
embedded: {
type: Boolean,
},
type: {
type: String,
required: true,
},
context: {
type: String,
// required: true,
},
usage: {
type: Number,
required: true,
default: 0,
},
source: {
type: String,
default: FileSources.local,
},
model: {
type: String,
},
width: Number,
height: Number,
metadata: {
fileIdentifier: String,
},
expiresAt: {
type: Date,
expires: 3600, // 1 hour in seconds
},
},
{
timestamps: true,
},
);
fileSchema.index({ createdAt: 1, updatedAt: 1 });
module.exports = fileSchema;

View File

@@ -1,157 +1,6 @@
const mongoose = require('mongoose');
const mongoMeili = require('~/models/plugins/mongoMeili');
const messageSchema = mongoose.Schema(
{
messageId: {
type: String,
unique: true,
required: true,
index: true,
meiliIndex: true,
},
conversationId: {
type: String,
index: true,
required: true,
meiliIndex: true,
},
user: {
type: String,
index: true,
required: true,
default: null,
},
model: {
type: String,
default: null,
},
endpoint: {
type: String,
},
conversationSignature: {
type: String,
},
clientId: {
type: String,
},
invocationId: {
type: Number,
},
parentMessageId: {
type: String,
},
tokenCount: {
type: Number,
},
summaryTokenCount: {
type: Number,
},
sender: {
type: String,
meiliIndex: true,
},
text: {
type: String,
meiliIndex: true,
},
summary: {
type: String,
},
isCreatedByUser: {
type: Boolean,
required: true,
default: false,
},
unfinished: {
type: Boolean,
default: false,
},
error: {
type: Boolean,
default: false,
},
finish_reason: {
type: String,
},
_meiliIndex: {
type: Boolean,
required: false,
select: false,
default: false,
},
files: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
plugin: {
type: {
latest: {
type: String,
required: false,
},
inputs: {
type: [mongoose.Schema.Types.Mixed],
required: false,
default: undefined,
},
outputs: {
type: String,
required: false,
},
},
default: undefined,
},
plugins: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
content: {
type: [{ type: mongoose.Schema.Types.Mixed }],
default: undefined,
meiliIndex: true,
},
thread_id: {
type: String,
},
/* frontend components */
iconURL: {
type: String,
},
attachments: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
/*
attachments: {
type: [
{
file_id: String,
filename: String,
filepath: String,
expiresAt: Date,
width: Number,
height: Number,
type: String,
conversationId: String,
messageId: {
type: String,
required: true,
},
toolCallId: String,
},
],
default: undefined,
},
*/
expiredAt: {
type: Date,
},
iv: {
type: String,
default: null,
},
authTag: {
type: String,
default: null,
},
encryptedKey: {
type: String,
default: null,
},
},
{ timestamps: true },
);
const { messageSchema } = require('@librechat/data-schemas');
if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
messageSchema.plugin(mongoMeili, {
@@ -161,11 +10,7 @@ if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
primaryKey: 'messageId',
});
}
messageSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
messageSchema.index({ createdAt: 1 });
messageSchema.index({ messageId: 1, user: 1 }, { unique: true });
/** @type {mongoose.Model<TMessage>} */
const Message = mongoose.models.Message || mongoose.model('Message', messageSchema);
module.exports = Message;

View File

@@ -1,25 +1,5 @@
const mongoose = require('mongoose');
const pluginAuthSchema = mongoose.Schema(
{
authField: {
type: String,
required: true,
},
value: {
type: String,
required: true,
},
userId: {
type: String,
required: true,
},
pluginKey: {
type: String,
},
},
{ timestamps: true },
);
const { pluginAuthSchema } = require('@librechat/data-schemas');
const PluginAuth = mongoose.models.Plugin || mongoose.model('PluginAuth', pluginAuthSchema);

View File

@@ -1,36 +1,5 @@
const mongoose = require('mongoose');
const { conversationPreset } = require('./defaults');
const presetSchema = mongoose.Schema(
{
presetId: {
type: String,
unique: true,
required: true,
index: true,
},
title: {
type: String,
default: 'New Chat',
meiliIndex: true,
},
user: {
type: String,
default: null,
},
defaultPreset: {
type: Boolean,
},
order: {
type: Number,
},
...conversationPreset,
agentOptions: {
type: mongoose.Schema.Types.Mixed,
default: null,
},
},
{ timestamps: true },
);
const { presetSchema } = require('@librechat/data-schemas');
const Preset = mongoose.models.Preset || mongoose.model('Preset', presetSchema);

View File

@@ -1,35 +0,0 @@
const { Schema } = require('mongoose');
/**
* @typedef {Object} MongoProject
* @property {ObjectId} [_id] - MongoDB Document ID
* @property {string} name - The name of the project
* @property {ObjectId[]} promptGroupIds - Array of PromptGroup IDs associated with the project
* @property {Date} [createdAt] - Date when the project was created (added by timestamps)
* @property {Date} [updatedAt] - Date when the project was last updated (added by timestamps)
*/
const projectSchema = new Schema(
{
name: {
type: String,
required: true,
index: true,
},
promptGroupIds: {
type: [Schema.Types.ObjectId],
ref: 'PromptGroup',
default: [],
},
agentIds: {
type: [String],
ref: 'Agent',
default: [],
},
},
{
timestamps: true,
},
);
module.exports = projectSchema;

View File

@@ -1,118 +0,0 @@
const mongoose = require('mongoose');
const { Constants } = require('librechat-data-provider');
const Schema = mongoose.Schema;
/**
* @typedef {Object} MongoPromptGroup
* @property {ObjectId} [_id] - MongoDB Document ID
* @property {string} name - The name of the prompt group
* @property {ObjectId} author - The author of the prompt group
* @property {ObjectId} [projectId=null] - The project ID of the prompt group
* @property {ObjectId} [productionId=null] - The ID of the production prompt for the group
* @property {string} authorName - The name of the author of the prompt group
* @property {number} [numberOfGenerations=0] - Number of generations the prompt group has
* @property {string} [oneliner=''] - Oneliner description of the prompt group
* @property {string} [category=''] - Category of the prompt group
* @property {string} [command] - Command for the prompt group
* @property {Date} [createdAt] - Date when the prompt group was created (added by timestamps)
* @property {Date} [updatedAt] - Date when the prompt group was last updated (added by timestamps)
*/
const promptGroupSchema = new Schema(
{
name: {
type: String,
required: true,
index: true,
},
numberOfGenerations: {
type: Number,
default: 0,
},
oneliner: {
type: String,
default: '',
},
category: {
type: String,
default: '',
index: true,
},
projectIds: {
type: [Schema.Types.ObjectId],
ref: 'Project',
index: true,
},
productionId: {
type: Schema.Types.ObjectId,
ref: 'Prompt',
required: true,
index: true,
},
author: {
type: Schema.Types.ObjectId,
ref: 'User',
required: true,
index: true,
},
authorName: {
type: String,
required: true,
},
command: {
type: String,
index: true,
validate: {
validator: function (v) {
return v === undefined || v === null || v === '' || /^[a-z0-9-]+$/.test(v);
},
message: (props) =>
`${props.value} is not a valid command. Only lowercase alphanumeric characters and hyphens (-) are allowed.`,
},
maxlength: [
Constants.COMMANDS_MAX_LENGTH,
`Command cannot be longer than ${Constants.COMMANDS_MAX_LENGTH} characters`,
],
},
},
{
timestamps: true,
},
);
const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);
const promptSchema = new Schema(
{
groupId: {
type: Schema.Types.ObjectId,
ref: 'PromptGroup',
required: true,
index: true,
},
author: {
type: Schema.Types.ObjectId,
ref: 'User',
required: true,
},
prompt: {
type: String,
required: true,
},
type: {
type: String,
enum: ['text', 'chat'],
required: true,
},
},
{
timestamps: true,
},
);
const Prompt = mongoose.model('Prompt', promptSchema);
promptSchema.index({ createdAt: 1, updatedAt: 1 });
promptGroupSchema.index({ createdAt: 1, updatedAt: 1 });
module.exports = { Prompt, PromptGroup };
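The command validator above admits only lowercase alphanumerics and hyphens, treating empty or missing values as valid; a quick sketch of its behavior:

const isValidCommand = (v) => v === undefined || v === null || v === '' || /^[a-z0-9-]+$/.test(v);
// isValidCommand('my-prompt-2') === true
// isValidCommand('My Prompt') === false  (uppercase and spaces are rejected)
// isValidCommand(undefined) === true     (command is optional)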

View File

@@ -1,20 +0,0 @@
const mongoose = require('mongoose');
const sessionSchema = mongoose.Schema({
refreshTokenHash: {
type: String,
required: true,
},
expiration: {
type: Date,
required: true,
expires: 0,
},
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
required: true,
},
});
module.exports = sessionSchema;

View File

@@ -1,54 +0,0 @@
const mongoose = require('mongoose');
/**
* @typedef {Object} ToolCallData
* @property {string} conversationId - The ID of the conversation
* @property {string} messageId - The ID of the message
* @property {string} toolId - The ID of the tool
* @property {string | ObjectId} user - The user's ObjectId
* @property {unknown} [result] - Optional result data
* @property {TAttachment[]} [attachments] - Optional attachments data
* @property {number} [blockIndex] - Optional code block index
* @property {number} [partIndex] - Optional part index
*/
/** @type {MongooseSchema<ToolCallData>} */
const toolCallSchema = mongoose.Schema(
{
conversationId: {
type: String,
required: true,
},
messageId: {
type: String,
required: true,
},
toolId: {
type: String,
required: true,
},
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
required: true,
},
result: {
type: mongoose.Schema.Types.Mixed,
},
attachments: {
type: mongoose.Schema.Types.Mixed,
},
blockIndex: {
type: Number,
},
partIndex: {
type: Number,
},
},
{ timestamps: true },
);
toolCallSchema.index({ messageId: 1, user: 1 });
toolCallSchema.index({ conversationId: 1, user: 1 });
module.exports = mongoose.model('ToolCall', toolCallSchema);

View File

@@ -1,171 +0,0 @@
const mongoose = require('mongoose');
const { SystemRoles } = require('librechat-data-provider');
/**
* @typedef {Object} MongoSession
* @property {string} [refreshToken] - The refresh token
*/
/**
* @typedef {Object} MongoUser
* @property {ObjectId} [_id] - MongoDB Document ID
* @property {string} [name] - The user's name
* @property {string} [username] - The user's username, in lowercase
* @property {string} email - The user's email address
* @property {boolean} emailVerified - Whether the user's email is verified
* @property {string} [password] - The user's password, trimmed with 8-128 characters
* @property {string} [avatar] - The URL of the user's avatar
* @property {string} provider - The provider of the user's account (e.g., 'local', 'google')
* @property {string} [role='USER'] - The role of the user
* @property {string} [googleId] - Optional Google ID for the user
* @property {string} [facebookId] - Optional Facebook ID for the user
* @property {string} [openidId] - Optional OpenID ID for the user
* @property {string} [ldapId] - Optional LDAP ID for the user
* @property {string} [githubId] - Optional GitHub ID for the user
* @property {string} [discordId] - Optional Discord ID for the user
* @property {string} [appleId] - Optional Apple ID for the user
* @property {Array} [plugins=[]] - List of plugins used by the user
* @property {Array.<MongoSession>} [refreshToken] - List of sessions with refresh tokens
* @property {Date} [expiresAt] - Optional expiration date of the file
* @property {string} [encryptionPublicKey] - The user's encryption public key
* @property {string} [encryptedPrivateKey] - The user's encrypted private key
* @property {string} [encryptionSalt] - The salt used for key derivation (e.g., PBKDF2)
* @property {string} [encryptionIV] - The IV used for encrypting the private key
* @property {Date} [createdAt] - Date when the user was created (added by timestamps)
* @property {Date} [updatedAt] - Date when the user was last updated (added by timestamps)
*/
/** @type {MongooseSchema<MongoSession>} */
const Session = mongoose.Schema({
refreshToken: {
type: String,
default: '',
},
});
const backupCodeSchema = mongoose.Schema({
codeHash: { type: String, required: true },
used: { type: Boolean, default: false },
usedAt: { type: Date, default: null },
});
/** @type {MongooseSchema<MongoUser>} */
const userSchema = mongoose.Schema(
{
name: {
type: String,
},
username: {
type: String,
lowercase: true,
default: '',
},
email: {
type: String,
required: [true, 'can\'t be blank'],
lowercase: true,
unique: true,
match: [/\S+@\S+\.\S+/, 'is invalid'],
index: true,
},
emailVerified: {
type: Boolean,
required: true,
default: false,
},
password: {
type: String,
trim: true,
minlength: 8,
maxlength: 128,
},
avatar: {
type: String,
required: false,
},
provider: {
type: String,
required: true,
default: 'local',
},
role: {
type: String,
default: SystemRoles.USER,
},
googleId: {
type: String,
unique: true,
sparse: true,
},
facebookId: {
type: String,
unique: true,
sparse: true,
},
openidId: {
type: String,
unique: true,
sparse: true,
},
ldapId: {
type: String,
unique: true,
sparse: true,
},
githubId: {
type: String,
unique: true,
sparse: true,
},
discordId: {
type: String,
unique: true,
sparse: true,
},
appleId: {
type: String,
unique: true,
sparse: true,
},
plugins: {
type: Array,
},
totpSecret: {
type: String,
},
backupCodes: {
type: [backupCodeSchema],
},
refreshToken: {
type: [Session],
},
expiresAt: {
type: Date,
expires: 604800, // 7 days in seconds
},
termsAccepted: {
type: Boolean,
default: false,
},
encryptionPublicKey: {
type: String,
default: null,
},
encryptedPrivateKey: {
type: String,
default: null,
},
encryptionSalt: {
type: String,
default: null,
},
encryptionIV: {
type: String,
default: null,
},
},
{ timestamps: true },
);
module.exports = userSchema;
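A note on the `unique: true, sparse: true` pairing used for the provider IDs above: a sparse unique index enforces uniqueness only across documents that actually contain the field, so any number of users without, say, a googleId can coexist. The same constraint expressed as an explicit index call (illustrative):

// Absent googleId values never collide under a sparse unique index.
userSchema.index({ googleId: 1 }, { unique: true, sparse: true });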

View File

@@ -1,6 +1,6 @@
{
"name": "@librechat/backend",
"version": "v0.7.7-rc1",
"version": "v0.7.7",
"description": "",
"scripts": {
"start": "echo 'please run this from the root directory'",
@@ -40,14 +40,15 @@
"@googleapis/youtube": "^20.0.0",
"@keyv/mongo": "^2.1.8",
"@keyv/redis": "^2.8.1",
"@langchain/community": "^0.3.14",
"@langchain/community": "^0.3.34",
"@langchain/core": "^0.3.40",
"@langchain/google-genai": "^0.1.9",
"@langchain/google-vertexai": "^0.2.0",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.1.8",
"@librechat/agents": "^2.2.0",
"@librechat/data-schemas": "*",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"axios": "1.7.8",
"axios": "^1.8.2",
"bcryptjs": "^2.4.3",
"cohere-ai": "^7.9.1",
"compression": "^1.7.4",
@@ -74,7 +75,6 @@
"keyv": "^4.5.4",
"keyv-file": "^0.2.0",
"klona": "^2.0.6",
"langchain": "^0.2.19",
"librechat-data-provider": "*",
"librechat-mcp": "*",
"lodash": "^4.17.21",
@@ -82,7 +82,7 @@
"memorystore": "^1.6.7",
"mime": "^3.0.0",
"module-alias": "^2.2.3",
"mongoose": "^8.9.5",
"mongoose": "^8.12.1",
"multer": "^1.4.5-lts.1",
"nanoid": "^3.3.7",
"nodemailer": "^6.9.15",

View File

@@ -1,59 +1,9 @@
const { getResponseSender, Constants } = require('librechat-data-provider');
const { createAbortController, handleAbortError } = require('~/server/middleware');
const { sendMessage, createOnProgress } = require('~/server/utils');
const { saveMessage } = require('~/models');
const { logger } = require('~/config');
let crypto;
try {
crypto = require('crypto');
} catch (err) {
logger.error('[AskController] crypto support is disabled!', err);
}
/**
* Helper function to encrypt plaintext using AES-256-GCM and then RSA-encrypt the AES key.
* @param {string} plainText - The plaintext to encrypt.
* @param {string} pemPublicKey - The RSA public key in PEM format.
* @returns {Object} An object containing the ciphertext, iv, authTag, and encryptedKey.
*/
function encryptText(plainText, pemPublicKey) {
// Generate a random 256-bit AES key and a 12-byte IV.
const aesKey = crypto.randomBytes(32);
const iv = crypto.randomBytes(12);
// Encrypt the plaintext using AES-256-GCM.
const cipher = crypto.createCipheriv('aes-256-gcm', aesKey, iv);
let ciphertext = cipher.update(plainText, 'utf8', 'base64');
ciphertext += cipher.final('base64');
const authTag = cipher.getAuthTag().toString('base64');
// Encrypt the AES key using the user's RSA public key.
const encryptedKey = crypto.publicEncrypt(
{
key: pemPublicKey,
padding: crypto.constants.RSA_PKCS1_OAEP_PADDING,
oaepHash: 'sha256',
},
aesKey,
).toString('base64');
return {
ciphertext,
iv: iv.toString('base64'),
authTag,
encryptedKey,
};
}
/**
* AskController
* - Initializes the client.
* - Obtains the response from the language model.
* - Retrieves the full user record (to get encryption parameters).
* - If the user has encryption enabled (i.e. encryptionPublicKey is provided),
* encrypts both the request (userMessage) and the response before saving.
*/
const AskController = async (req, res, next, initializeClient, addTitle) => {
let {
text,
@@ -82,22 +32,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
modelDisplayLabel,
});
const newConvo = !conversationId;
const userId = req.user.id; // User ID from authentication
// Retrieve full user record from DB (including encryption parameters)
const dbUser = await getUserById(userId, 'encryptionPublicKey encryptedPrivateKey encryptionSalt encryptionIV');
// Build clientOptions including the encryptionPublicKey (if available)
const clientOptions = {
encryptionPublicKey: dbUser?.encryptionPublicKey,
};
// Rebuild PEM format if encryptionPublicKey is available
let pemPublicKey = null;
if (clientOptions.encryptionPublicKey && clientOptions.encryptionPublicKey.trim() !== '') {
const pubKeyBase64 = clientOptions.encryptionPublicKey;
pemPublicKey = `-----BEGIN PUBLIC KEY-----\n${pubKeyBase64.match(/.{1,64}/g).join('\n')}\n-----END PUBLIC KEY-----`;
}
const user = req.user.id;
const getReqData = (data = {}) => {
for (let key in data) {
@@ -117,10 +52,11 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
};
let getText;
try {
// Pass clientOptions (which includes encryptionPublicKey) along with other parameters to initializeClient
const { client } = await initializeClient({ req, res, endpointOption, ...clientOptions });
const { client } = await initializeClient({ req, res, endpointOption });
const { onProgress: progressCallback, getPartialText } = createOnProgress();
getText = client.getStreamText != null ? client.getStreamText.bind(client) : getPartialText;
const getAbortData = () => ({
@@ -138,14 +74,20 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
res.on('close', () => {
logger.debug('[AskController] Request closed');
if (!abortController) {
return;
} else if (abortController.signal.aborted) {
return;
} else if (abortController.requestCompleted) {
return;
}
abortController.abort();
logger.debug('[AskController] Request aborted on close');
});
const messageOptions = {
user,
parentMessageId,
conversationId,
overrideParentMessageId,
@@ -153,14 +95,16 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
onStart,
abortController,
progressCallback,
progressOptions: {
res,
// parentMessageId: overrideParentMessageId || userMessageId,
},
};
// Get the response from the language model client.
/** @type {TMessage} */
let response = await client.sendMessage(text, messageOptions);
response.endpoint = endpointOption.endpoint;
// Ensure the conversation has a title.
const { conversation = {} } = await client.responsePromise;
conversation.title =
conversation && !conversation.title ? null : conversation?.title || 'New Chat';
@@ -171,35 +115,6 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
delete userMessage.image_urls;
}
// --- Encrypt the user message if encryption is enabled ---
if (pemPublicKey && userMessage && userMessage.text) {
try {
const { ciphertext, iv, authTag, encryptedKey } = encryptText(userMessage.text, pemPublicKey);
userMessage.text = ciphertext;
userMessage.iv = iv;
userMessage.authTag = authTag;
userMessage.encryptedKey = encryptedKey;
logger.debug('[AskController] User message encrypted.');
} catch (encError) {
logger.error('[AskController] Error encrypting user message:', encError);
}
}
// --- Encrypt the AI response if encryption is enabled ---
if (pemPublicKey && response.text) {
try {
const { ciphertext, iv, authTag, encryptedKey } = encryptText(response.text, pemPublicKey);
response.text = ciphertext;
response.iv = iv;
response.authTag = authTag;
response.encryptedKey = encryptedKey;
logger.debug('[AskController] Response message encrypted.');
} catch (encError) {
logger.error('[AskController] Error encrypting response message:', encError);
}
}
// --- End Encryption Branch ---
if (!abortController.signal.aborted) {
sendMessage(res, {
final: true,
@@ -213,15 +128,15 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
if (!client.savedMessageIds.has(response.messageId)) {
await saveMessage(
req,
{ ...response, user },
{ context: 'api/server/controllers/AskController.js - response end' },
);
}
}
if (!client.skipSaveUserMessage) {
await saveMessage(req, userMessage, {
context: 'api/server/controllers/AskController.js - don\'t skip saving user message',
});
}
@@ -241,9 +156,9 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
messageId: responseMessageId,
parentMessageId: overrideParentMessageId ?? userMessageId ?? parentMessageId,
}).catch((err) => {
logger.error('[AskController] Error in `handleAbortError`', err);
});
}
};
module.exports = AskController;

View File

@@ -1,6 +1,7 @@
const { CacheKeys } = require('librechat-data-provider');
const { loadDefaultModels, loadConfigModels } = require('~/server/services/Config');
const { getLogStores } = require('~/cache');
const { logger } = require('~/config');
/**
* @param {ServerRequest} req
@@ -36,8 +37,13 @@ async function loadModels(req) {
}
async function modelController(req, res) {
try {
const modelConfig = await loadModels(req);
res.send(modelConfig);
} catch (error) {
logger.error('Error fetching models:', error);
res.status(500).send({ error: error.message });
}
}
module.exports = { modelController, loadModels, getModelsConfig };

View File

@@ -11,17 +11,19 @@ const { encryptV2 } = require('~/server/utils/crypto');
const enable2FAController = async (req, res) => {
const safeAppTitle = (process.env.APP_TITLE || 'LibreChat').replace(/\s+/g, '');
try {
const userId = req.user.id;
const secret = generateTOTPSecret();
const { plainCodes, codeObjects } = await generateBackupCodes();
const encryptedSecret = await encryptV2(secret);
// Set twoFactorEnabled to false until the user confirms 2FA.
const user = await updateUser(userId, {
totpSecret: encryptedSecret,
backupCodes: codeObjects,
twoFactorEnabled: false,
});
const otpauthUrl = `otpauth://totp/${safeAppTitle}:${user.email}?secret=${secret}&issuer=${safeAppTitle}`;
res.status(200).json({
otpauthUrl,
backupCodes: plainCodes,
@@ -37,6 +39,7 @@ const verify2FAController = async (req, res) => {
const userId = req.user.id;
const { token, backupCode } = req.body;
const user = await getUserById(userId);
// Ensure that 2FA is enabled for this user.
if (!user || !user.totpSecret) {
return res.status(400).json({ message: '2FA not initiated' });
}
@@ -52,7 +55,6 @@ const verify2FAController = async (req, res) => {
return res.status(200).json();
}
}
return res.status(400).json({ message: 'Invalid token.' });
} catch (err) {
logger.error('[verify2FAController]', err);
@@ -74,6 +76,8 @@ const confirm2FAController = async (req, res) => {
const secret = await getTOTPSecret(user.totpSecret);
if (await verifyTOTP(secret, token)) {
// Upon successful verification, enable 2FA.
await updateUser(userId, { twoFactorEnabled: true });
return res.status(200).json();
}
@@ -87,7 +91,7 @@ const confirm2FAController = async (req, res) => {
const disable2FAController = async (req, res) => {
try {
const userId = req.user.id;
await updateUser(userId, { totpSecret: null, backupCodes: [], twoFactorEnabled: false });
res.status(200).json();
} catch (err) {
logger.error('[disable2FAController]', err);
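For reference, the otpauthUrl assembled in enable2FAController follows the standard otpauth URI scheme consumed by authenticator apps; with APP_TITLE 'LibreChat' it would look like this (the secret and email here are placeholders):

otpauth://totp/LibreChat:user@example.com?secret=JBSWY3DPEHPK3PXP&issuer=LibreChat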

View File

@@ -7,7 +7,6 @@ const {
deleteMessages,
deleteUserById,
deleteAllUserSessions,
updateUser,
} = require('~/models');
const User = require('~/models/User');
const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/services/PluginService');
@@ -165,37 +164,6 @@ const resendVerificationController = async (req, res) => {
}
};
const updateUserEncryptionController = async (req, res) => {
try {
const { encryptionPublicKey, encryptedPrivateKey, encryptionSalt, encryptionIV } = req.body;
// Allow disabling encryption by passing null for all fields.
const allNull = encryptionPublicKey === null && encryptedPrivateKey === null && encryptionSalt === null && encryptionIV === null;
const allPresent = encryptionPublicKey && encryptedPrivateKey && encryptionSalt && encryptionIV;
if (!allNull && !allPresent) {
return res.status(400).json({ message: 'Missing encryption parameters.' });
}
// Update the user record with the provided encryption parameters (or null to disable)
const updatedUser = await updateUser(req.user.id, {
encryptionPublicKey: encryptionPublicKey || null,
encryptedPrivateKey: encryptedPrivateKey || null,
encryptionSalt: encryptionSalt || null,
encryptionIV: encryptionIV || null,
});
if (!updatedUser) {
return res.status(404).json({ message: 'User not found.' });
}
res.status(200).json({ success: true });
} catch (error) {
logger.error('[updateUserEncryptionController]', error);
res.status(500).json({ message: 'Something went wrong updating encryption keys.' });
}
};
module.exports = {
getUserController,
getTermsStatusController,
@@ -204,5 +172,4 @@ module.exports = {
verifyEmailController,
updateUserPluginsController,
resendVerificationController,
updateUserEncryptionController,
};

View File

@@ -10,8 +10,8 @@ const {
ChatModelStreamHandler,
} = require('@librechat/agents');
const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { saveBase64Image } = require('~/server/services/Files/process');
const { logger, sendEvent } = require('~/config');
/** @typedef {import('@librechat/agents').Graph} Graph */

View File

@@ -223,14 +223,23 @@ class AgentClient extends BaseClient {
};
}
/**
*
* @param {TMessage} message
* @param {Array<MongoFile>} attachments
* @returns {Promise<Array<Partial<MongoFile>>>}
*/
async addImageURLs(message, attachments) {
const { files, text, image_urls } = await encodeAndFormat(
this.options.req,
attachments,
this.options.agent.provider,
VisionModes.agents,
);
message.image_urls = image_urls.length ? image_urls : undefined;
if (text && text.length) {
message.ocr = text;
}
return files;
}
@@ -308,7 +317,21 @@ class AgentClient extends BaseClient {
assistantName: this.options?.modelLabel,
});
if (message.ocr && i !== orderedMessages.length - 1) {
if (typeof formattedMessage.content === 'string') {
formattedMessage.content = message.ocr + '\n' + formattedMessage.content;
} else {
const textPart = formattedMessage.content.find((part) => part.type === 'text');
textPart
? (textPart.text = message.ocr + '\n' + textPart.text)
: formattedMessage.content.unshift({ type: 'text', text: message.ocr });
}
} else if (message.ocr && i === orderedMessages.length - 1) {
systemContent = [systemContent, message.ocr].join('\n');
}
const needsTokenCount =
(this.contextStrategy && !orderedMessages[i].tokenCount) || message.ocr;
/* If tokens were never counted, or, is a Vision request and the message has files, count again */
if (needsTokenCount || (this.isVisionModel && (message.image_urls || message.files))) {
@@ -812,7 +835,10 @@ class AgentClient extends BaseClient {
'[api/server/controllers/agents/client.js #sendCompletion] Unhandled error type',
err,
);
this.contentParts.push({
type: ContentTypes.ERROR,
[ContentTypes.ERROR]: `An error occurred while processing the request${err?.message ? `: ${err.message}` : ''}`,
});
}
}
}

View File

@@ -8,7 +8,7 @@ const loginController = async (req, res) => {
return res.status(400).json({ message: 'Invalid credentials' });
}
if (req.user.twoFactorEnabled) {
const tempToken = generate2FATempToken(req.user._id);
return res.status(200).json({ twoFAPending: true, tempToken });
}

View File

@@ -1,5 +1,9 @@
const jwt = require('jsonwebtoken');
const {
verifyTOTP,
verifyBackupCode,
getTOTPSecret,
} = require('~/server/services/twoFactorService');
const { setAuthTokens } = require('~/server/services/AuthService');
const { getUserById } = require('~/models/userMethods');
const { logger } = require('~/config');
@@ -19,12 +23,12 @@ const verify2FA = async (req, res) => {
}
const user = await getUserById(payload.userId);
// Ensure that the user exists and has 2FA enabled
if (!user || !user.twoFactorEnabled) {
return res.status(400).json({ message: '2FA is not enabled for this user' });
}
// Retrieve (and decrypt if necessary) the TOTP secret.
const secret = await getTOTPSecret(user.totpSecret);
let verified = false;
@@ -39,9 +43,7 @@ const verify2FA = async (req, res) => {
}
// Prepare user data for response.
// If the user is a plain object (from lean queries), we create a shallow copy.
const userData = user.toObject ? user.toObject() : { ...user };
// Remove sensitive fields.
delete userData.password;
delete userData.__v;
delete userData.totpSecret;

View File

@@ -10,7 +10,8 @@ const {
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { createToolCall, getToolCallsByConvo } = require('~/models/ToolCall');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { loadTools } = require('~/app/clients/tools/util');
const { checkAccess } = require('~/server/middleware');
const { getMessage } = require('~/models/Message');
const { logger } = require('~/config');

View File

@@ -120,7 +120,7 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
{ promptTokens, completionTokens },
);
await saveMessage(
req,
{ ...responseMessage, user },
{ context: 'api/server/middleware/abortMiddleware.js' },

View File

@@ -10,7 +10,6 @@ const openAI = require('~/server/services/Endpoints/openAI');
const agents = require('~/server/services/Endpoints/agents');
const custom = require('~/server/services/Endpoints/custom');
const google = require('~/server/services/Endpoints/google');
const { getConvoFiles } = require('~/models/Conversation');
const { handleError } = require('~/server/utils');
const buildFunction = {
@@ -87,16 +86,8 @@ async function buildEndpointOption(req, res, next) {
// TODO: use `getModelsConfig` only when necessary
const modelsConfig = await getModelsConfig(req);
const { resendFiles = true } = req.body.endpointOption;
req.body.endpointOption.modelsConfig = modelsConfig;
// hold the promise
if (req.body.files && !isAgents) {
req.body.endpointOption.attachments = processFiles(req.body.files);
}
next();

View File

@@ -1,32 +1,18 @@
const passport = require('passport');
const { logger } = require('~/config');
const requireLocalAuth = (req, res, next) => {
passport.authenticate('local', (err, user, info) => {
if (err) {
logger.error('[requireLocalAuth] Error at passport.authenticate:', err);
return next(err);
}
if (!user) {
logger.debug('[requireLocalAuth] Error: No user');
return res.status(404).send(info);
}
if (info && info.message) {
logger.debug('[requireLocalAuth] Error: ' + info.message);
return res.status(422).send({ message: info.message });
}
req.user = user;

View File

@@ -47,10 +47,10 @@ router.get('/', async function (req, res) {
githubLoginEnabled: !!process.env.GITHUB_CLIENT_ID && !!process.env.GITHUB_CLIENT_SECRET,
googleLoginEnabled: !!process.env.GOOGLE_CLIENT_ID && !!process.env.GOOGLE_CLIENT_SECRET,
appleLoginEnabled:
!!process.env.APPLE_CLIENT_ID &&
!!process.env.APPLE_TEAM_ID &&
!!process.env.APPLE_KEY_ID &&
!!process.env.APPLE_PRIVATE_KEY_PATH,
openidLoginEnabled:
!!process.env.OPENID_CLIENT_ID &&
!!process.env.OPENID_CLIENT_SECRET &&
@@ -80,6 +80,7 @@ router.get('/', async function (req, res) {
publicSharedLinksEnabled,
analyticsGtmId: process.env.ANALYTICS_GTM_ID,
instanceProjectId: instanceProject._id.toString(),
bundlerURL: process.env.SANDPACK_BUNDLER_URL,
};
if (ldap) {

View File

@@ -16,7 +16,7 @@ const {
} = require('~/server/services/Files/process');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { getAgent } = require('~/models/Agent');
const { getFiles } = require('~/models/File');
const { logger } = require('~/config');

View File

@@ -8,14 +8,12 @@ const {
resendVerificationController,
getTermsStatusController,
acceptTermsController,
updateUserEncryptionController,
} = require('~/server/controllers/UserController');
const router = express.Router();
router.get('/', requireJwtAuth, getUserController);
router.get('/terms', requireJwtAuth, getTermsStatusController);
router.put('/encryption', requireJwtAuth, updateUserEncryptionController);
router.post('/terms/accept', requireJwtAuth, acceptTermsController);
router.post('/plugins', requireJwtAuth, updateUserPluginsController);
router.delete('/delete', requireJwtAuth, canDeleteAccount, deleteUserController);

View File

@@ -1,4 +1,9 @@
const {
FileSources,
EModelEndpoint,
loadOCRConfig,
getConfigDefaults,
} = require('librechat-data-provider');
const { checkVariables, checkHealth, checkConfig, checkAzureVariables } = require('./start/checks');
const { azureAssistantsDefaults, assistantsConfigSetup } = require('./start/assistants');
const { initializeFirebase } = require('./Files/Firebase/initialize');
@@ -25,6 +30,7 @@ const AppService = async (app) => {
const config = (await loadCustomConfig()) ?? {};
const configDefaults = getConfigDefaults();
const ocr = loadOCRConfig(config.ocr);
const filteredTools = config.filteredTools;
const includedTools = config.includedTools;
const fileStrategy = config.fileStrategy ?? configDefaults.fileStrategy;
@@ -57,6 +63,7 @@ const AppService = async (app) => {
const interfaceConfig = await loadDefaultInterface(config, configDefaults);
const defaultLocals = {
ocr,
paths,
fileStrategy,
socialLogins,

View File

@@ -120,6 +120,7 @@ describe('AppService', () => {
},
},
paths: expect.anything(),
ocr: expect.anything(),
imageOutputType: expect.any(String),
fileConfig: undefined,
secureImageLinks: undefined,
@@ -588,4 +589,33 @@ describe('AppService updating app.locals and issuing warnings', () => {
);
});
});
it('should not parse environment variable references in OCR config', async () => {
// Mock custom configuration with env variable references in OCR config
const mockConfig = {
ocr: {
apiKey: '${OCR_API_KEY_CUSTOM_VAR_NAME}',
baseURL: '${OCR_BASEURL_CUSTOM_VAR_NAME}',
strategy: 'mistral_ocr',
mistralModel: 'mistral-medium',
},
};
require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve(mockConfig));
// Set actual environment variables with different values
process.env.OCR_API_KEY_CUSTOM_VAR_NAME = 'actual-api-key';
process.env.OCR_BASEURL_CUSTOM_VAR_NAME = 'https://actual-ocr-url.com';
// Initialize app
const app = { locals: {} };
await AppService(app);
// Verify that the raw string references were preserved and not interpolated
expect(app.locals.ocr).toBeDefined();
expect(app.locals.ocr.apiKey).toEqual('${OCR_API_KEY_CUSTOM_VAR_NAME}');
expect(app.locals.ocr.baseURL).toEqual('${OCR_BASEURL_CUSTOM_VAR_NAME}');
expect(app.locals.ocr.strategy).toEqual('mistral_ocr');
expect(app.locals.ocr.mistralModel).toEqual('mistral-medium');
});
});
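The test above pins down that '${VAR}' references in the OCR config must survive startup unparsed; per the commit notes, resolution is deferred until OCR is actually performed. A minimal sketch of such deferred resolution (the resolver below is illustrative, not the library's actual helper):

// Resolve a '${VAR_NAME}' reference against process.env at call time (illustrative).
const resolveEnvRef = (value) => {
  const match = typeof value === 'string' ? value.match(/^\$\{(\w+)\}$/) : null;
  return match ? (process.env[match[1]] ?? value) : value;
};
// resolveEnvRef('${OCR_API_KEY_CUSTOM_VAR_NAME}') => 'actual-api-key' once the env var is set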

View File

@@ -47,7 +47,7 @@ async function loadConfigModels(req) {
);
/**
* @type {Record<string, Promise<string[]>>}
* Map for promises keyed by unique combination of baseURL and apiKey */
const fetchPromisesMap = {};
/**
@@ -102,7 +102,7 @@ async function loadConfigModels(req) {
for (const name of associatedNames) {
const endpoint = endpointsMap[name];
modelsConfig[name] = !modelData?.length ? (endpoint.models.default ?? []) : modelData;
}
}

View File

@@ -5,8 +5,8 @@ const {
getGoogleModels,
getBedrockModels,
getAnthropicModels,
getChatGPTBrowserModels,
} = require('~/server/services/ModelService');
const { logger } = require('~/config');
/**
* Loads the default models for the application.
@@ -15,31 +15,68 @@ const {
* @param {Express.Request} req - The Express request object.
*/
async function loadDefaultModels(req) {
try {
const [
openAI,
anthropic,
azureOpenAI,
gptPlugins,
assistants,
azureAssistants,
google,
bedrock,
] = await Promise.all([
getOpenAIModels({ user: req.user.id }).catch((error) => {
logger.error('Error fetching OpenAI models:', error);
return [];
}),
getAnthropicModels({ user: req.user.id }).catch((error) => {
logger.error('Error fetching Anthropic models:', error);
return [];
}),
getOpenAIModels({ user: req.user.id, azure: true }).catch((error) => {
logger.error('Error fetching Azure OpenAI models:', error);
return [];
}),
getOpenAIModels({ user: req.user.id, azure: useAzurePlugins, plugins: true }).catch(
(error) => {
logger.error('Error fetching Plugin models:', error);
return [];
},
),
getOpenAIModels({ assistants: true }).catch((error) => {
logger.error('Error fetching OpenAI Assistants API models:', error);
return [];
}),
getOpenAIModels({ azureAssistants: true }).catch((error) => {
logger.error('Error fetching Azure OpenAI Assistants API models:', error);
return [];
}),
Promise.resolve(getGoogleModels()).catch((error) => {
logger.error('Error getting Google models:', error);
return [];
}),
Promise.resolve(getBedrockModels()).catch((error) => {
logger.error('Error getting Bedrock models:', error);
return [];
}),
]);
return {
[EModelEndpoint.openAI]: openAI,
[EModelEndpoint.agents]: openAI,
[EModelEndpoint.google]: google,
[EModelEndpoint.anthropic]: anthropic,
[EModelEndpoint.gptPlugins]: gptPlugins,
[EModelEndpoint.azureOpenAI]: azureOpenAI,
[EModelEndpoint.assistants]: assistants,
[EModelEndpoint.azureAssistants]: azureAssistants,
[EModelEndpoint.bedrock]: bedrock,
};
} catch (error) {
logger.error('Error fetching default models:', error);
throw new Error(`Failed to load default models: ${error.message}`);
}
}
module.exports = loadDefaultModels;
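
The rewritten `loadDefaultModels` wraps every fetch in its own `.catch`, so one failing provider degrades to an empty list instead of rejecting the whole `Promise.all`. The repeated handler could be distilled into a helper like this (a sketch, not code from this diff):

const { logger } = require('~/config');

// Per-fetch fallback: log the provider-specific error, return [] so the
// surrounding Promise.all still resolves for the remaining providers.
const withFallback = (promise, label) =>
  Promise.resolve(promise).catch((error) => {
    logger.error(`Error fetching ${label} models:`, error);
    return [];
  });

// Usage sketch:
// const [openAI, google] = await Promise.all([
//   withFallback(getOpenAIModels({ user: req.user.id }), 'OpenAI'),
//   withFallback(getGoogleModels(), 'Google'),
// ]);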

View File

@@ -2,15 +2,8 @@ const { loadAgent } = require('~/models/Agent');
const { logger } = require('~/config');
const buildOptions = (req, endpoint, parsedBody) => {
const {
spec,
iconURL,
agent_id,
instructions,
maxContextTokens,
resendFiles = true,
...model_parameters
} = parsedBody;
const { spec, iconURL, agent_id, instructions, maxContextTokens, ...model_parameters } =
parsedBody;
const agentPromise = loadAgent({
req,
agent_id,
@@ -24,7 +17,6 @@ const buildOptions = (req, endpoint, parsedBody) => {
iconURL,
endpoint,
agent_id,
resendFiles,
instructions,
maxContextTokens,
model_parameters,

View File

@@ -2,6 +2,7 @@ const { createContentAggregator, Providers } = require('@librechat/agents');
const {
EModelEndpoint,
getResponseSender,
AgentCapabilities,
providerEndpointMap,
} = require('librechat-data-provider');
const {
@@ -15,13 +16,17 @@ const initCustom = require('~/server/services/Endpoints/custom/initialize');
const initGoogle = require('~/server/services/Endpoints/google/initialize');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { processFiles } = require('~/server/services/Files/process');
const { loadAgentTools } = require('~/server/services/ToolService');
const AgentClient = require('~/server/controllers/agents/client');
const { getToolFiles } = require('~/models/Conversation');
const { getModelMaxTokens } = require('~/utils');
const { getAgent } = require('~/models/Agent');
const { getFiles } = require('~/models/File');
const { logger } = require('~/config');
const providerConfigMap = {
[Providers.XAI]: initCustom,
[Providers.OLLAMA]: initCustom,
[Providers.DEEPSEEK]: initCustom,
[Providers.OPENROUTER]: initCustom,
@@ -33,20 +38,38 @@ const providerConfigMap = {
};
/**
*
* @param {ServerRequest} req
* @param {Promise<Array<MongoFile | null>> | undefined} _attachments
* @param {AgentToolResources | undefined} _tool_resources
* @returns {Promise<{ attachments: Array<MongoFile | undefined> | undefined, tool_resources: AgentToolResources | undefined }>}
*/
const primeResources = async (_attachments, _tool_resources) => {
const primeResources = async (req, _attachments, _tool_resources) => {
try {
/** @type {Array<MongoFile | undefined> | undefined} */
let attachments;
const tool_resources = _tool_resources ?? {};
const isOCREnabled = (req.app.locals?.[EModelEndpoint.agents]?.capabilities ?? []).includes(
AgentCapabilities.ocr,
);
if (tool_resources.ocr?.file_ids && isOCREnabled) {
const context = await getFiles(
{
file_id: { $in: tool_resources.ocr.file_ids },
},
{},
{},
);
attachments = (attachments ?? []).concat(context);
}
if (!_attachments) {
return { attachments: undefined, tool_resources: _tool_resources };
return { attachments, tool_resources };
}
/** @type {Array<MongoFile | undefined> | undefined} */
const files = await _attachments;
const attachments = [];
const tool_resources = _tool_resources ?? {};
if (!attachments) {
/** @type {Array<MongoFile | undefined>} */
attachments = [];
}
for (const file of files) {
if (!file) {
@@ -81,7 +104,6 @@ const primeResources = async (_attachments, _tool_resources) => {
* @param {ServerResponse} params.res
* @param {Agent} params.agent
* @param {object} [params.endpointOption]
* @param {AgentToolResources} [params.tool_resources]
* @param {boolean} [params.isInitialAgent]
* @returns {Promise<Agent>}
*/
@@ -90,9 +112,28 @@ const initializeAgentOptions = async ({
res,
agent,
endpointOption,
tool_resources,
isInitialAgent = false,
}) => {
let currentFiles;
const requestFiles = req.body.files ?? [];
if (
isInitialAgent &&
req.body.conversationId != null &&
agent.model_parameters?.resendFiles === true
) {
const fileIds = (await getToolFiles(req.body.conversationId)).map((f) => f.file_id);
if (requestFiles.length || fileIds.length) {
currentFiles = await processFiles(requestFiles, fileIds);
}
} else if (isInitialAgent && requestFiles.length) {
currentFiles = await processFiles(requestFiles);
}
const { attachments, tool_resources } = await primeResources(
req,
currentFiles,
agent.tool_resources,
);
const { tools, toolContextMap } = await loadAgentTools({
req,
res,
@@ -159,6 +200,7 @@ const initializeAgentOptions = async ({
return {
...agent,
tools,
attachments,
toolContextMap,
maxContextTokens:
agent.max_context_tokens ??
@@ -196,11 +238,6 @@ const initializeClient = async ({ req, res, endpointOption }) => {
throw new Error('Agent not found');
}
const { attachments, tool_resources } = await primeResources(
endpointOption.attachments,
primaryAgent.tool_resources,
);
const agentConfigs = new Map();
// Handle primary agent
@@ -209,7 +246,6 @@ const initializeClient = async ({ req, res, endpointOption }) => {
res,
agent: primaryAgent,
endpointOption,
tool_resources,
isInitialAgent: true,
});
@@ -239,18 +275,19 @@ const initializeClient = async ({ req, res, endpointOption }) => {
const client = new AgentClient({
req,
agent: primaryConfig,
sender,
attachments,
contentParts,
agentConfigs,
eventHandlers,
collectedUsage,
artifactPromises,
agent: primaryConfig,
spec: endpointOption.spec,
iconURL: endpointOption.iconURL,
agentConfigs,
endpoint: EModelEndpoint.agents,
attachments: primaryConfig.attachments,
maxContextTokens: primaryConfig.maxContextTokens,
resendFiles: primaryConfig.model_parameters?.resendFiles ?? true,
});
return { client };
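
Condensed, the initial-agent file handling added to `initializeAgentOptions` follows this flow (a sketch using the same `getToolFiles`/`processFiles` helpers shown in the imports; error handling omitted):

const { processFiles } = require('~/server/services/Files/process');
const { getToolFiles } = require('~/models/Conversation');

// Which files get processed for the initial agent turn: resendFiles now
// lives in model_parameters and re-attaches prior tool files when enabled.
async function primeInitialAgentFiles(req, agent) {
  const requestFiles = req.body.files ?? [];
  const resendFiles = agent.model_parameters?.resendFiles === true;
  if (resendFiles && req.body.conversationId != null) {
    const fileIds = (await getToolFiles(req.body.conversationId)).map((f) => f.file_id);
    if (requestFiles.length || fileIds.length) {
      return processFiles(requestFiles, fileIds);
    }
    return undefined;
  }
  return requestFiles.length ? processFiles(requestFiles) : undefined;
}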

View File

@@ -141,6 +141,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
},
clientOptions,
);
clientOptions.modelOptions.user = req.user.id;
const options = getLLMConfig(apiKey, clientOptions, endpoint);
if (!customOptions.streamRate) {
return options;

View File

@@ -141,6 +141,7 @@ const initializeClient = async ({
},
clientOptions,
);
clientOptions.modelOptions.user = req.user.id;
const options = getLLMConfig(apiKey, clientOptions);
if (!clientOptions.streamRate) {
return options;

View File

@@ -9,6 +9,7 @@ const { isEnabled } = require('~/server/utils');
* @param {Object} options - Additional options for configuring the LLM.
* @param {Object} [options.modelOptions] - Model-specific options.
* @param {string} [options.modelOptions.model] - The name of the model to use.
* @param {string} [options.modelOptions.user] - The user ID
* @param {number} [options.modelOptions.temperature] - Controls randomness in output generation (0-2).
* @param {number} [options.modelOptions.top_p] - Controls diversity via nucleus sampling (0-1).
* @param {number} [options.modelOptions.frequency_penalty] - Reduces repetition of token sequences (-2 to 2).

View File

@@ -1,9 +1,10 @@
// Code Files
const axios = require('axios');
const FormData = require('form-data');
const { getCodeBaseURL } = require('@librechat/agents');
const { createAxiosInstance } = require('~/config');
const { logAxiosError } = require('~/utils');
const axios = createAxiosInstance();
const MAX_FILE_SIZE = 150 * 1024 * 1024;
/**
@@ -16,7 +17,8 @@ const MAX_FILE_SIZE = 150 * 1024 * 1024;
async function getCodeOutputDownloadStream(fileIdentifier, apiKey) {
try {
const baseURL = getCodeBaseURL();
const response = await axios({
/** @type {import('axios').AxiosRequestConfig} */
const options = {
method: 'get',
url: `${baseURL}/download/${fileIdentifier}`,
responseType: 'stream',
@@ -25,10 +27,15 @@ async function getCodeOutputDownloadStream(fileIdentifier, apiKey) {
'X-API-Key': apiKey,
},
timeout: 15000,
});
};
const response = await axios(options);
return response;
} catch (error) {
logAxiosError({
message: `Error downloading code environment file stream: ${error.message}`,
error,
});
throw new Error(`Error downloading file: ${error.message}`);
}
}
@@ -54,7 +61,8 @@ async function uploadCodeEnvFile({ req, stream, filename, apiKey, entity_id = ''
form.append('file', stream, filename);
const baseURL = getCodeBaseURL();
const response = await axios.post(`${baseURL}/upload`, form, {
/** @type {import('axios').AxiosRequestConfig} */
const options = {
headers: {
...form.getHeaders(),
'Content-Type': 'multipart/form-data',
@@ -64,7 +72,9 @@ async function uploadCodeEnvFile({ req, stream, filename, apiKey, entity_id = ''
},
maxContentLength: MAX_FILE_SIZE,
maxBodyLength: MAX_FILE_SIZE,
});
};
const response = await axios.post(`${baseURL}/upload`, form, options);
/** @type {{ message: string; session_id: string; files: Array<{ fileId: string; filename: string }> }} */
const result = response.data;
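
`createAxiosInstance` itself is defined in `~/config` and not shown in this diff; based on the proxy-handling commits above and the test mock's `defaults.proxy`, a plausible sketch is:

const axios = require('axios');
const { HttpsProxyAgent } = require('https-proxy-agent');

// Hypothetical shape of the shared factory: route requests through the
// PROXY agent and disable axios' built-in proxy option so the two
// mechanisms don't conflict.
function createAxiosInstance() {
  const instance = axios.create();
  if (process.env.PROXY) {
    instance.defaults.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
    instance.defaults.proxy = false;
  }
  return instance;
}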

View File

@@ -0,0 +1,207 @@
// ~/server/services/Files/MistralOCR/crud.js
const fs = require('fs');
const path = require('path');
const FormData = require('form-data');
const { FileSources, envVarRegex, extractEnvVariable } = require('librechat-data-provider');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { logger, createAxiosInstance } = require('~/config');
const { logAxiosError } = require('~/utils');
const axios = createAxiosInstance();
/**
* Uploads a document to Mistral API using file streaming to avoid loading the entire file into memory
*
* @param {Object} params Upload parameters
* @param {string} params.filePath The path to the file on disk
* @param {string} [params.fileName] Optional filename to use (defaults to the name from filePath)
* @param {string} params.apiKey Mistral API key
* @param {string} [params.baseURL=https://api.mistral.ai/v1] Mistral API base URL
* @returns {Promise<Object>} The response from Mistral API
*/
async function uploadDocumentToMistral({
filePath,
fileName = '',
apiKey,
baseURL = 'https://api.mistral.ai/v1',
}) {
const form = new FormData();
form.append('purpose', 'ocr');
const actualFileName = fileName || path.basename(filePath);
const fileStream = fs.createReadStream(filePath);
form.append('file', fileStream, { filename: actualFileName });
return axios
.post(`${baseURL}/files`, form, {
headers: {
Authorization: `Bearer ${apiKey}`,
...form.getHeaders(),
},
maxBodyLength: Infinity,
maxContentLength: Infinity,
})
.then((res) => res.data)
.catch((error) => {
logger.error('Error uploading document to Mistral:', error.message);
throw error;
});
}
async function getSignedUrl({
apiKey,
fileId,
expiry = 24,
baseURL = 'https://api.mistral.ai/v1',
}) {
return axios
.get(`${baseURL}/files/${fileId}/url?expiry=${expiry}`, {
headers: {
Authorization: `Bearer ${apiKey}`,
},
})
.then((res) => res.data)
.catch((error) => {
logger.error('Error fetching signed URL:', error.message);
throw error;
});
}
/**
* @param {Object} params
* @param {string} params.apiKey
* @param {string} params.documentUrl
* @param {string} [params.baseURL]
* @returns {Promise<OCRResult>}
*/
async function performOCR({
apiKey,
documentUrl,
model = 'mistral-ocr-latest',
baseURL = 'https://api.mistral.ai/v1',
}) {
return axios
.post(
`${baseURL}/ocr`,
{
model,
include_image_base64: false,
document: {
type: 'document_url',
document_url: documentUrl,
},
},
{
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${apiKey}`,
},
},
)
.then((res) => res.data)
.catch((error) => {
logger.error('Error performing OCR:', error.message);
throw error;
});
}
function extractVariableName(str) {
const match = str.match(envVarRegex);
return match ? match[1] : null;
}
const uploadMistralOCR = async ({ req, file, file_id, entity_id }) => {
try {
/** @type {TCustomConfig['ocr']} */
const ocrConfig = req.app.locals?.ocr;
const apiKeyConfig = ocrConfig.apiKey || '';
const baseURLConfig = ocrConfig.baseURL || '';
const isApiKeyEnvVar = envVarRegex.test(apiKeyConfig);
const isBaseURLEnvVar = envVarRegex.test(baseURLConfig);
const isApiKeyEmpty = !apiKeyConfig.trim();
const isBaseURLEmpty = !baseURLConfig.trim();
let apiKey, baseURL;
if (isApiKeyEnvVar || isBaseURLEnvVar || isApiKeyEmpty || isBaseURLEmpty) {
const apiKeyVarName = isApiKeyEnvVar ? extractVariableName(apiKeyConfig) : 'OCR_API_KEY';
const baseURLVarName = isBaseURLEnvVar ? extractVariableName(baseURLConfig) : 'OCR_BASEURL';
const authValues = await loadAuthValues({
userId: req.user.id,
authFields: [baseURLVarName, apiKeyVarName],
optional: new Set([baseURLVarName]),
});
apiKey = authValues[apiKeyVarName];
baseURL = authValues[baseURLVarName];
} else {
apiKey = apiKeyConfig;
baseURL = baseURLConfig;
}
const mistralFile = await uploadDocumentToMistral({
filePath: file.path,
fileName: file.originalname,
apiKey,
baseURL,
});
const modelConfig = ocrConfig.mistralModel || '';
const model = envVarRegex.test(modelConfig)
? extractEnvVariable(modelConfig)
: modelConfig.trim() || 'mistral-ocr-latest';
const signedUrlResponse = await getSignedUrl({
apiKey,
baseURL,
fileId: mistralFile.id,
});
const ocrResult = await performOCR({
apiKey,
baseURL,
model,
documentUrl: signedUrlResponse.url,
});
let aggregatedText = '';
const images = [];
ocrResult.pages.forEach((page, index) => {
if (ocrResult.pages.length > 1) {
aggregatedText += `# PAGE ${index + 1}\n`;
}
aggregatedText += page.markdown + '\n\n';
if (page.images && page.images.length > 0) {
page.images.forEach((image) => {
if (image.image_base64) {
images.push(image.image_base64);
}
});
}
});
return {
filename: file.originalname,
bytes: aggregatedText.length * 4,
filepath: FileSources.mistral_ocr,
text: aggregatedText,
images,
};
} catch (error) {
const message = 'Error uploading document to Mistral OCR API';
logAxiosError({ error, message });
throw new Error(message);
}
};
module.exports = {
uploadDocumentToMistral,
uploadMistralOCR,
getSignedUrl,
performOCR,
};
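
Taken together, the exported functions implement a three-step flow: upload the document, exchange the file id for a signed URL, then run OCR against that URL. A usage sketch (API key and path are placeholders):

const { uploadDocumentToMistral, getSignedUrl, performOCR } = require('./crud');

async function ocrToMarkdown(filePath, apiKey) {
  // 1. Upload the raw document under the 'ocr' purpose.
  const uploaded = await uploadDocumentToMistral({ filePath, apiKey });
  // 2. Exchange the file id for a time-limited signed URL.
  const { url } = await getSignedUrl({ apiKey, fileId: uploaded.id });
  // 3. OCR the signed URL and join the per-page markdown.
  const result = await performOCR({ apiKey, documentUrl: url });
  return result.pages.map((page) => page.markdown).join('\n\n');
}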

View File

@@ -0,0 +1,737 @@
const fs = require('fs');
const mockAxios = {
interceptors: {
request: { use: jest.fn(), eject: jest.fn() },
response: { use: jest.fn(), eject: jest.fn() },
},
create: jest.fn().mockReturnValue({
defaults: {
proxy: null,
},
get: jest.fn().mockResolvedValue({ data: {} }),
post: jest.fn().mockResolvedValue({ data: {} }),
put: jest.fn().mockResolvedValue({ data: {} }),
delete: jest.fn().mockResolvedValue({ data: {} }),
}),
get: jest.fn().mockResolvedValue({ data: {} }),
post: jest.fn().mockResolvedValue({ data: {} }),
put: jest.fn().mockResolvedValue({ data: {} }),
delete: jest.fn().mockResolvedValue({ data: {} }),
reset: jest.fn().mockImplementation(function () {
this.get.mockClear();
this.post.mockClear();
this.put.mockClear();
this.delete.mockClear();
this.create.mockClear();
}),
};
jest.mock('axios', () => mockAxios);
jest.mock('fs');
jest.mock('~/utils', () => ({
logAxiosError: jest.fn(),
}));
jest.mock('~/config', () => ({
logger: {
error: jest.fn(),
},
createAxiosInstance: () => mockAxios,
}));
jest.mock('~/server/services/Tools/credentials', () => ({
loadAuthValues: jest.fn(),
}));
const { uploadDocumentToMistral, uploadMistralOCR, getSignedUrl, performOCR } = require('./crud');
describe('MistralOCR Service', () => {
afterEach(() => {
mockAxios.reset();
jest.clearAllMocks();
});
describe('uploadDocumentToMistral', () => {
beforeEach(() => {
// Create a more complete mock for file streams that FormData can work with
const mockReadStream = {
on: jest.fn().mockImplementation(function (event, handler) {
// Simulate immediate 'end' event to make FormData complete processing
if (event === 'end') {
handler();
}
return this;
}),
pipe: jest.fn().mockImplementation(function () {
return this;
}),
pause: jest.fn(),
resume: jest.fn(),
emit: jest.fn(),
once: jest.fn(),
destroy: jest.fn(),
};
fs.createReadStream = jest.fn().mockReturnValue(mockReadStream);
// Mock FormData's append to avoid actual stream processing
jest.mock('form-data', () => {
const mockFormData = function () {
return {
append: jest.fn(),
getHeaders: jest
.fn()
.mockReturnValue({ 'content-type': 'multipart/form-data; boundary=---boundary' }),
getBuffer: jest.fn().mockReturnValue(Buffer.from('mock-form-data')),
getLength: jest.fn().mockReturnValue(100),
};
};
return mockFormData;
});
});
it('should upload a document to Mistral API using file streaming', async () => {
const mockResponse = { data: { id: 'file-123', purpose: 'ocr' } };
mockAxios.post.mockResolvedValueOnce(mockResponse);
const result = await uploadDocumentToMistral({
filePath: '/path/to/test.pdf',
fileName: 'test.pdf',
apiKey: 'test-api-key',
});
// Check that createReadStream was called with the correct file path
expect(fs.createReadStream).toHaveBeenCalledWith('/path/to/test.pdf');
// Since we're mocking FormData, we'll just check that axios was called correctly
expect(mockAxios.post).toHaveBeenCalledWith(
'https://api.mistral.ai/v1/files',
expect.anything(),
expect.objectContaining({
headers: expect.objectContaining({
Authorization: 'Bearer test-api-key',
}),
maxBodyLength: Infinity,
maxContentLength: Infinity,
}),
);
expect(result).toEqual(mockResponse.data);
});
it('should handle errors during document upload', async () => {
const errorMessage = 'API error';
mockAxios.post.mockRejectedValueOnce(new Error(errorMessage));
await expect(
uploadDocumentToMistral({
filePath: '/path/to/test.pdf',
fileName: 'test.pdf',
apiKey: 'test-api-key',
}),
).rejects.toThrow();
const { logger } = require('~/config');
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining('Error uploading document to Mistral:'),
expect.any(String),
);
});
});
describe('getSignedUrl', () => {
it('should fetch signed URL from Mistral API', async () => {
const mockResponse = { data: { url: 'https://document-url.com' } };
mockAxios.get.mockResolvedValueOnce(mockResponse);
const result = await getSignedUrl({
fileId: 'file-123',
apiKey: 'test-api-key',
});
expect(mockAxios.get).toHaveBeenCalledWith(
'https://api.mistral.ai/v1/files/file-123/url?expiry=24',
{
headers: {
Authorization: 'Bearer test-api-key',
},
},
);
expect(result).toEqual(mockResponse.data);
});
it('should handle errors when fetching signed URL', async () => {
const errorMessage = 'API error';
mockAxios.get.mockRejectedValueOnce(new Error(errorMessage));
await expect(
getSignedUrl({
fileId: 'file-123',
apiKey: 'test-api-key',
}),
).rejects.toThrow();
const { logger } = require('~/config');
expect(logger.error).toHaveBeenCalledWith('Error fetching signed URL:', errorMessage);
});
});
describe('performOCR', () => {
it('should perform OCR using Mistral API', async () => {
const mockResponse = {
data: {
pages: [{ markdown: 'Page 1 content' }, { markdown: 'Page 2 content' }],
},
};
mockAxios.post.mockResolvedValueOnce(mockResponse);
const result = await performOCR({
apiKey: 'test-api-key',
documentUrl: 'https://document-url.com',
model: 'mistral-ocr-latest',
});
expect(mockAxios.post).toHaveBeenCalledWith(
'https://api.mistral.ai/v1/ocr',
{
model: 'mistral-ocr-latest',
include_image_base64: false,
document: {
type: 'document_url',
document_url: 'https://document-url.com',
},
},
{
headers: {
'Content-Type': 'application/json',
Authorization: 'Bearer test-api-key',
},
},
);
expect(result).toEqual(mockResponse.data);
});
it('should handle errors during OCR processing', async () => {
const errorMessage = 'OCR processing error';
mockAxios.post.mockRejectedValueOnce(new Error(errorMessage));
await expect(
performOCR({
apiKey: 'test-api-key',
documentUrl: 'https://document-url.com',
}),
).rejects.toThrow();
const { logger } = require('~/config');
expect(logger.error).toHaveBeenCalledWith('Error performing OCR:', errorMessage);
});
});
describe('uploadMistralOCR', () => {
beforeEach(() => {
const mockReadStream = {
on: jest.fn().mockImplementation(function (event, handler) {
if (event === 'end') {
handler();
}
return this;
}),
pipe: jest.fn().mockImplementation(function () {
return this;
}),
pause: jest.fn(),
resume: jest.fn(),
emit: jest.fn(),
once: jest.fn(),
destroy: jest.fn(),
};
fs.createReadStream = jest.fn().mockReturnValue(mockReadStream);
});
it('should process OCR for a file with standard configuration', async () => {
// Setup mocks
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'test-api-key',
OCR_BASEURL: 'https://api.mistral.ai/v1',
});
// Mock file upload response
mockAxios.post.mockResolvedValueOnce({
data: { id: 'file-123', purpose: 'ocr' },
});
// Mock signed URL response
mockAxios.get.mockResolvedValueOnce({
data: { url: 'https://signed-url.com' },
});
// Mock OCR response with text and images
mockAxios.post.mockResolvedValueOnce({
data: {
pages: [
{
markdown: 'Page 1 content',
images: [{ image_base64: 'base64image1' }],
},
{
markdown: 'Page 2 content',
images: [{ image_base64: 'base64image2' }],
},
],
},
});
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
// Use environment variable syntax to ensure loadAuthValues is called
apiKey: '${OCR_API_KEY}',
baseURL: '${OCR_BASEURL}',
mistralModel: 'mistral-medium',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'document.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user123',
authFields: ['OCR_BASEURL', 'OCR_API_KEY'],
optional: expect.any(Set),
});
// Verify OCR result
expect(result).toEqual({
filename: 'document.pdf',
bytes: expect.any(Number),
filepath: 'mistral_ocr',
text: expect.stringContaining('# PAGE 1'),
images: ['base64image1', 'base64image2'],
});
});
it('should process variable references in configuration', async () => {
// Setup mocks with environment variables
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
CUSTOM_API_KEY: 'custom-api-key',
CUSTOM_BASEURL: 'https://custom-api.mistral.ai/v1',
});
// Mock API responses
mockAxios.post.mockResolvedValueOnce({
data: { id: 'file-123', purpose: 'ocr' },
});
mockAxios.get.mockResolvedValueOnce({
data: { url: 'https://signed-url.com' },
});
mockAxios.post.mockResolvedValueOnce({
data: {
pages: [{ markdown: 'Content from custom API' }],
},
});
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
apiKey: '${CUSTOM_API_KEY}',
baseURL: '${CUSTOM_BASEURL}',
mistralModel: '${CUSTOM_MODEL}',
},
},
},
};
// Set environment variable for model
process.env.CUSTOM_MODEL = 'mistral-large';
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'document.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Verify that custom environment variables were extracted and used
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user123',
authFields: ['CUSTOM_BASEURL', 'CUSTOM_API_KEY'],
optional: expect.any(Set),
});
// Check that mistral-large was used in the OCR API call
expect(mockAxios.post).toHaveBeenCalledWith(
expect.anything(),
expect.objectContaining({
model: 'mistral-large',
}),
expect.anything(),
);
expect(result.text).toEqual('Content from custom API\n\n');
});
it('should fall back to default values when variables are not properly formatted', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'default-api-key',
OCR_BASEURL: undefined, // Testing optional parameter
});
mockAxios.post.mockResolvedValueOnce({
data: { id: 'file-123', purpose: 'ocr' },
});
mockAxios.get.mockResolvedValueOnce({
data: { url: 'https://signed-url.com' },
});
mockAxios.post.mockResolvedValueOnce({
data: {
pages: [{ markdown: 'Default API result' }],
},
});
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
// Use environment variable syntax to ensure loadAuthValues is called
apiKey: '${INVALID_FORMAT}', // Using valid env var format but with an invalid name
baseURL: '${OCR_BASEURL}', // Using valid env var format
mistralModel: 'mistral-ocr-latest', // Plain string value
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'document.pdf',
};
await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Should use the default values
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user123',
authFields: ['OCR_BASEURL', 'INVALID_FORMAT'],
optional: expect.any(Set),
});
// Should use the default model when not using environment variable format
expect(mockAxios.post).toHaveBeenCalledWith(
expect.anything(),
expect.objectContaining({
model: 'mistral-ocr-latest',
}),
expect.anything(),
);
});
it('should handle API errors during OCR process', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'test-api-key',
});
// Mock file upload to fail
mockAxios.post.mockRejectedValueOnce(new Error('Upload failed'));
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
apiKey: 'OCR_API_KEY',
baseURL: 'OCR_BASEURL',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'document.pdf',
};
await expect(
uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
}),
).rejects.toThrow('Error uploading document to Mistral OCR API');
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
const { logAxiosError } = require('~/utils');
expect(logAxiosError).toHaveBeenCalled();
});
it('should handle single page documents without page numbering', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'test-api-key',
OCR_BASEURL: 'https://api.mistral.ai/v1', // Make sure this is included
});
// Clear all previous mocks
mockAxios.post.mockClear();
mockAxios.get.mockClear();
// 1. First mock: File upload response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({ data: { id: 'file-123', purpose: 'ocr' } }),
);
// 2. Second mock: Signed URL response
mockAxios.get.mockImplementationOnce(() =>
Promise.resolve({ data: { url: 'https://signed-url.com' } }),
);
// 3. Third mock: OCR response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({
data: {
pages: [{ markdown: 'Single page content' }],
},
}),
);
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
apiKey: 'OCR_API_KEY',
baseURL: 'OCR_BASEURL',
mistralModel: 'mistral-ocr-latest',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'single-page.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Verify that single page documents don't include page numbering
expect(result.text).not.toContain('# PAGE');
expect(result.text).toEqual('Single page content\n\n');
});
it('should use literal values in configuration when provided directly', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
// We'll still mock this but it should not be used for literal values
loadAuthValues.mockResolvedValue({});
// Clear all previous mocks
mockAxios.post.mockClear();
mockAxios.get.mockClear();
// 1. First mock: File upload response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({ data: { id: 'file-123', purpose: 'ocr' } }),
);
// 2. Second mock: Signed URL response
mockAxios.get.mockImplementationOnce(() =>
Promise.resolve({ data: { url: 'https://signed-url.com' } }),
);
// 3. Third mock: OCR response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({
data: {
pages: [{ markdown: 'Processed with literal config values' }],
},
}),
);
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
// Direct values that should be used as-is, without variable substitution
apiKey: 'actual-api-key-value',
baseURL: 'https://direct-api-url.mistral.ai/v1',
mistralModel: 'mistral-direct-model',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'direct-values.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Verify the correct URL was used with the direct baseURL value
expect(mockAxios.post).toHaveBeenCalledWith(
'https://direct-api-url.mistral.ai/v1/files',
expect.any(Object),
expect.objectContaining({
headers: expect.objectContaining({
Authorization: 'Bearer actual-api-key-value',
}),
}),
);
// Check the OCR call was made with the direct model value
expect(mockAxios.post).toHaveBeenCalledWith(
'https://direct-api-url.mistral.ai/v1/ocr',
expect.objectContaining({
model: 'mistral-direct-model',
}),
expect.any(Object),
);
// Verify the result
expect(result.text).toEqual('Processed with literal config values\n\n');
// Verify loadAuthValues was never called since we used direct values
expect(loadAuthValues).not.toHaveBeenCalled();
});
it('should handle empty configuration values and use defaults', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
// Set up the mock values to be returned by loadAuthValues
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'default-from-env-key',
OCR_BASEURL: 'https://default-from-env.mistral.ai/v1',
});
// Clear all previous mocks
mockAxios.post.mockClear();
mockAxios.get.mockClear();
// 1. First mock: File upload response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({ data: { id: 'file-123', purpose: 'ocr' } }),
);
// 2. Second mock: Signed URL response
mockAxios.get.mockImplementationOnce(() =>
Promise.resolve({ data: { url: 'https://signed-url.com' } }),
);
// 3. Third mock: OCR response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({
data: {
pages: [{ markdown: 'Content from default configuration' }],
},
}),
);
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
// Empty string values - should fall back to defaults
apiKey: '',
baseURL: '',
mistralModel: '',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'empty-config.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Verify loadAuthValues was called with the default variable names
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user123',
authFields: ['OCR_BASEURL', 'OCR_API_KEY'],
optional: expect.any(Set),
});
// Verify the API calls used the default values from loadAuthValues
expect(mockAxios.post).toHaveBeenCalledWith(
'https://default-from-env.mistral.ai/v1/files',
expect.any(Object),
expect.objectContaining({
headers: expect.objectContaining({
Authorization: 'Bearer default-from-env-key',
}),
}),
);
// Verify the OCR model defaulted to mistral-ocr-latest
expect(mockAxios.post).toHaveBeenCalledWith(
'https://default-from-env.mistral.ai/v1/ocr',
expect.objectContaining({
model: 'mistral-ocr-latest',
}),
expect.any(Object),
);
// Check result
expect(result.text).toEqual('Content from default configuration\n\n');
});
});
});

View File

@@ -0,0 +1,5 @@
const crud = require('./crud');
module.exports = {
...crud,
};

View File

@@ -49,6 +49,7 @@ async function encodeAndFormat(req, files, endpoint, mode) {
const promises = [];
const encodingMethods = {};
const result = {
text: '',
files: [],
image_urls: [],
};
@@ -59,6 +60,9 @@ async function encodeAndFormat(req, files, endpoint, mode) {
for (let file of files) {
const source = file.source ?? FileSources.local;
if (source === FileSources.text && file.text) {
result.text += `${!result.text ? 'Attached document(s):\n```md' : '\n\n---\n\n'}# "${file.filename}"\n${file.text}\n`;
}
if (!file.height) {
promises.push([file, null]);
@@ -85,6 +89,10 @@ async function encodeAndFormat(req, files, endpoint, mode) {
promises.push(preparePayload(req, file));
}
if (result.text) {
result.text += '\n```';
}
const detail = req.body.imageDetail ?? ImageDetail.auto;
/** @type {Array<[MongoFile, string]>} */
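
For two attached text-source files, the template literal above produces a `result.text` shaped roughly like this (filenames and contents illustrative):

Attached document(s):
```md# "report.pdf"
First document's extracted text


---

# "notes.txt"
Second document's extracted text

```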

View File

@@ -28,8 +28,8 @@ const { addResourceFileId, deleteResourceFileId } = require('~/server/controller
const { addAgentResourceFile, removeAgentResourceFiles } = require('~/models/Agent');
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
const { createFile, updateFileUsage, deleteFiles } = require('~/models/File');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { getEndpointsConfig } = require('~/server/services/Config');
const { loadAuthValues } = require('~/app/clients/tools/util');
const { LB_QueueAsyncCall } = require('~/server/utils/queue');
const { getStrategyFunctions } = require('./strategies');
const { determineFileType } = require('~/server/utils');
@@ -162,7 +162,6 @@ const processDeleteRequest = async ({ req, files }) => {
for (const file of files) {
const source = file.source ?? FileSources.local;
if (req.body.agent_id && req.body.tool_resource) {
agentFiles.push({
tool_resource: req.body.tool_resource,
@@ -170,6 +169,11 @@ const processDeleteRequest = async ({ req, files }) => {
});
}
if (source === FileSources.text) {
resolvedFileIds.push(file.file_id);
continue;
}
if (checkOpenAIStorage(source) && !client[source]) {
await initializeClients();
}
@@ -521,6 +525,52 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
if (!isFileSearchEnabled) {
throw new Error('File search is not enabled for Agents');
}
} else if (tool_resource === EToolResources.ocr) {
const isOCREnabled = await checkCapability(req, AgentCapabilities.ocr);
if (!isOCREnabled) {
throw new Error('OCR capability is not enabled for Agents');
}
const { handleFileUpload } = getStrategyFunctions(
req.app.locals?.ocr?.strategy ?? FileSources.mistral_ocr,
);
const { file_id, temp_file_id } = metadata;
const {
text,
bytes,
// TODO: OCR images support?
images,
filename,
filepath: ocrFileURL,
} = await handleFileUpload({ req, file, file_id, entity_id: agent_id });
const fileInfo = removeNullishValues({
text,
bytes,
file_id,
temp_file_id,
user: req.user.id,
type: file.mimetype,
filepath: ocrFileURL,
source: FileSources.text,
filename: filename ?? file.originalname,
model: messageAttachment ? undefined : req.body.model,
context: messageAttachment ? FileContext.message_attachment : FileContext.agents,
});
if (!messageAttachment && tool_resource) {
await addAgentResourceFile({
req,
file_id,
agent_id,
tool_resource,
});
}
const result = await createFile(fileInfo, true);
return res
.status(200)
.json({ message: 'Agent file uploaded and processed successfully', ...result });
}
const source =

View File

@@ -24,6 +24,7 @@ const {
const { uploadOpenAIFile, deleteOpenAIFile, getOpenAIFileStream } = require('./OpenAI');
const { getCodeOutputDownloadStream, uploadCodeEnvFile } = require('./Code');
const { uploadVectors, deleteVectors } = require('./VectorDB');
const { uploadMistralOCR } = require('./MistralOCR');
/**
* Firebase Storage Strategy Functions
@@ -127,6 +128,26 @@ const codeOutputStrategy = () => ({
getDownloadStream: getCodeOutputDownloadStream,
});
const mistralOCRStrategy = () => ({
/** @type {typeof saveFileFromURL | null} */
saveURL: null,
/** @type {typeof getLocalFileURL | null} */
getFileURL: null,
/** @type {typeof saveLocalBuffer | null} */
saveBuffer: null,
/** @type {typeof processLocalAvatar | null} */
processAvatar: null,
/** @type {typeof uploadLocalImage | null} */
handleImageUpload: null,
/** @type {typeof prepareImagesLocal | null} */
prepareImagePayload: null,
/** @type {typeof deleteLocalFile | null} */
deleteFile: null,
/** @type {typeof getLocalFileStream | null} */
getDownloadStream: null,
handleFileUpload: uploadMistralOCR,
});
// Strategy Selector
const getStrategyFunctions = (fileSource) => {
if (fileSource === FileSources.firebase) {
@@ -141,6 +162,8 @@ const getStrategyFunctions = (fileSource) => {
return vectorStrategy();
} else if (fileSource === FileSources.execute_code) {
return codeOutputStrategy();
} else if (fileSource === FileSources.mistral_ocr) {
return mistralOCRStrategy();
} else {
throw new Error('Invalid file source');
}
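
Usage sketch for the new branch (`FileSources.mistral_ocr` assumed exported by librechat-data-provider, as the imports above suggest):

const { FileSources } = require('librechat-data-provider');
const { getStrategyFunctions } = require('./strategies');

// Only handleFileUpload is wired for this source; every other operation is
// null because OCR files are upload-only and persist as text in the database.
const { handleFileUpload } = getStrategyFunctions(FileSources.mistral_ocr);
// handleFileUpload === uploadMistralOCR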

View File

@@ -4,7 +4,9 @@ const { HttpsProxyAgent } = require('https-proxy-agent');
const { EModelEndpoint, defaultModels, CacheKeys } = require('librechat-data-provider');
const { inputSchema, logAxiosError, extractBaseURL, processModelData } = require('~/utils');
const { OllamaClient } = require('~/app/clients/OllamaClient');
const { isUserProvided } = require('~/server/utils');
const getLogStores = require('~/cache/getLogStores');
const { logger } = require('~/config');
/**
* Splits a string by commas and trims each resulting value.
@@ -42,7 +44,7 @@ const fetchModels = async ({
user,
apiKey,
baseURL,
name = 'OpenAI',
name = EModelEndpoint.openAI,
azure = false,
userIdQuery = false,
createTokenConfig = true,
@@ -64,12 +66,19 @@ const fetchModels = async ({
try {
const options = {
headers: {
Authorization: `Bearer ${apiKey}`,
},
headers: {},
timeout: 5000,
};
if (name === EModelEndpoint.anthropic) {
options.headers = {
'x-api-key': apiKey,
'anthropic-version': process.env.ANTHROPIC_VERSION || '2023-06-01',
};
} else {
options.headers.Authorization = `Bearer ${apiKey}`;
}
if (process.env.PROXY) {
options.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
}
@@ -148,7 +157,7 @@ const fetchOpenAIModels = async (opts, _models = []) => {
baseURL,
azure: opts.azure,
user: opts.user,
name: baseURL,
name: EModelEndpoint.openAI,
});
}
@@ -157,7 +166,7 @@ const fetchOpenAIModels = async (opts, _models = []) => {
}
if (baseURL === openaiBaseURL) {
const regex = /(text-davinci-003|gpt-|o\d+-)/;
const regex = /(text-davinci-003|gpt-|o\d+)/;
const excludeRegex = /audio|realtime/;
models = models.filter((model) => regex.test(model) && !excludeRegex.test(model));
const instructModels = models.filter((model) => model.includes('instruct'));
@@ -231,13 +240,71 @@ const getChatGPTBrowserModels = () => {
return models;
};
const getAnthropicModels = () => {
/**
* Fetches models from the Anthropic API.
* @async
* @function
* @param {object} opts - The options for fetching the models.
* @param {string} opts.user - The user ID to send to the API.
* @param {string[]} [_models=[]] - The models to use as a fallback.
*/
const fetchAnthropicModels = async (opts, _models = []) => {
let models = _models.slice() ?? [];
let apiKey = process.env.ANTHROPIC_API_KEY;
const anthropicBaseURL = 'https://api.anthropic.com/v1';
let baseURL = anthropicBaseURL;
let reverseProxyUrl = process.env.ANTHROPIC_REVERSE_PROXY;
if (reverseProxyUrl) {
baseURL = extractBaseURL(reverseProxyUrl);
}
if (!apiKey) {
return models;
}
const modelsCache = getLogStores(CacheKeys.MODEL_QUERIES);
const cachedModels = await modelsCache.get(baseURL);
if (cachedModels) {
return cachedModels;
}
if (baseURL) {
models = await fetchModels({
apiKey,
baseURL,
user: opts.user,
name: EModelEndpoint.anthropic,
tokenKey: EModelEndpoint.anthropic,
});
}
if (models.length === 0) {
return _models;
}
await modelsCache.set(baseURL, models);
return models;
};
const getAnthropicModels = async (opts = {}) => {
let models = defaultModels[EModelEndpoint.anthropic];
if (process.env.ANTHROPIC_MODELS) {
models = splitAndTrim(process.env.ANTHROPIC_MODELS);
return models;
}
return models;
if (isUserProvided(process.env.ANTHROPIC_API_KEY)) {
return models;
}
try {
return await fetchAnthropicModels(opts, models);
} catch (error) {
logger.error('Error fetching Anthropic models:', error);
return models;
}
};
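
`fetchModels` now branches its headers on the endpoint name; for Anthropic the request amounts to something like this (the `/v1/models` path and response shape follow Anthropic's public API, not this diff):

const axios = require('axios');

async function listAnthropicModelIds(apiKey) {
  const { data } = await axios.get('https://api.anthropic.com/v1/models', {
    headers: {
      'x-api-key': apiKey,
      'anthropic-version': process.env.ANTHROPIC_VERSION || '2023-06-01',
    },
    timeout: 5000,
  });
  // Anthropic returns { data: [{ id, ... }, ...] }.
  return data.data.map((model) => model.id);
}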
const getGoogleModels = () => {

View File

@@ -352,15 +352,15 @@ describe('splitAndTrim', () => {
});
describe('getAnthropicModels', () => {
it('returns default models when ANTHROPIC_MODELS is not set', () => {
it('returns default models when ANTHROPIC_MODELS is not set', async () => {
delete process.env.ANTHROPIC_MODELS;
const models = getAnthropicModels();
const models = await getAnthropicModels();
expect(models).toEqual(defaultModels[EModelEndpoint.anthropic]);
});
it('returns models from ANTHROPIC_MODELS when set', () => {
it('returns models from ANTHROPIC_MODELS when set', async () => {
process.env.ANTHROPIC_MODELS = 'claude-1, claude-2 ';
const models = getAnthropicModels();
const models = await getAnthropicModels();
expect(models).toEqual(['claude-1', 'claude-2']);
});
});

View File

@@ -0,0 +1,56 @@
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
/**
*
* @param {Object} params
* @param {string} params.userId
* @param {string[]} params.authFields
* @param {Set<string>} [params.optional]
* @param {boolean} [params.throwError]
* @returns
*/
const loadAuthValues = async ({ userId, authFields, optional, throwError = true }) => {
let authValues = {};
/**
* Finds the first non-empty value for the given authentication field, supporting alternate fields.
* @param {string[]} fields Array of strings representing the authentication fields. Supports alternate fields delimited by "||".
* @returns {Promise<{ authField: string, authValue: string} | null>} An object containing the authentication field and value, or null if not found.
*/
const findAuthValue = async (fields) => {
for (const field of fields) {
let value = process.env[field];
if (value) {
return { authField: field, authValue: value };
}
try {
value = await getUserPluginAuthValue(userId, field, throwError);
} catch (err) {
if (optional && optional.has(field)) {
return { authField: field, authValue: undefined };
}
if (field === fields[fields.length - 1] && !value) {
throw err;
}
}
if (value) {
return { authField: field, authValue: value };
}
}
return null;
};
for (let authField of authFields) {
const fields = authField.split('||');
const result = await findAuthValue(fields);
if (result) {
authValues[result.authField] = result.authValue;
}
}
return authValues;
};
module.exports = {
loadAuthValues,
};
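
A usage example mirroring the OCR service's call, where the base URL is optional and resolves to `undefined` rather than throwing:

const { loadAuthValues } = require('~/server/services/Tools/credentials');

async function resolveOcrCredentials(userId) {
  // Each field is checked against process.env first, then against the
  // user's stored plugin auth values; optional fields never throw.
  return loadAuthValues({
    userId,
    authFields: ['OCR_BASEURL', 'OCR_API_KEY'],
    optional: new Set(['OCR_BASEURL']),
  });
  // -> e.g. { OCR_API_KEY: '...', OCR_BASEURL: undefined }
}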

View File

@@ -203,6 +203,7 @@ function generateConfig(key, baseURL, endpoint) {
AgentCapabilities.artifacts,
AgentCapabilities.actions,
AgentCapabilities.tools,
AgentCapabilities.ocr,
];
}
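
With `ocr` now a default agent capability, the corresponding `req.app.locals.ocr` config takes the shape the MistralOCR tests earlier in this diff exercise (values illustrative; `${...}` references are resolved per-request, not at startup):

const ocr = {
  apiKey: '${OCR_API_KEY}',           // env reference, resolved at request time
  baseURL: '${OCR_BASEURL}',          // optional; defaults to Mistral's API URL
  mistralModel: 'mistral-ocr-latest', // plain string or '${ENV_VAR}' reference
  strategy: 'mistral_ocr',            // selects the handler via getStrategyFunctions
};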

View File

@@ -39,7 +39,10 @@ jest.mock('winston-daily-rotate-file', () => {
});
jest.mock('~/config', () => {
const actualModule = jest.requireActual('~/config');
return {
sendEvent: actualModule.sendEvent,
createAxiosInstance: actualModule.createAxiosInstance,
logger: {
info: jest.fn(),
warn: jest.fn(),

View File

@@ -766,36 +766,6 @@
* @memberof typedefs
*/
/**
* @exports MongoFile
* @typedef {import('~/models/schema/fileSchema.js').MongoFile} MongoFile
* @memberof typedefs
*/
/**
* @exports ToolCallData
* @typedef {import('~/models/schema/toolCallSchema.js').ToolCallData} ToolCallData
* @memberof typedefs
*/
/**
* @exports MongoUser
* @typedef {import('~/models/schema/userSchema.js').MongoUser} MongoUser
* @memberof typedefs
*/
/**
* @exports MongoProject
* @typedef {import('~/models/schema/projectSchema.js').MongoProject} MongoProject
* @memberof typedefs
*/
/**
* @exports MongoPromptGroup
* @typedef {import('~/models/schema/promptSchema.js').MongoPromptGroup} MongoPromptGroup
* @memberof typedefs
*/
/**
* @exports uploadImageBuffer
* @typedef {import('~/server/services/Files/process').uploadImageBuffer} uploadImageBuffer
@@ -1817,3 +1787,51 @@
* @typedef {Promise<{ message: TMessage, conversation: TConversation }> | undefined} ClientDatabaseSavePromise
* @memberof typedefs
*/
/**
* @exports OCRImage
* @typedef {Object} OCRImage
* @property {string} id - The identifier of the image.
* @property {number} top_left_x - X-coordinate of the top left corner of the image.
* @property {number} top_left_y - Y-coordinate of the top left corner of the image.
* @property {number} bottom_right_x - X-coordinate of the bottom right corner of the image.
* @property {number} bottom_right_y - Y-coordinate of the bottom right corner of the image.
* @property {string} image_base64 - Base64-encoded image data.
* @memberof typedefs
*/
/**
* @exports PageDimensions
* @typedef {Object} PageDimensions
* @property {number} dpi - The dots per inch resolution of the page.
* @property {number} height - The height of the page in pixels.
* @property {number} width - The width of the page in pixels.
* @memberof typedefs
*/
/**
* @exports OCRPage
* @typedef {Object} OCRPage
* @property {number} index - The index of the page in the document.
* @property {string} markdown - The extracted text content of the page in markdown format.
* @property {OCRImage[]} images - Array of images found on the page.
* @property {PageDimensions} dimensions - The dimensions of the page.
* @memberof typedefs
*/
/**
* @exports OCRUsageInfo
* @typedef {Object} OCRUsageInfo
* @property {number} pages_processed - Number of pages processed in the document.
* @property {number} doc_size_bytes - Size of the document in bytes.
* @memberof typedefs
*/
/**
* @exports OCRResult
* @typedef {Object} OCRResult
* @property {OCRPage[]} pages - Array of pages extracted from the document.
* @property {string} model - The model used for OCR processing.
* @property {OCRUsageInfo} usage_info - Usage information for the OCR operation.
* @memberof typedefs
*/

View File

@@ -1,56 +0,0 @@
const levels = {
NONE: 0,
LOW: 1,
MEDIUM: 2,
HIGH: 3,
};
let level = levels.HIGH;
module.exports = {
levels,
setLevel: (l) => (level = l),
log: {
parameters: (parameters) => {
if (levels.HIGH > level) {
return;
}
console.group();
parameters.forEach((p) => console.log(`${p.name}:`, p.value));
console.groupEnd();
},
functionName: (name) => {
if (levels.MEDIUM > level) {
return;
}
console.log(`\nEXECUTING: ${name}\n`);
},
flow: (flow) => {
if (levels.LOW > level) {
return;
}
console.log(`\n\n\nBEGIN FLOW: ${flow}\n\n\n`);
},
variable: ({ name, value }) => {
if (levels.HIGH > level) {
return;
}
console.group();
console.group();
console.log(`VARIABLE ${name}:`, value);
console.groupEnd();
console.groupEnd();
},
request: () => (req, res, next) => {
if (levels.HIGH > level) {
return next();
}
console.log('Hit URL', req.url, 'with following:');
console.group();
console.log('Query:', req.query);
console.log('Body:', req.body);
console.groupEnd();
return next();
},
},
};

View File

@@ -1,6 +1,6 @@
{
"name": "@librechat/frontend",
"version": "v0.7.7-rc1",
"version": "v0.7.7",
"description": "",
"type": "module",
"scripts": {

View File

@@ -5,6 +5,7 @@ import type { OptionWithIcon, ExtendedFile } from './types';
export type TAgentOption = OptionWithIcon &
Agent & {
knowledge_files?: Array<[string, ExtendedFile]>;
context_files?: Array<[string, ExtendedFile]>;
code_files?: Array<[string, ExtendedFile]>;
};

View File

@@ -483,6 +483,7 @@ export interface ExtendedFile {
attached?: boolean;
embedded?: boolean;
tool_resource?: string;
metadata?: t.TFile['metadata'];
}
export type ContextType = { navVisible: boolean; setNavVisible: (visible: boolean) => void };

View File

@@ -8,8 +8,8 @@ import {
import { SandpackProviderProps } from '@codesandbox/sandpack-react/unstyled';
import type { CodeEditorRef } from '@codesandbox/sandpack-react';
import type { ArtifactFiles, Artifact } from '~/common';
import { useEditArtifact, useGetStartupConfig } from '~/data-provider';
import { sharedFiles, sharedOptions } from '~/utils/artifacts';
import { useEditArtifact } from '~/data-provider';
import { useEditorContext } from '~/Providers';
const createDebouncedMutation = (
@@ -124,6 +124,17 @@ export const ArtifactCodeEditor = memo(function ({
sharedProps: Partial<SandpackProviderProps>;
editorRef: React.MutableRefObject<CodeEditorRef>;
}) {
const { data: config } = useGetStartupConfig();
const options: typeof sharedOptions = useMemo(() => {
if (!config) {
return sharedOptions;
}
return {
...sharedOptions,
bundlerURL: config.bundlerURL,
};
}, [config]);
if (Object.keys(files).length === 0) {
return null;
}
@@ -135,7 +146,7 @@ export const ArtifactCodeEditor = memo(function ({
...files,
...sharedFiles,
}}
options={{ ...sharedOptions }}
options={options}
{...sharedProps}
template={template}
>

View File

@@ -7,6 +7,7 @@ import {
import type { SandpackPreviewRef } from '@codesandbox/sandpack-react/unstyled';
import type { ArtifactFiles } from '~/common';
import { sharedFiles, sharedOptions } from '~/utils/artifacts';
import { useGetStartupConfig } from '~/data-provider';
import { useEditorContext } from '~/Providers';
export const ArtifactPreview = memo(function ({
@@ -23,6 +24,8 @@ export const ArtifactPreview = memo(function ({
previewRef: React.MutableRefObject<SandpackPreviewRef>;
}) {
const { currentCode } = useEditorContext();
const { data: config } = useGetStartupConfig();
const artifactFiles = useMemo(() => {
if (Object.keys(files).length === 0) {
return files;
@@ -38,6 +41,17 @@ export const ArtifactPreview = memo(function ({
},
};
}, [currentCode, files, fileKey]);
const options: typeof sharedOptions = useMemo(() => {
if (!config) {
return sharedOptions;
}
return {
...sharedOptions,
bundlerURL: config.bundlerURL,
};
}, [config]);
if (Object.keys(artifactFiles).length === 0) {
return null;
}
@@ -48,7 +62,7 @@ export const ArtifactPreview = memo(function ({
...artifactFiles,
...sharedFiles,
}}
options={{ ...sharedOptions }}
options={options}
{...sharedProps}
template={template}
>

View File

@@ -1,7 +1,8 @@
import { useState, useMemo, memo, useCallback } from 'react';
import React, { useState, useMemo, memo, useCallback } from 'react';
import { useRecoilValue } from 'recoil';
import { Atom, ChevronDown } from 'lucide-react';
import type { MouseEvent, FC } from 'react';
import Markdown from '~/components/Chat/Messages/Content/Markdown';
import { useLocalize } from '~/hooks';
import { cn } from '~/utils';
import store from '~/store';
@@ -21,12 +22,18 @@ const CONTENT_STYLES = {
} as const;
export const ThinkingContent: FC<{ children: React.ReactNode; isPart?: boolean }> = memo(
({ isPart, children }) => (
<div className={CONTENT_STYLES.wrapper}>
<div className={isPart === true ? CONTENT_STYLES.partBorder : CONTENT_STYLES.border} />
<p className={CONTENT_STYLES.text}>{children}</p>
</div>
),
({ isPart, children }) => {
return (
<div className={CONTENT_STYLES.wrapper}>
<div className={isPart === true ? CONTENT_STYLES.partBorder : CONTENT_STYLES.border} />
{typeof children === 'string' ? (
<Markdown content={children} isLatestMessage={false} />
) : (
<p className={CONTENT_STYLES.text}>{children}</p>
)}
</div>
);
},
);
export const ThinkingButton = memo(

View File

@@ -1,7 +1,7 @@
import * as Ariakit from '@ariakit/react';
import React, { useRef, useState, useMemo } from 'react';
import { FileSearch, ImageUpIcon, TerminalSquareIcon } from 'lucide-react';
import { EToolResources, EModelEndpoint } from 'librechat-data-provider';
import { FileSearch, ImageUpIcon, TerminalSquareIcon, FileType2Icon } from 'lucide-react';
import { FileUpload, TooltipAnchor, DropdownPopup } from '~/components/ui';
import { useGetEndpointsQuery } from '~/data-provider';
import { AttachmentIcon } from '~/components/svg';
@@ -49,6 +49,17 @@ const AttachFile = ({ isRTL, disabled, handleFileChange }: AttachFileProps) => {
},
];
if (capabilities.includes(EToolResources.ocr)) {
items.push({
label: localize('com_ui_upload_ocr_text'),
onClick: () => {
setToolResource(EToolResources.ocr);
handleUploadClick();
},
icon: <FileType2Icon className="icon-md" />,
});
}
if (capabilities.includes(EToolResources.file_search)) {
items.push({
label: localize('com_ui_upload_file_search'),

View File

@@ -1,6 +1,6 @@
import React, { useMemo } from 'react';
import { EModelEndpoint, EToolResources } from 'librechat-data-provider';
import { FileSearch, ImageUpIcon, TerminalSquareIcon } from 'lucide-react';
import { FileSearch, ImageUpIcon, FileType2Icon, TerminalSquareIcon } from 'lucide-react';
import OGDialogTemplate from '~/components/ui/OGDialogTemplate';
import { useGetEndpointsQuery } from '~/data-provider';
import useLocalize from '~/hooks/useLocalize';
@@ -50,6 +50,12 @@ const DragDropModal = ({ onOptionSelect, setShowModal, files, isVisible }: DragD
value: EToolResources.execute_code,
icon: <TerminalSquareIcon className="icon-md" />,
});
} else if (capability === EToolResources.ocr) {
_options.push({
label: localize('com_ui_upload_ocr_text'),
value: EToolResources.ocr,
icon: <FileType2Icon className="icon-md" />,
});
}
}

View File

@@ -19,7 +19,7 @@ const FilePreview = ({
};
className?: string;
}) => {
const radius = 55; // Radius of the SVG circle
const radius = 55;
const circumference = 2 * Math.PI * radius;
const progress = useProgress(
file?.['progress'] ?? 1,
@@ -27,16 +27,15 @@ const FilePreview = ({
(file as ExtendedFile | undefined)?.size ?? 1,
);
// Calculate the offset based on the loading progress
const offset = circumference - progress * circumference;
const circleCSSProperties = {
transition: 'stroke-dashoffset 0.5s linear',
};
return (
<div className={cn('size-10 shrink-0 overflow-hidden rounded-xl', className)}>
<div className={cn('relative size-10 shrink-0 overflow-hidden rounded-xl', className)}>
<FileIcon file={file} fileType={fileType} />
<SourceIcon source={file?.source} />
<SourceIcon source={file?.source} isCodeFile={!!file?.['metadata']?.fileIdentifier} />
{progress < 1 && (
<ProgressCircle
circumference={circumference}

Some files were not shown because too many files have changed in this diff Show More