Compare commits: feat/realt...feat/E2EE (10 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 40e59bc55c |  |
|  | 94f0d1cb41 |  |
|  | d37cc1cf4d |  |
|  | 7346d20224 |  |
|  | 0cc0e5d287 |  |
|  | d01674a4c6 |  |
|  | d4621c3ea8 |  |
|  | 94d32906f1 |  |
|  | 606fea044a |  |
|  | 18d019d8b3 |  |
.env.example (28 changes)
@@ -142,7 +142,7 @@ GOOGLE_KEY=user_provided
# GOOGLE_AUTH_HEADER=true

# Gemini API (AI Studio)
- # GOOGLE_MODELS=gemini-2.5-pro-exp-03-25,gemini-2.0-flash-exp,gemini-2.0-flash-thinking-exp-1219,gemini-exp-1121,gemini-exp-1114,gemini-1.5-flash-latest,gemini-1.0-pro,gemini-1.0-pro-001,gemini-1.0-pro-latest,gemini-1.0-pro-vision-latest,gemini-1.5-pro-latest,gemini-pro,gemini-pro-vision
+ # GOOGLE_MODELS=gemini-2.0-flash-exp,gemini-2.0-flash-thinking-exp-1219,gemini-exp-1121,gemini-exp-1114,gemini-1.5-flash-latest,gemini-1.0-pro,gemini-1.0-pro-001,gemini-1.0-pro-latest,gemini-1.0-pro-vision-latest,gemini-1.5-pro-latest,gemini-pro,gemini-pro-vision

# Vertex AI
# GOOGLE_MODELS=gemini-1.5-flash-preview-0514,gemini-1.5-pro-preview-0514,gemini-1.0-pro-vision-001,gemini-1.0-pro-002,gemini-1.0-pro-001,gemini-pro-vision,gemini-1.0-pro
@@ -364,7 +364,7 @@ ILLEGAL_MODEL_REQ_SCORE=5
# Balance #
#========================#

- # CHECK_BALANCE=false
+ CHECK_BALANCE=false
# START_BALANCE=20000 # note: the number of tokens that will be credited after registration.

#========================#
@@ -432,19 +432,15 @@ OPENID_NAME_CLAIM=

OPENID_BUTTON_LABEL=
OPENID_IMAGE_URL=
# Set to true to automatically redirect to the OpenID provider when a user visits the login page
# This will bypass the login form completely for users, only use this if OpenID is your only authentication method
OPENID_AUTO_REDIRECT=false

# LDAP
LDAP_URL=
LDAP_BIND_DN=
LDAP_BIND_CREDENTIALS=
LDAP_USER_SEARCH_BASE=
#LDAP_SEARCH_FILTER="mail="
LDAP_SEARCH_FILTER=mail={{username}}
LDAP_CA_CERT_PATH=
# LDAP_TLS_REJECT_UNAUTHORIZED=
# LDAP_STARTTLS=
# LDAP_LOGIN_USES_USERNAME=true
# LDAP_ID=
# LDAP_USERNAME=
@@ -477,24 +473,6 @@ FIREBASE_STORAGE_BUCKET=
FIREBASE_MESSAGING_SENDER_ID=
FIREBASE_APP_ID=

#========================#
# S3 AWS Bucket #
#========================#

AWS_ENDPOINT_URL=
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
AWS_REGION=
AWS_BUCKET_NAME=

#========================#
# Azure Blob Storage #
#========================#

AZURE_STORAGE_CONNECTION_STRING=
AZURE_STORAGE_PUBLIC_ACCESS=false
AZURE_CONTAINER_NAME=files

#========================#
# Shared Links #
#========================#
.github/workflows/backend-review.yml (3 changes)
@@ -39,9 +39,6 @@ jobs:
      - name: Install MCP Package
        run: npm run build:mcp

-     - name: Install Data Schemas Package
-       run: npm run build:data-schemas
-
      - name: Create empty auth.json file
        run: |
          mkdir -p api/data
.github/workflows/data-schemas.yml (58 changes)
@@ -1,58 +0,0 @@
- name: Publish `@librechat/data-schemas` to NPM
-
- on:
-   push:
-     branches:
-       - main
-     paths:
-       - 'packages/data-schemas/package.json'
-   workflow_dispatch:
-     inputs:
-       reason:
-         description: 'Reason for manual trigger'
-         required: false
-         default: 'Manual publish requested'
-
- jobs:
-   build-and-publish:
-     runs-on: ubuntu-latest
-     steps:
-       - uses: actions/checkout@v4
-
-       - name: Use Node.js
-         uses: actions/setup-node@v4
-         with:
-           node-version: '18.x'
-
-       - name: Install dependencies
-         run: cd packages/data-schemas && npm ci
-
-       - name: Build
-         run: cd packages/data-schemas && npm run build
-
-       - name: Set up npm authentication
-         run: echo "//registry.npmjs.org/:_authToken=${{ secrets.PUBLISH_NPM_TOKEN }}" > ~/.npmrc
-
-       - name: Check version change
-         id: check
-         working-directory: packages/data-schemas
-         run: |
-           PACKAGE_VERSION=$(node -p "require('./package.json').version")
-           PUBLISHED_VERSION=$(npm view @librechat/data-schemas version 2>/dev/null || echo "0.0.0")
-           if [ "$PACKAGE_VERSION" = "$PUBLISHED_VERSION" ]; then
-             echo "No version change, skipping publish"
-             echo "skip=true" >> $GITHUB_OUTPUT
-           else
-             echo "Version changed, proceeding with publish"
-             echo "skip=false" >> $GITHUB_OUTPUT
-           fi
-
-       - name: Pack package
-         if: steps.check.outputs.skip != 'true'
-         working-directory: packages/data-schemas
-         run: npm pack
-
-       - name: Publish
-         if: steps.check.outputs.skip != 'true'
-         working-directory: packages/data-schemas
-         run: npm publish *.tgz --access public
@@ -84,11 +84,11 @@ jobs:
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          sign-commits: true
-         commit-message: "chore: update CHANGELOG for release ${{ github.ref_name }}"
+         commit-message: "chore: update CHANGELOG for release ${GITHUB_REF##*/}"
          base: main
-         branch: "changelog/${{ github.ref_name }}"
+         branch: "changelog/${GITHUB_REF##*/}"
          reviewers: danny-avila
-         title: "chore: update CHANGELOG for release ${{ github.ref_name }}"
+         title: "chore: update CHANGELOG for release ${GITHUB_REF##*/}"
          body: |
            **Description**:
-           - This PR updates the CHANGELOG.md by removing the "Unreleased" section and adding new release notes for release ${{ github.ref_name }} above previous releases.
+           - This PR updates the CHANGELOG.md by removing the "Unreleased" section and adding new release notes for release ${GITHUB_REF##*/} above previous releases.
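Note: `${GITHUB_REF##*/}` is plain shell parameter expansion rather than a GitHub Actions expression; it strips everything through the last `/`, so a ref such as `refs/tags/v1.0.0` (an illustrative ref, not one from this diff) yields `v1.0.0`. Unlike `${{ github.ref_name }}`, it is only expanded in contexts that actually pass through a shell.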
.gitignore (4 changes)
@@ -37,10 +37,6 @@ client/public/main.js
client/public/main.js.map
client/public/main.js.LICENSE.txt

- # Azure Blob Storage Emulator (Azurite)
- __azurite**
- __blobstorage__/**/*
-
# Dependency directorys
# Deployed apps should consider commenting these lines out:
# see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git
CHANGELOG.md (16 changes)
@@ -1,16 +0,0 @@
- # Changelog
-
- All notable changes to this project will be documented in this file.
-
- ## [Unreleased]
-
- ### ✨ New Features
-
- - 🪄 feat: Agent Artifacts by **@danny-avila** in [#5804](https://github.com/danny-avila/LibreChat/pull/5804)
-
- ### ⚙️ Other Changes
-
- - 🔄 chore: Enforce 18next Language Keys by **@rubentalstra** in [#5803](https://github.com/danny-avila/LibreChat/pull/5803)
- - 🔃 refactor: Parent Message ID Handling on Error, Update Translations, Bump Agents by **@danny-avila** in [#5833](https://github.com/danny-avila/LibreChat/pull/5833)
-
- ---
@@ -1,4 +1,4 @@
- # v0.7.7
+ # v0.7.7-rc1

# Base node image
FROM node:20-alpine AS node
@@ -1,5 +1,5 @@
# Dockerfile.multi
- # v0.7.7
+ # v0.7.7-rc1

# Base for all builds
FROM node:20-alpine AS base-min
@@ -11,7 +11,6 @@ RUN npm config set fetch-retry-maxtimeout 600000 && \
COPY package*.json ./
COPY packages/data-provider/package*.json ./packages/data-provider/
COPY packages/mcp/package*.json ./packages/mcp/
- COPY packages/data-schemas/package*.json ./packages/data-schemas/
COPY client/package*.json ./client/
COPY api/package*.json ./api/
@@ -33,13 +32,6 @@ COPY packages/mcp ./
COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
RUN npm run build

- # Build data-schemas
- FROM base AS data-schemas-build
- WORKDIR /app/packages/data-schemas
- COPY packages/data-schemas ./
- COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
- RUN npm run build
-
# Client build
FROM base AS client-build
WORKDIR /app/client
@@ -57,9 +49,8 @@ COPY api ./api
COPY config ./config
COPY --from=data-provider-build /app/packages/data-provider/dist ./packages/data-provider/dist
COPY --from=mcp-build /app/packages/mcp/dist ./packages/mcp/dist
- COPY --from=data-schemas-build /app/packages/data-schemas/dist ./packages/data-schemas/dist
COPY --from=client-build /app/client/dist ./client/dist
WORKDIR /app/api
EXPOSE 3080
ENV HOST=0.0.0.0
- CMD ["node", "server/index.js"]
+ CMD ["node", "server/index.js"]
@@ -197,6 +197,6 @@ We thank [Locize](https://locize.com) for their translation management tools tha
<p align="center">
  <a href="https://locize.com" target="_blank" rel="noopener noreferrer">
-   <img src="https://github.com/user-attachments/assets/d6b70894-6064-475e-bb65-92a9e23e0077" alt="Locize Logo" height="50">
+   <img src="https://locize.com/img/locize_color.svg" alt="Locize Logo" height="50">
  </a>
</p>
@@ -2,7 +2,6 @@ const Anthropic = require('@anthropic-ai/sdk');
const { HttpsProxyAgent } = require('https-proxy-agent');
const {
  Constants,
- ErrorTypes,
  EModelEndpoint,
  anthropicSettings,
  getResponseSender,
@@ -148,17 +147,12 @@ class AnthropicClient extends BaseClient {
    this.maxPromptTokens =
      this.options.maxPromptTokens || this.maxContextTokens - this.maxResponseTokens;

-   const reservedTokens = this.maxPromptTokens + this.maxResponseTokens;
-   if (reservedTokens > this.maxContextTokens) {
-     const info = `Total Possible Tokens + Max Output Tokens must be less than or equal to Max Context Tokens: ${this.maxPromptTokens} (total possible output) + ${this.maxResponseTokens} (max output) = ${reservedTokens}/${this.maxContextTokens} (max context)`;
-     const errorMessage = `{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${info}" }`;
-     logger.warn(info);
-     throw new Error(errorMessage);
-   } else if (this.maxResponseTokens === this.maxContextTokens) {
-     const info = `Max Output Tokens must be less than Max Context Tokens: ${this.maxResponseTokens} (max output) = ${this.maxContextTokens} (max context)`;
-     const errorMessage = `{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${info}" }`;
-     logger.warn(info);
-     throw new Error(errorMessage);
+   if (this.maxPromptTokens + this.maxResponseTokens > this.maxContextTokens) {
+     throw new Error(
+       `maxPromptTokens + maxOutputTokens (${this.maxPromptTokens} + ${this.maxResponseTokens} = ${
+         this.maxPromptTokens + this.maxResponseTokens
+       }) must be less than or equal to maxContextTokens (${this.maxContextTokens})`,
+     );
    }

    this.sender =
@@ -695,16 +689,6 @@ class AnthropicClient extends BaseClient {
    return (msg) => {
      if (msg.text != null && msg.text && msg.text.startsWith(':::thinking')) {
        msg.text = msg.text.replace(/:::thinking.*?:::/gs, '').trim();
      } else if (msg.content != null) {
        /** @type {import('@librechat/agents').MessageContentComplex} */
        const newContent = [];
        for (let part of msg.content) {
          if (part.think != null) {
            continue;
          }
          newContent.push(part);
        }
        msg.content = newContent;
      }

      return msg;
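For a concrete sense of the guard above: with illustrative values maxContextTokens = 200,000 and maxResponseTokens = 8,192, maxPromptTokens defaults to 200,000 − 8,192 = 191,808, and any configuration where maxPromptTokens + maxResponseTokens exceeds 200,000 is rejected at construction time.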
@@ -1,23 +1,63 @@
- const crypto = require('crypto');
const fetch = require('node-fetch');
const {
  supportsBalanceCheck,
  isAgentsEndpoint,
  isParamEndpoint,
  EModelEndpoint,
  ContentTypes,
  excludedKeys,
  ErrorTypes,
  Constants,
} = require('librechat-data-provider');
- const { getMessages, saveMessage, updateMessage, saveConvo, getConvo } = require('~/models');
- const { checkBalance } = require('~/models/balanceMethods');
+ const { getMessages, saveMessage, updateMessage, saveConvo, getConvo, getUserById } = require('~/models');
+ const { addSpaceIfNeeded, isEnabled } = require('~/server/utils');
- const { truncateToolCallOutputs } = require('./prompts');
- const { addSpaceIfNeeded } = require('~/server/utils');
+ const checkBalance = require('~/models/checkBalance');
const { getFiles } = require('~/models/File');
const TextStream = require('./TextStream');
const { logger } = require('~/config');

+ let crypto;
+ try {
+   crypto = require('crypto');
+ } catch (err) {
+   logger.error('[AskController] crypto support is disabled!', err);
+ }
+
+ /**
+  * Helper function to encrypt plaintext using AES-256-GCM and then RSA-encrypt the AES key.
+  * @param {string} plainText - The plaintext to encrypt.
+  * @param {string} pemPublicKey - The RSA public key in PEM format.
+  * @returns {Object} An object containing the ciphertext, iv, authTag, and encryptedKey.
+  */
+ function encryptText(plainText, pemPublicKey) {
+   // Generate a random 256-bit AES key and a 12-byte IV.
+   const aesKey = crypto.randomBytes(32);
+   const iv = crypto.randomBytes(12);
+
+   // Encrypt the plaintext using AES-256-GCM.
+   const cipher = crypto.createCipheriv('aes-256-gcm', aesKey, iv);
+   let ciphertext = cipher.update(plainText, 'utf8', 'base64');
+   ciphertext += cipher.final('base64');
+   const authTag = cipher.getAuthTag().toString('base64');
+
+   // Encrypt the AES key using the user's RSA public key.
+   const encryptedKey = crypto.publicEncrypt(
+     {
+       key: pemPublicKey,
+       padding: crypto.constants.RSA_PKCS1_OAEP_PADDING,
+       oaepHash: 'sha256',
+     },
+     aesKey,
+   ).toString('base64');
+
+   return {
+     ciphertext,
+     iv: iv.toString('base64'),
+     authTag,
+     encryptedKey,
+   };
+ }

class BaseClient {
  constructor(apiKey, options = {}) {
    this.apiKey = apiKey;
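For context on the hybrid scheme introduced above: encryptText envelope-encrypts with a fresh AES-256-GCM key that is itself RSA-OAEP-encrypted. The diff does not include the inverse operation; a minimal decryption sketch, assuming a decryptText helper name and access to the user's RSA private key (both hypothetical, not part of this diff), could look like:

const crypto = require('crypto');

// Hypothetical inverse of encryptText: recovers the plaintext from the
// { ciphertext, iv, authTag, encryptedKey } bundle produced above.
function decryptText({ ciphertext, iv, authTag, encryptedKey }, pemPrivateKey) {
  // RSA-OAEP-decrypt the AES key; padding and hash must match encryptText.
  const aesKey = crypto.privateDecrypt(
    {
      key: pemPrivateKey,
      padding: crypto.constants.RSA_PKCS1_OAEP_PADDING,
      oaepHash: 'sha256',
    },
    Buffer.from(encryptedKey, 'base64'),
  );

  // AES-256-GCM decrypt; setAuthTag is required for GCM integrity verification.
  const decipher = crypto.createDecipheriv('aes-256-gcm', aesKey, Buffer.from(iv, 'base64'));
  decipher.setAuthTag(Buffer.from(authTag, 'base64'));
  let plainText = decipher.update(ciphertext, 'base64', 'utf8');
  plainText += decipher.final('utf8');
  return plainText;
}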
@@ -366,14 +406,17 @@ class BaseClient {
 * context: TMessage[],
 * remainingContextTokens: number,
 * messagesToRefine: TMessage[],
- * }>} An object with three properties: `context`, `remainingContextTokens`, and `messagesToRefine`.
+ * summaryIndex: number,
+ * }>} An object with four properties: `context`, `summaryIndex`, `remainingContextTokens`, and `messagesToRefine`.
 * `context` is an array of messages that fit within the token limit.
+ * `summaryIndex` is the index of the first message in the `messagesToRefine` array.
 * `remainingContextTokens` is the number of tokens remaining within the limit after adding the messages to the context.
 * `messagesToRefine` is an array of messages that were not added to the context because they would have exceeded the token limit.
 */
async getMessagesWithinTokenLimit({ messages: _messages, maxContextTokens, instructions }) {
  // Every reply is primed with <|start|>assistant<|message|>, so we
  // start with 3 tokens for the label after all messages have been counted.
+ let summaryIndex = -1;
  let currentTokenCount = 3;
  const instructionsTokenCount = instructions?.tokenCount ?? 0;
  let remainingContextTokens =
@@ -406,12 +449,14 @@ class BaseClient {
  }

  const prunedMemory = messages;
+ summaryIndex = prunedMemory.length - 1;
  remainingContextTokens -= currentTokenCount;

  return {
    context: context.reverse(),
    remainingContextTokens,
    messagesToRefine: prunedMemory,
+   summaryIndex,
  };
}
@@ -454,7 +499,7 @@ class BaseClient {

  let orderedWithInstructions = this.addInstructions(orderedMessages, instructions);

- let { context, remainingContextTokens, messagesToRefine } =
+ let { context, remainingContextTokens, messagesToRefine, summaryIndex } =
    await this.getMessagesWithinTokenLimit({
      messages: orderedWithInstructions,
      instructions,
@@ -524,7 +569,7 @@ class BaseClient {
  }

  // Make sure to only continue summarization logic if the summary message was generated
- shouldSummarize = summaryMessage != null && shouldSummarize === true;
+ shouldSummarize = summaryMessage && shouldSummarize;

  logger.debug('[BaseClient] Context Count (2/2)', {
    remainingContextTokens,
@@ -534,18 +579,17 @@ class BaseClient {
  /** @type {Record<string, number> | undefined} */
  let tokenCountMap;
  if (buildTokenMap) {
-   const currentPayload = shouldSummarize ? orderedWithInstructions : context;
-   tokenCountMap = currentPayload.reduce((map, message, index) => {
+   tokenCountMap = orderedWithInstructions.reduce((map, message, index) => {
      const { messageId } = message;
      if (!messageId) {
        return map;
      }

-     if (shouldSummarize && index === messagesToRefine.length - 1 && !usePrevSummary) {
+     if (shouldSummarize && index === summaryIndex && !usePrevSummary) {
        map.summaryMessage = { ...summaryMessage, messageId, tokenCount: summaryTokenCount };
      }

-     map[messageId] = currentPayload[index].tokenCount;
+     map[messageId] = orderedWithInstructions[index].tokenCount;
      return map;
    }, {});
  }
@@ -634,9 +678,8 @@ class BaseClient {
    }
  }

- const balance = this.options.req?.app?.locals?.balance;
  if (
-   balance?.enabled &&
+   isEnabled(process.env.CHECK_BALANCE) &&
    supportsBalanceCheck[this.options.endpointType ?? this.options.endpoint]
  ) {
    await checkBalance({
@@ -847,18 +890,64 @@ class BaseClient {
 * @param {string | null} user
 */
async saveMessageToDatabase(message, endpointOptions, user = null) {
- if (this.user && user !== this.user) {
+ // Normalize the user information:
+ // If "user" is an object, use it; otherwise, if a string is passed use req.user (if available)
+ const currentUser =
+   user && typeof user === 'object'
+     ? user
+     : (this.options.req && this.options.req.user
+       ? this.options.req.user
+       : { id: user });
+ const currentUserId = currentUser.id || currentUser;
+
+ // Check if the client’s stored user matches the current user.
+ // (this.user might have been set earlier in setMessageOptions)
+ const storedUserId =
+   this.user && typeof this.user === 'object' ? this.user.id : this.user;
+ if (storedUserId && currentUserId && storedUserId !== currentUserId) {
    throw new Error('User mismatch.');
  }

+ // console.log('User ID:', currentUserId);
+
+ const dbUser = await getUserById(currentUserId, 'encryptionPublicKey');
+
+ // --- NEW ENCRYPTION BLOCK: Encrypt AI response if encryptionPublicKey exists ---
+ if (dbUser.encryptionPublicKey && message && message.text) {
+   try {
+     // Rebuild the PEM format if necessary.
+     const pemPublicKey = `-----BEGIN PUBLIC KEY-----\n${dbUser.encryptionPublicKey
+       .match(/.{1,64}/g)
+       .join('\n')}\n-----END PUBLIC KEY-----`;
+     const { ciphertext, iv, authTag, encryptedKey } = encryptText(
+       message.text,
+       pemPublicKey,
+     );
+     message.text = ciphertext;
+     message.iv = iv;
+     message.authTag = authTag;
+     message.encryptedKey = encryptedKey;
+     logger.debug('[BaseClient.saveMessageToDatabase] Encrypted message text');
+   } catch (err) {
+     logger.error('[BaseClient.saveMessageToDatabase] Error encrypting message text', err);
+   }
+ }
+ // --- End Encryption Block ---
+
+ // Build update parameters including encryption fields.
+ const updateParams = {
+   ...message,
+   endpoint: this.options.endpoint,
+   unfinished: false,
+   user: currentUserId, // store the user id (ensured to be a string)
+   iv: message.iv ?? null,
+   authTag: message.authTag ?? null,
+   encryptedKey: message.encryptedKey ?? null,
+ };
+
  const savedMessage = await saveMessage(
    this.options.req,
-   {
-     ...message,
-     endpoint: this.options.endpoint,
-     unfinished: false,
-     user,
-   },
+   updateParams,
    { context: 'api/app/clients/BaseClient.js - saveMessageToDatabase #saveMessage' },
  );
@@ -879,14 +968,13 @@ class BaseClient {
    : await getConvo(this.options.req?.user?.id, message.conversationId);

  const unsetFields = {};
- const exceptions = new Set(['spec', 'iconURL']);
  if (existingConvo != null) {
    this.fetchedConvo = true;
    for (const key in existingConvo) {
      if (!key) {
        continue;
      }
-     if (excludedKeys.has(key) && !exceptions.has(key)) {
+     if (excludedKeys.has(key)) {
        continue;
      }
@@ -1020,17 +1108,11 @@ class BaseClient {
  const processValue = (value) => {
    if (Array.isArray(value)) {
      for (let item of value) {
-       if (
-         !item ||
-         !item.type ||
-         item.type === ContentTypes.THINK ||
-         item.type === ContentTypes.ERROR ||
-         item.type === ContentTypes.IMAGE_URL
-       ) {
+       if (!item || !item.type || item.type === 'image_url') {
          continue;
        }

-       if (item.type === ContentTypes.TOOL_CALL && item.tool_call != null) {
+       if (item.type === 'tool_call' && item.tool_call != null) {
          const toolName = item.tool_call?.name || '';
          if (toolName != null && toolName && typeof toolName === 'string') {
            numTokens += this.getTokenCount(toolName);
@@ -1126,13 +1208,9 @@ class BaseClient {
    return message;
  }

- const files = await getFiles(
-   {
-     file_id: { $in: fileIds },
-   },
-   {},
-   {},
- );
+ const files = await getFiles({
+   file_id: { $in: fileIds },
+ });

  await this.addImageURLs(message, files, this.visionMode);
@@ -1158,4 +1236,4 @@ class BaseClient {
  }
}

- module.exports = BaseClient;
+ module.exports = BaseClient;
@@ -198,11 +198,7 @@ class GoogleClient extends BaseClient {
 */
checkVisionRequest(attachments) {
  /* Validation vision request */
- this.defaultVisionModel =
-   this.options.visionModel ??
-   (!EXCLUDED_GENAI_MODELS.test(this.modelOptions.model)
-     ? this.modelOptions.model
-     : 'gemini-pro-vision');
+ this.defaultVisionModel = this.options.visionModel ?? 'gemini-pro-vision';
  const availableModels = this.options.modelsConfig?.[EModelEndpoint.google];
  this.isVisionModel = validateVisionModel({ model: this.modelOptions.model, availableModels });
@@ -5,7 +5,6 @@ const { SplitStreamHandler, GraphEvents } = require('@librechat/agents');
const {
  Constants,
  ImageDetail,
  ContentTypes,
  EModelEndpoint,
  resolveHeaders,
  KnownEndpoints,
@@ -226,6 +225,10 @@ class OpenAIClient extends BaseClient {
    logger.debug('Using Azure endpoint');
  }

+ if (this.useOpenRouter) {
+   this.completionsUrl = 'https://openrouter.ai/api/v1/chat/completions';
+ }
+
  return this;
}
@@ -502,24 +505,8 @@ class OpenAIClient extends BaseClient {
  if (promptPrefix && this.isOmni === true) {
    const lastUserMessageIndex = payload.findLastIndex((message) => message.role === 'user');
    if (lastUserMessageIndex !== -1) {
-     if (Array.isArray(payload[lastUserMessageIndex].content)) {
-       const firstTextPartIndex = payload[lastUserMessageIndex].content.findIndex(
-         (part) => part.type === ContentTypes.TEXT,
-       );
-       if (firstTextPartIndex !== -1) {
-         const firstTextPart = payload[lastUserMessageIndex].content[firstTextPartIndex];
-         payload[lastUserMessageIndex].content[firstTextPartIndex].text =
-           `${promptPrefix}\n${firstTextPart.text}`;
-       } else {
-         payload[lastUserMessageIndex].content.unshift({
-           type: ContentTypes.TEXT,
-           text: promptPrefix,
-         });
-       }
-     } else {
-       payload[lastUserMessageIndex].content =
-         `${promptPrefix}\n${payload[lastUserMessageIndex].content}`;
-     }
+     payload[lastUserMessageIndex].content =
+       `${promptPrefix}\n${payload[lastUserMessageIndex].content}`;
    }
  }
@@ -1120,16 +1107,6 @@ ${convo}
return (msg) => {
  if (msg.text != null && msg.text && msg.text.startsWith(':::thinking')) {
    msg.text = msg.text.replace(/:::thinking.*?:::/gs, '').trim();
  } else if (msg.content != null) {
    /** @type {import('@librechat/agents').MessageContentComplex} */
    const newContent = [];
    for (let part of msg.content) {
      if (part.think != null) {
        continue;
      }
      newContent.push(part);
    }
    msg.content = newContent;
  }

  return msg;
@@ -1181,6 +1158,10 @@ ${convo}
    opts.httpAgent = new HttpsProxyAgent(this.options.proxy);
  }

+ if (this.isVisionModel) {
+   modelOptions.max_tokens = 4000;
+ }
+
  /** @type {TAzureConfig | undefined} */
  const azureConfig = this.options?.req?.app?.locals?.[EModelEndpoint.azureOpenAI];
@@ -1291,29 +1272,6 @@ ${convo}
  });
}

- /** Note: OpenAI Web Search models do not support any known parameters besdies `max_tokens` */
- if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) {
-   const searchExcludeParams = [
-     'frequency_penalty',
-     'presence_penalty',
-     'temperature',
-     'top_p',
-     'top_k',
-     'stop',
-     'logit_bias',
-     'seed',
-     'response_format',
-     'n',
-     'logprobs',
-     'user',
-   ];
-
-   this.options.dropParams = this.options.dropParams || [];
-   this.options.dropParams = [
-     ...new Set([...this.options.dropParams, ...searchExcludeParams]),
-   ];
- }
-
if (this.options.dropParams && Array.isArray(this.options.dropParams)) {
  this.options.dropParams.forEach((param) => {
    delete modelOptions[param];
@@ -1342,11 +1300,15 @@ ${convo}
let streamResolve;

if (
- (!this.isOmni || /^o1-(mini|preview)/i.test(modelOptions.model)) &&
- modelOptions.reasoning_effort != null
+ this.isOmni === true &&
+ (this.azure || /o1(?!-(?:mini|preview)).*$/.test(modelOptions.model)) &&
+ !/o3-.*$/.test(this.modelOptions.model) &&
+ modelOptions.stream
) {
+ delete modelOptions.stream;
+ delete modelOptions.stop;
+ } else if (!this.isOmni && modelOptions.reasoning_effort != null) {
  delete modelOptions.reasoning_effort;
  delete modelOptions.temperature;
}

let reasoningKey = 'reasoning_content';
@@ -5,8 +5,9 @@ const { addImages, buildErrorInput, buildPromptPrefix } = require('./output_pars
const { initializeCustomAgent, initializeFunctionsAgent } = require('./agents');
const { processFileURL } = require('~/server/services/Files/process');
const { EModelEndpoint } = require('librechat-data-provider');
- const { checkBalance } = require('~/models/balanceMethods');
const { formatLangChainMessages } = require('./prompts');
+ const checkBalance = require('~/models/checkBalance');
+ const { isEnabled } = require('~/server/utils');
const { extractBaseURL } = require('~/utils');
const { loadTools } = require('./tools/util');
const { logger } = require('~/config');
@@ -335,8 +336,7 @@ class PluginsClient extends OpenAIClient {
    }
  }

- const balance = this.options.req?.app?.locals?.balance;
- if (balance?.enabled) {
+ if (isEnabled(process.env.CHECK_BALANCE)) {
    await checkBalance({
      req: this.options.req,
      res: this.options.res,
@@ -1,8 +1,8 @@
const { promptTokensEstimate } = require('openai-chat-tokens');
const { EModelEndpoint, supportsBalanceCheck } = require('librechat-data-provider');
const { formatFromLangChain } = require('~/app/clients/prompts');
- const { getBalanceConfig } = require('~/server/services/Config');
- const { checkBalance } = require('~/models/balanceMethods');
+ const checkBalance = require('~/models/checkBalance');
+ const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');

const createStartHandler = ({
@@ -49,8 +49,8 @@ const createStartHandler = ({
  prelimPromptTokens += tokenBuffer;

  try {
-   const balance = await getBalanceConfig();
-   if (balance?.enabled && supportsBalanceCheck[EModelEndpoint.openAI]) {
+   // TODO: if plugins extends to non-OpenAI models, this will need to be updated
+   if (isEnabled(process.env.CHECK_BALANCE) && supportsBalanceCheck[EModelEndpoint.openAI]) {
      const generations =
        initialMessageCount && messages.length > initialMessageCount
          ? messages.slice(initialMessageCount)
@@ -325,37 +325,4 @@ describe('formatAgentMessages', () => {
    );
    expect(result[0].content).not.toContain('Analyzing the problem...');
  });
-
- it('should exclude ERROR type content parts', () => {
-   const payload = [
-     {
-       role: 'assistant',
-       content: [
-         { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Hello there' },
-         {
-           type: ContentTypes.ERROR,
-           [ContentTypes.ERROR]:
-             'An error occurred while processing the request: Something went wrong',
-         },
-         { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Final answer' },
-       ],
-     },
-   ];
-
-   const result = formatAgentMessages(payload);
-
-   expect(result).toHaveLength(1);
-   expect(result[0]).toBeInstanceOf(AIMessage);
-   expect(result[0].content).toEqual([
-     { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Hello there' },
-     { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Final answer' },
-   ]);
-
-   // Make sure no error content exists in the result
-   const hasErrorContent = result[0].content.some(
-     (item) =>
-       item.type === ContentTypes.ERROR || JSON.stringify(item).includes('An error occurred'),
-   );
-   expect(hasErrorContent).toBe(false);
- });
});
@@ -211,8 +211,6 @@ const formatAgentMessages = (payload) => {
    } else if (part.type === ContentTypes.THINK) {
      hasReasoning = true;
      continue;
-   } else if (part.type === ContentTypes.ERROR || part.type === ContentTypes.AGENT_UPDATE) {
-     continue;
    } else {
      currentContent.push(part);
    }
@@ -164,7 +164,7 @@ describe('BaseClient', () => {
  const result = await TestClient.getMessagesWithinTokenLimit({ messages });

  expect(result.context).toEqual(expectedContext);
- expect(result.messagesToRefine.length - 1).toEqual(expectedIndex);
+ expect(result.summaryIndex).toEqual(expectedIndex);
  expect(result.remainingContextTokens).toBe(expectedRemainingContextTokens);
  expect(result.messagesToRefine).toEqual(expectedMessagesToRefine);
});
@@ -200,7 +200,7 @@ describe('BaseClient', () => {
  const result = await TestClient.getMessagesWithinTokenLimit({ messages });

  expect(result.context).toEqual(expectedContext);
- expect(result.messagesToRefine.length - 1).toEqual(expectedIndex);
+ expect(result.summaryIndex).toEqual(expectedIndex);
  expect(result.remainingContextTokens).toBe(expectedRemainingContextTokens);
  expect(result.messagesToRefine).toEqual(expectedMessagesToRefine);
});
@@ -136,11 +136,10 @@ OpenAI.mockImplementation(() => ({
}));

describe('OpenAIClient', () => {
- const mockSet = jest.fn();
- const mockCache = { set: mockSet };
-
  beforeEach(() => {
+   const mockCache = {
+     get: jest.fn().mockResolvedValue({}),
+     set: jest.fn(),
+   };
    getLogStores.mockReturnValue(mockCache);
  });
  let client;
@@ -172,7 +172,7 @@ Error Message: ${error.message}`);
  {
    type: ContentTypes.IMAGE_URL,
    image_url: {
-     url: `data:image/png;base64,${base64}`,
+     url: `data:image/jpeg;base64,${base64}`,
    },
  },
];
@@ -21,7 +21,6 @@ const {
} = require('../');
const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
- const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { createMCPTool } = require('~/server/services/MCP');
const { loadSpecs } = require('./loadSpecs');
const { logger } = require('~/config');
@@ -91,6 +90,45 @@ const validateTools = async (user, tools = []) => {
  }
};

+ const loadAuthValues = async ({ userId, authFields, throwError = true }) => {
+   let authValues = {};
+
+   /**
+    * Finds the first non-empty value for the given authentication field, supporting alternate fields.
+    * @param {string[]} fields Array of strings representing the authentication fields. Supports alternate fields delimited by "||".
+    * @returns {Promise<{ authField: string, authValue: string} | null>} An object containing the authentication field and value, or null if not found.
+    */
+   const findAuthValue = async (fields) => {
+     for (const field of fields) {
+       let value = process.env[field];
+       if (value) {
+         return { authField: field, authValue: value };
+       }
+       try {
+         value = await getUserPluginAuthValue(userId, field, throwError);
+       } catch (err) {
+         if (field === fields[fields.length - 1] && !value) {
+           throw err;
+         }
+       }
+       if (value) {
+         return { authField: field, authValue: value };
+       }
+     }
+     return null;
+   };
+
+   for (let authField of authFields) {
+     const fields = authField.split('||');
+     const result = await findAuthValue(fields);
+     if (result) {
+       authValues[result.authField] = result.authValue;
+     }
+   }
+
+   return authValues;
+ };
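As a usage sketch for loadAuthValues as defined above (the field names are illustrative, not from this diff): environment variables win over user-stored plugin credentials, and "||" marks alternate field names, per the JSDoc.

// Hypothetical call: resolves each auth field from process.env first,
// then falls back to getUserPluginAuthValue for the given user.
async function getSearchKey(userId) {
  const authValues = await loadAuthValues({
    userId,
    authFields: ['SERPAPI_API_KEY||SERPAPI_KEY'],
  });
  return authValues.SERPAPI_API_KEY ?? authValues.SERPAPI_KEY;
}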
/** @typedef {typeof import('@langchain/core/tools').Tool} ToolConstructor */
/** @typedef {import('@langchain/core/tools').Tool} Tool */

@@ -310,6 +348,7 @@ const loadTools = async ({

module.exports = {
  loadToolWithAuth,
+ loadAuthValues,
  validateTools,
  loadTools,
};
@@ -1,8 +1,9 @@
- const { validateTools, loadTools } = require('./handleTools');
+ const { validateTools, loadTools, loadAuthValues } = require('./handleTools');
const handleOpenAIErrors = require('./handleOpenAIErrors');

module.exports = {
  handleOpenAIErrors,
+ loadAuthValues,
  validateTools,
  loadTools,
};
api/cache/getLogStores.js (5 changes)
@@ -49,10 +49,6 @@ const genTitle = isRedisEnabled
  ? new Keyv({ store: keyvRedis, ttl: Time.TWO_MINUTES })
  : new Keyv({ namespace: CacheKeys.GEN_TITLE, ttl: Time.TWO_MINUTES });

- const s3ExpiryInterval = isRedisEnabled
-   ? new Keyv({ store: keyvRedis, ttl: Time.THIRTY_MINUTES })
-   : new Keyv({ namespace: CacheKeys.S3_EXPIRY_INTERVAL, ttl: Time.THIRTY_MINUTES });
-
const modelQueries = isEnabled(process.env.USE_REDIS)
  ? new Keyv({ store: keyvRedis })
  : new Keyv({ namespace: CacheKeys.MODEL_QUERIES });
@@ -93,7 +89,6 @@ const namespaces = {
  [CacheKeys.ABORT_KEYS]: abortKeys,
  [CacheKeys.TOKEN_CONFIG]: tokenConfig,
  [CacheKeys.GEN_TITLE]: genTitle,
- [CacheKeys.S3_EXPIRY_INTERVAL]: s3ExpiryInterval,
  [CacheKeys.MODEL_QUERIES]: modelQueries,
  [CacheKeys.AUDIO_RUNS]: audioRuns,
  [CacheKeys.MESSAGES]: messages,
api/cache/keyvRedis.js (6 changes)
@@ -9,7 +9,7 @@ const { REDIS_URI, USE_REDIS, USE_REDIS_CLUSTER, REDIS_CA, REDIS_KEY_PREFIX, RED
let keyvRedis;
const redis_prefix = REDIS_KEY_PREFIX || '';
- const redis_max_listeners = Number(REDIS_MAX_LISTENERS) || 40;
+ const redis_max_listeners = Number(REDIS_MAX_LISTENERS) || 10;

function mapURI(uri) {
  const regex =
@@ -77,10 +77,10 @@ if (REDIS_URI && isEnabled(USE_REDIS)) {
  keyvRedis.on('error', (err) => logger.error('KeyvRedis connection error:', err));
  keyvRedis.setMaxListeners(redis_max_listeners);
  logger.info(
-   '[Optional] Redis initialized. If you have issues, or seeing older values, disable it or flush cache to refresh values.',
+   '[Optional] Redis initialized. Note: Redis support is experimental. If you have issues, disable it. Cache needs to be flushed for values to refresh.',
  );
} else {
- logger.info('[Optional] Redis not initialized.');
+ logger.info('[Optional] Redis not initialized. Note: Redis support is experimental.');
}

module.exports = keyvRedis;
@@ -1,4 +1,3 @@
- const axios = require('axios');
const { EventSource } = require('eventsource');
const { Time, CacheKeys } = require('librechat-data-provider');
const logger = require('./winston');
@@ -48,46 +47,9 @@ const sendEvent = (res, event) => {
  res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`);
};

- /**
-  * Creates and configures an Axios instance with optional proxy settings.
-  *
-  * @typedef {import('axios').AxiosInstance} AxiosInstance
-  * @typedef {import('axios').AxiosProxyConfig} AxiosProxyConfig
-  *
-  * @returns {AxiosInstance} A configured Axios instance
-  * @throws {Error} If there's an issue creating the Axios instance or parsing the proxy URL
-  */
- function createAxiosInstance() {
-   const instance = axios.create();
-
-   if (process.env.proxy) {
-     try {
-       const url = new URL(process.env.proxy);
-
-       /** @type {AxiosProxyConfig} */
-       const proxyConfig = {
-         host: url.hostname.replace(/^\[|\]$/g, ''),
-         protocol: url.protocol.replace(':', ''),
-       };
-
-       if (url.port) {
-         proxyConfig.port = parseInt(url.port, 10);
-       }
-
-       instance.defaults.proxy = proxyConfig;
-     } catch (error) {
-       console.error('Error parsing proxy URL:', error);
-       throw new Error(`Invalid proxy URL: ${process.env.proxy}`);
-     }
-   }
-
-   return instance;
- }
-
module.exports = {
  logger,
  sendEvent,
  getMCPManager,
- createAxiosInstance,
  getFlowStateManager,
};
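The removed helper is exercised by the (also removed) test file below; as a quick usage sketch under the same assumptions:

// With process.env.proxy set, the returned instance carries a parsed proxy config.
process.env.proxy = 'http://proxy.example.com:8080'; // hypothetical proxy
const instance = createAxiosInstance();
// instance.defaults.proxy -> { host: 'proxy.example.com', protocol: 'http', port: 8080 }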
@@ -1,126 +0,0 @@
- const axios = require('axios');
- const { createAxiosInstance } = require('./index');
-
- // Mock axios
- jest.mock('axios', () => ({
-   interceptors: {
-     request: { use: jest.fn(), eject: jest.fn() },
-     response: { use: jest.fn(), eject: jest.fn() },
-   },
-   create: jest.fn().mockReturnValue({
-     defaults: {
-       proxy: null,
-     },
-     get: jest.fn().mockResolvedValue({ data: {} }),
-     post: jest.fn().mockResolvedValue({ data: {} }),
-     put: jest.fn().mockResolvedValue({ data: {} }),
-     delete: jest.fn().mockResolvedValue({ data: {} }),
-   }),
-   get: jest.fn().mockResolvedValue({ data: {} }),
-   post: jest.fn().mockResolvedValue({ data: {} }),
-   put: jest.fn().mockResolvedValue({ data: {} }),
-   delete: jest.fn().mockResolvedValue({ data: {} }),
-   reset: jest.fn().mockImplementation(function () {
-     this.get.mockClear();
-     this.post.mockClear();
-     this.put.mockClear();
-     this.delete.mockClear();
-     this.create.mockClear();
-   }),
- }));
-
- describe('createAxiosInstance', () => {
-   const originalEnv = process.env;
-
-   beforeEach(() => {
-     // Reset mocks
-     jest.clearAllMocks();
-     // Create a clean copy of process.env
-     process.env = { ...originalEnv };
-     // Default: no proxy
-     delete process.env.proxy;
-   });
-
-   afterAll(() => {
-     // Restore original process.env
-     process.env = originalEnv;
-   });
-
-   test('creates an axios instance without proxy when no proxy env is set', () => {
-     const instance = createAxiosInstance();
-
-     expect(axios.create).toHaveBeenCalledTimes(1);
-     expect(instance.defaults.proxy).toBeNull();
-   });
-
-   test('configures proxy correctly with hostname and protocol', () => {
-     process.env.proxy = 'http://example.com';
-
-     const instance = createAxiosInstance();
-
-     expect(axios.create).toHaveBeenCalledTimes(1);
-     expect(instance.defaults.proxy).toEqual({
-       host: 'example.com',
-       protocol: 'http',
-     });
-   });
-
-   test('configures proxy correctly with hostname, protocol and port', () => {
-     process.env.proxy = 'https://proxy.example.com:8080';
-
-     const instance = createAxiosInstance();
-
-     expect(axios.create).toHaveBeenCalledTimes(1);
-     expect(instance.defaults.proxy).toEqual({
-       host: 'proxy.example.com',
-       protocol: 'https',
-       port: 8080,
-     });
-   });
-
-   test('handles proxy URLs with authentication', () => {
-     process.env.proxy = 'http://user:pass@proxy.example.com:3128';
-
-     const instance = createAxiosInstance();
-
-     expect(axios.create).toHaveBeenCalledTimes(1);
-     expect(instance.defaults.proxy).toEqual({
-       host: 'proxy.example.com',
-       protocol: 'http',
-       port: 3128,
-       // Note: The current implementation doesn't handle auth - if needed, add this functionality
-     });
-   });
-
-   test('throws error when proxy URL is invalid', () => {
-     process.env.proxy = 'invalid-url';
-
-     expect(() => createAxiosInstance()).toThrow('Invalid proxy URL');
-     expect(axios.create).toHaveBeenCalledTimes(1);
-   });
-
-   // If you want to test the actual URL parsing more thoroughly
-   test('handles edge case proxy URLs correctly', () => {
-     // IPv6 address
-     process.env.proxy = 'http://[::1]:8080';
-
-     let instance = createAxiosInstance();
-
-     expect(instance.defaults.proxy).toEqual({
-       host: '::1',
-       protocol: 'http',
-       port: 8080,
-     });
-
-     // URL with path (which should be ignored for proxy config)
-     process.env.proxy = 'http://proxy.example.com:8080/some/path';
-
-     instance = createAxiosInstance();
-
-     expect(instance.defaults.proxy).toEqual({
-       host: 'proxy.example.com',
-       protocol: 'http',
-       port: 8080,
-     });
-   });
- });
@@ -4,11 +4,7 @@ require('winston-daily-rotate-file');

const logDir = path.join(__dirname, '..', 'logs');

- const { NODE_ENV, DEBUG_LOGGING = false } = process.env;
-
- const useDebugLogging =
-   (typeof DEBUG_LOGGING === 'string' && DEBUG_LOGGING?.toLowerCase() === 'true') ||
-   DEBUG_LOGGING === true;
+ const { NODE_ENV } = process.env;

const levels = {
  error: 0,
@@ -40,10 +36,9 @@ const fileFormat = winston.format.combine(
  winston.format.splat(),
);

- const logLevel = useDebugLogging ? 'debug' : 'error';
const transports = [
  new winston.transports.DailyRotateFile({
-   level: logLevel,
+   level: 'debug',
    filename: `${logDir}/meiliSync-%DATE%.log`,
    datePattern: 'YYYY-MM-DD',
    zippedArchive: true,
@@ -53,6 +48,14 @@ const transports = [
  }),
];

+ // if (NODE_ENV !== 'production') {
+ //   transports.push(
+ //     new winston.transports.Console({
+ //       format: winston.format.combine(winston.format.colorize(), winston.format.simple()),
+ //     }),
+ //   );
+ // }
+
const consoleFormat = winston.format.combine(
  winston.format.colorize({ all: true }),
  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
@@ -5,7 +5,7 @@ const { redactFormat, redactMessage, debugTraverse, jsonTruncateFormat } = requi
const logDir = path.join(__dirname, '..', 'logs');

- const { NODE_ENV, DEBUG_LOGGING = true, CONSOLE_JSON = false, DEBUG_CONSOLE = false } = process.env;
+ const { NODE_ENV, DEBUG_LOGGING = true, DEBUG_CONSOLE = false, CONSOLE_JSON = false } = process.env;

const useConsoleJson =
  (typeof CONSOLE_JSON === 'string' && CONSOLE_JSON?.toLowerCase() === 'true') ||
@@ -15,10 +15,6 @@ const useDebugConsole =
  (typeof DEBUG_CONSOLE === 'string' && DEBUG_CONSOLE?.toLowerCase() === 'true') ||
  DEBUG_CONSOLE === true;

- const useDebugLogging =
-   (typeof DEBUG_LOGGING === 'string' && DEBUG_LOGGING?.toLowerCase() === 'true') ||
-   DEBUG_LOGGING === true;
-
const levels = {
  error: 0,
  warn: 1,
@@ -61,9 +57,28 @@ const transports = [
    maxFiles: '14d',
    format: fileFormat,
  }),
+ // new winston.transports.DailyRotateFile({
+ //   level: 'info',
+ //   filename: `${logDir}/info-%DATE%.log`,
+ //   datePattern: 'YYYY-MM-DD',
+ //   zippedArchive: true,
+ //   maxSize: '20m',
+ //   maxFiles: '14d',
+ // }),
];

- if (useDebugLogging) {
+ // if (NODE_ENV !== 'production') {
+ //   transports.push(
+ //     new winston.transports.Console({
+ //       format: winston.format.combine(winston.format.colorize(), winston.format.simple()),
+ //     }),
+ //   );
+ // }
+
+ if (
+   (typeof DEBUG_LOGGING === 'string' && DEBUG_LOGGING?.toLowerCase() === 'true') ||
+   DEBUG_LOGGING === true
+ ) {
  transports.push(
    new winston.transports.DailyRotateFile({
      level: 'debug',
@@ -92,16 +107,10 @@ const consoleFormat = winston.format.combine(
  }),
);

- // Determine console log level
- let consoleLogLevel = 'info';
- if (useDebugConsole) {
-   consoleLogLevel = 'debug';
- }
-
if (useDebugConsole) {
  transports.push(
    new winston.transports.Console({
-     level: consoleLogLevel,
+     level: 'debug',
      format: useConsoleJson
        ? winston.format.combine(fileFormat, jsonTruncateFormat(), winston.format.json())
        : winston.format.combine(fileFormat, debugTraverse),
@@ -110,14 +119,14 @@ if (useDebugConsole) {
} else if (useConsoleJson) {
  transports.push(
    new winston.transports.Console({
-     level: consoleLogLevel,
+     level: 'info',
      format: winston.format.combine(fileFormat, jsonTruncateFormat(), winston.format.json()),
    }),
  );
} else {
  transports.push(
    new winston.transports.Console({
-     level: consoleLogLevel,
+     level: 'info',
      format: consoleFormat,
    }),
  );
@@ -1,6 +1,6 @@
const { MeiliSearch } = require('meilisearch');
- const { Conversation } = require('~/models/Conversation');
- const { Message } = require('~/models/Message');
+ const Conversation = require('~/models/schema/convoSchema');
+ const Message = require('~/models/schema/messageSchema');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');
@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
- const { actionSchema } = require('@librechat/data-schemas');
+ const actionSchema = require('./schema/action');

const Action = mongoose.model('action', actionSchema);
@@ -9,7 +9,7 @@ const {
  removeAgentFromAllProjects,
} = require('./Project');
const getLogStores = require('~/cache/getLogStores');
- const { agentSchema } = require('@librechat/data-schemas');
+ const agentSchema = require('./schema/agent');

const Agent = mongoose.model('agent', agentSchema);
@@ -46,10 +46,6 @@ const loadAgent = async ({ req, agent_id }) => {
    id: agent_id,
  });

- if (!agent) {
-   return null;
- }
-
  if (agent.author.toString() === req.user.id) {
    return agent;
  }
@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
- const { assistantSchema } = require('@librechat/data-schemas');
+ const assistantSchema = require('./schema/assistant');

const Assistant = mongoose.model('assistant', assistantSchema);
@@ -1,4 +1,44 @@
const mongoose = require('mongoose');
- const { balanceSchema } = require('@librechat/data-schemas');
+ const balanceSchema = require('./schema/balance');
+ const { getMultiplier } = require('./tx');
+ const { logger } = require('~/config');
+
+ balanceSchema.statics.check = async function ({
+   user,
+   model,
+   endpoint,
+   valueKey,
+   tokenType,
+   amount,
+   endpointTokenConfig,
+ }) {
+   const multiplier = getMultiplier({ valueKey, tokenType, model, endpoint, endpointTokenConfig });
+   const tokenCost = amount * multiplier;
+   const { tokenCredits: balance } = (await this.findOne({ user }, 'tokenCredits').lean()) ?? {};
+
+   logger.debug('[Balance.check]', {
+     user,
+     model,
+     endpoint,
+     valueKey,
+     tokenType,
+     amount,
+     balance,
+     multiplier,
+     endpointTokenConfig: !!endpointTokenConfig,
+   });
+
+   if (!balance) {
+     return {
+       canSpend: false,
+       balance: 0,
+       tokenCost,
+     };
+   }
+
+   logger.debug('[Balance.check]', { tokenCost });
+
+   return { canSpend: balance >= tokenCost, balance, tokenCost };
+ };

module.exports = mongoose.model('Balance', balanceSchema);
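A usage sketch for the Balance.check static registered above (variable names and values are illustrative, not from this diff): callers can gate a request on the projected token cost before spending.

const Balance = require('~/models/Balance'); // path assumed from this diff

async function assertCanSpend(userId, promptTokens) {
  // getMultiplier converts raw tokens into credits for the given model/endpoint.
  const { canSpend, balance, tokenCost } = await Balance.check({
    user: userId,
    model: 'gpt-4o',      // illustrative model
    endpoint: 'openAI',   // illustrative endpoint key
    tokenType: 'prompt',
    amount: promptTokens,
  });
  if (!canSpend) {
    throw new Error(`Insufficient balance: ${balance} credits < ${tokenCost} needed`);
  }
}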
@@ -1,9 +1,5 @@
const mongoose = require('mongoose');
+ const Banner = require('./schema/banner');
const logger = require('~/config/winston');
- const { bannerSchema } = require('@librechat/data-schemas');
-
- const Banner = mongoose.model('Banner', bannerSchema);

/**
 * Retrieves the current active banner.
 * @returns {Promise<Object|null>} The active banner object or null if no active banner is found.
@@ -28,4 +24,4 @@ const getBanner = async (user) => {
  }
};

- module.exports = { Banner, getBanner };
+ module.exports = { getBanner };
@@ -1,4 +1,5 @@
const { logger } = require('~/config');
+ // const { Categories } = require('./schema/categories');

const options = [
  {
@@ -15,6 +15,19 @@ const searchConversation = async (conversationId) => {
    throw new Error('Error searching conversation');
  }
};
+ /**
+  * Searches for a conversation by conversationId and returns associated file ids.
+  * @param {string} conversationId - The conversation's ID.
+  * @returns {Promise<string[] | null>}
+  */
+ const getConvoFiles = async (conversationId) => {
+   try {
+     return (await Conversation.findOne({ conversationId }, 'files').lean())?.files ?? [];
+   } catch (error) {
+     logger.error('[getConvoFiles] Error getting conversation files', error);
+     throw new Error('Error getting conversation files');
+   }
+ };

/**
 * Retrieves a single conversation for a given user and conversation ID.
@@ -60,20 +73,6 @@ const deleteNullOrEmptyConversations = async () => {
  }
};

- /**
-  * Searches for a conversation by conversationId and returns associated file ids.
-  * @param {string} conversationId - The conversation's ID.
-  * @returns {Promise<string[] | null>}
-  */
- const getConvoFiles = async (conversationId) => {
-   try {
-     return (await Conversation.findOne({ conversationId }, 'files').lean())?.files ?? [];
-   } catch (error) {
-     logger.error('[getConvoFiles] Error getting conversation files', error);
-     throw new Error('Error getting conversation files');
-   }
- };
-
module.exports = {
  Conversation,
  getConvoFiles,
@@ -1,11 +1,7 @@
const mongoose = require('mongoose');
+ const ConversationTag = require('./schema/conversationTagSchema');
+ const Conversation = require('./schema/convoSchema');
const logger = require('~/config/winston');

- const { conversationTagSchema } = require('@librechat/data-schemas');
-
- const ConversationTag = mongoose.model('ConversationTag', conversationTagSchema);

/**
 * Retrieves all conversation tags for a user.
 * @param {string} user - The user ID.
@@ -1,6 +1,5 @@
const mongoose = require('mongoose');
- const { fileSchema } = require('@librechat/data-schemas');
- const { logger } = require('~/config');
+ const fileSchema = require('./schema/fileSchema');

const File = mongoose.model('File', fileSchema);
@@ -8,7 +7,7 @@ const File = mongoose.model('File', fileSchema);
 * Finds a file by its file_id with additional query options.
 * @param {string} file_id - The unique identifier of the file.
 * @param {object} options - Query options for filtering, projection, etc.
- * @returns {Promise<IMongoFile>} A promise that resolves to the file document or null.
+ * @returns {Promise<MongoFile>} A promise that resolves to the file document or null.
 */
const findFileById = async (file_id, options = {}) => {
  return await File.findOne({ file_id, ...options }).lean();
@@ -18,46 +17,18 @@ const findFileById = async (file_id, options = {}) => {
 * Retrieves files matching a given filter, sorted by the most recently updated.
 * @param {Object} filter - The filter criteria to apply.
 * @param {Object} [_sortOptions] - Optional sort parameters.
- * @param {Object|String} [selectFields={ text: 0 }] - Fields to include/exclude in the query results.
- *   Default excludes the 'text' field.
- * @returns {Promise<Array<IMongoFile>>} A promise that resolves to an array of file documents.
+ * @returns {Promise<Array<MongoFile>>} A promise that resolves to an array of file documents.
 */
- const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => {
+ const getFiles = async (filter, _sortOptions) => {
  const sortOptions = { updatedAt: -1, ..._sortOptions };
- return await File.find(filter).select(selectFields).sort(sortOptions).lean();
- };
-
- /**
-  * Retrieves tool files (files that are embedded or have a fileIdentifier) from an array of file IDs
-  * @param {string[]} fileIds - Array of file_id strings to search for
-  * @returns {Promise<Array<IMongoFile>>} Files that match the criteria
-  */
- const getToolFilesByIds = async (fileIds) => {
-   if (!fileIds || !fileIds.length) {
-     return [];
-   }
-
-   try {
-     const filter = {
-       file_id: { $in: fileIds },
-       $or: [{ embedded: true }, { 'metadata.fileIdentifier': { $exists: true } }],
-     };
-
-     const selectFields = { text: 0 };
-     const sortOptions = { updatedAt: -1 };
-
-     return await getFiles(filter, sortOptions, selectFields);
-   } catch (error) {
-     logger.error('[getToolFilesByIds] Error retrieving tool files:', error);
-     throw new Error('Error retrieving tool files');
-   }
+ return await File.find(filter).sort(sortOptions).lean();
};
/**
|
||||
* Creates a new file with a TTL of 1 hour.
|
||||
* @param {IMongoFile} data - The file data to be created, must contain file_id.
|
||||
* @param {MongoFile} data - The file data to be created, must contain file_id.
|
||||
* @param {boolean} disableTTL - Whether to disable the TTL.
|
||||
* @returns {Promise<IMongoFile>} A promise that resolves to the created file document.
|
||||
* @returns {Promise<MongoFile>} A promise that resolves to the created file document.
|
||||
*/
|
||||
const createFile = async (data, disableTTL) => {
|
||||
const fileData = {
|
||||
@@ -77,8 +48,8 @@ const createFile = async (data, disableTTL) => {
|
||||
|
||||
/**
|
||||
* Updates a file identified by file_id with new data and removes the TTL.
|
||||
* @param {IMongoFile} data - The data to update, must contain file_id.
|
||||
* @returns {Promise<IMongoFile>} A promise that resolves to the updated file document.
|
||||
* @param {MongoFile} data - The data to update, must contain file_id.
|
||||
* @returns {Promise<MongoFile>} A promise that resolves to the updated file document.
|
||||
*/
|
||||
const updateFile = async (data) => {
|
||||
const { file_id, ...update } = data;
|
||||
@@ -91,8 +62,8 @@ const updateFile = async (data) => {
|
||||
|
||||
/**
|
||||
* Increments the usage of a file identified by file_id.
|
||||
* @param {IMongoFile} data - The data to update, must contain file_id and the increment value for usage.
|
||||
* @returns {Promise<IMongoFile>} A promise that resolves to the updated file document.
|
||||
* @param {MongoFile} data - The data to update, must contain file_id and the increment value for usage.
|
||||
* @returns {Promise<MongoFile>} A promise that resolves to the updated file document.
|
||||
*/
|
||||
const updateFileUsage = async (data) => {
|
||||
const { file_id, inc = 1 } = data;
|
||||
@@ -106,7 +77,7 @@ const updateFileUsage = async (data) => {
|
||||
/**
|
||||
* Deletes a file identified by file_id.
|
||||
* @param {string} file_id - The unique identifier of the file to delete.
|
||||
* @returns {Promise<IMongoFile>} A promise that resolves to the deleted file document or null.
|
||||
* @returns {Promise<MongoFile>} A promise that resolves to the deleted file document or null.
|
||||
*/
|
||||
const deleteFile = async (file_id) => {
|
||||
return await File.findOneAndDelete({ file_id }).lean();
|
||||
@@ -115,7 +86,7 @@ const deleteFile = async (file_id) => {
|
||||
/**
|
||||
* Deletes a file identified by a filter.
|
||||
* @param {object} filter - The filter criteria to apply.
|
||||
* @returns {Promise<IMongoFile>} A promise that resolves to the deleted file document or null.
|
||||
* @returns {Promise<MongoFile>} A promise that resolves to the deleted file document or null.
|
||||
*/
|
||||
const deleteFileByFilter = async (filter) => {
|
||||
return await File.findOneAndDelete(filter).lean();
|
||||
@@ -134,38 +105,14 @@ const deleteFiles = async (file_ids, user) => {
|
||||
return await File.deleteMany(deleteQuery);
|
||||
};
|
||||
|
||||
/**
|
||||
* Batch updates files with new signed URLs in MongoDB
|
||||
*
|
||||
* @param {MongoFile[]} updates - Array of updates in the format { file_id, filepath }
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async function batchUpdateFiles(updates) {
|
||||
if (!updates || updates.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const bulkOperations = updates.map((update) => ({
|
||||
updateOne: {
|
||||
filter: { file_id: update.file_id },
|
||||
update: { $set: { filepath: update.filepath } },
|
||||
},
|
||||
}));
|
||||
|
||||
const result = await File.bulkWrite(bulkOperations);
|
||||
logger.info(`Updated ${result.modifiedCount} files with new S3 URLs`);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
File,
|
||||
findFileById,
|
||||
getFiles,
|
||||
getToolFilesByIds,
|
||||
createFile,
|
||||
updateFile,
|
||||
updateFileUsage,
|
||||
deleteFile,
|
||||
deleteFiles,
|
||||
deleteFileByFilter,
|
||||
batchUpdateFiles,
|
||||
};
|
||||
|
||||
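A quick usage sketch for the batchUpdateFiles helper removed above — the file_id and signed URL are illustrative placeholders, not values from this changeset:

// Inside an async context; one updateOne per entry, touching only filepath.
await batchUpdateFiles([
  { file_id: 'example-file-id', filepath: 'https://example-bucket.s3.amazonaws.com/file?signature=abc' },
]);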
@@ -1,4 +1,4 @@
const mongoose = require('mongoose');
const { keySchema } = require('@librechat/data-schemas');
const keySchema = require('./schema/key');

module.exports = mongoose.model('Key', keySchema);

@@ -2,6 +2,7 @@ const { z } = require('zod');
const Message = require('./schema/messageSchema');
const { logger } = require('~/config');

// Validate conversation ID as a UUID (if your conversation IDs follow UUID format)
const idSchema = z.string().uuid();

/**
@@ -28,8 +29,11 @@ const idSchema = z.string().uuid();
 * @param {string} [params.plugin] - Plugin associated with the message.
 * @param {string[]} [params.plugins] - An array of plugins associated with the message.
 * @param {string} [params.model] - The model used to generate the message.
 * @param {Object} [metadata] - Additional metadata for this operation
 * @param {string} [metadata.context] - The context of the operation
 * @param {string} [params.iv] - (Optional) Base64-encoded initialization vector for encryption.
 * @param {string} [params.authTag] - (Optional) Base64-encoded authentication tag from AES-GCM.
 * @param {string} [params.encryptedKey] - (Optional) Base64-encoded AES key encrypted with RSA.
 * @param {Object} [metadata] - Additional metadata for this operation.
 * @param {string} [metadata.context] - The context of the operation.
 * @returns {Promise<TMessage>} The updated or newly inserted message document.
 * @throws {Error} If there is an error in saving the message.
 */
@@ -51,6 +55,9 @@ async function saveMessage(req, params, metadata) {
    ...params,
    user: req.user.id,
    messageId: params.newMessageId || params.messageId,
    iv: params.iv ?? null,
    authTag: params.authTag ?? null,
    encryptedKey: params.encryptedKey ?? null,
  };

  if (req?.body?.isTemporary) {
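The three fields added above carry the E2EE payload per message: AES-GCM encryption of the text yields the iv and authTag, and the AES key is itself RSA-encrypted (presumably with the user's public key). A minimal client-side sketch of how such values could be produced with Node's built-in crypto module — the helper name, per-message key, and OAEP padding choice are assumptions, not code from this branch:

const crypto = require('crypto');

function encryptMessageText(plaintext, rsaPublicKeyPem) {
  const aesKey = crypto.randomBytes(32); // fresh AES-256 key per message (assumption)
  const iv = crypto.randomBytes(12); // 96-bit IV, the recommended size for GCM
  const cipher = crypto.createCipheriv('aes-256-gcm', aesKey, iv);
  const ciphertext = Buffer.concat([cipher.update(plaintext, 'utf8'), cipher.final()]);
  return {
    text: ciphertext.toString('base64'),
    iv: iv.toString('base64'),
    authTag: cipher.getAuthTag().toString('base64'),
    // Wrap the AES key with the recipient's RSA public key (OAEP padding assumed)
    encryptedKey: crypto
      .publicEncrypt(
        { key: rsaPublicKeyPem, padding: crypto.constants.RSA_PKCS1_OAEP_PADDING },
        aesKey,
      )
      .toString('base64'),
  };
}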
@@ -71,42 +78,7 @@ async function saveMessage(req, params, metadata) {
  } catch (err) {
    logger.error('Error saving message:', err);
    logger.info(`---\`saveMessage\` context: ${metadata?.context}`);

    // Check if this is a duplicate key error (MongoDB error code 11000)
    if (err.code === 11000 && err.message.includes('duplicate key error')) {
      // Log the duplicate key error but don't crash the application
      logger.warn(`Duplicate messageId detected: ${params.messageId}. Continuing execution.`);

      try {
        // Try to find the existing message with this ID
        const existingMessage = await Message.findOne({
          messageId: params.messageId,
          user: req.user.id,
        });

        // If we found it, return it
        if (existingMessage) {
          return existingMessage.toObject();
        }

        // If we can't find it (unlikely but possible in race conditions)
        return {
          ...params,
          messageId: params.messageId,
          user: req.user.id,
        };
      } catch (findError) {
        // If the findOne also fails, log it but don't crash
        logger.warn(`Could not retrieve existing message with ID ${params.messageId}: ${findError.message}`);
        return {
          ...params,
          messageId: params.messageId,
          user: req.user.id,
        };
      }
    }

    throw err; // Re-throw other errors
    throw err;
  }
}

@@ -125,7 +97,12 @@ async function bulkSaveMessages(messages, overrideTimestamp = false) {
  const bulkOps = messages.map((message) => ({
    updateOne: {
      filter: { messageId: message.messageId },
      update: message,
      update: {
        ...message,
        iv: message.iv ?? null,
        authTag: message.authTag ?? null,
        encryptedKey: message.encryptedKey ?? null,
      },
      timestamps: !overrideTimestamp,
      upsert: true,
    },
@@ -154,14 +131,7 @@ async function bulkSaveMessages(messages, overrideTimestamp = false) {
 * @returns {Promise<Object>} The updated or newly inserted message document.
 * @throws {Error} If there is an error in saving the message.
 */
async function recordMessage({
  user,
  endpoint,
  messageId,
  conversationId,
  parentMessageId,
  ...rest
}) {
async function recordMessage({ user, endpoint, messageId, conversationId, parentMessageId, ...rest }) {
  try {
    // No parsing of convoId as may use threadId
    const message = {
@@ -171,6 +141,9 @@ async function recordMessage({
      conversationId,
      parentMessageId,
      ...rest,
      iv: rest.iv ?? null,
      authTag: rest.authTag ?? null,
      encryptedKey: rest.encryptedKey ?? null,
    };

    return await Message.findOneAndUpdate({ user, messageId }, message, {
@@ -225,12 +198,15 @@ async function updateMessageText(req, { messageId, text }) {
async function updateMessage(req, message, metadata) {
  try {
    const { messageId, ...update } = message;
    // Ensure encryption fields are explicitly updated (if provided)
    update.iv = update.iv ?? null;
    update.authTag = update.authTag ?? null;
    update.encryptedKey = update.encryptedKey ?? null;

    const updatedMessage = await Message.findOneAndUpdate(
      { messageId, user: req.user.id },
      update,
      {
        new: true,
      },
      { new: true },
    );

    if (!updatedMessage) {
@@ -260,11 +236,11 @@ async function updateMessage(req, message, metadata) {
 *
 * @async
 * @function deleteMessagesSince
 * @param {Object} params - The parameters object.
 * @param {Object} req - The request object.
 * @param {Object} params - The parameters object.
 * @param {string} params.messageId - The unique identifier for the message.
 * @param {string} params.conversationId - The identifier of the conversation.
 * @returns {Promise<Number>} The number of deleted messages.
 * @returns {Promise<number>} The number of deleted messages.
 * @throws {Error} If there is an error in deleting messages.
 */
async function deleteMessagesSince(req, { messageId, conversationId }) {
@@ -298,7 +274,6 @@ async function getMessages(filter, select) {
    if (select) {
      return await Message.find(filter).select(select).sort({ createdAt: 1 }).lean();
    }

    return await Message.find(filter).sort({ createdAt: 1 }).lean();
  } catch (err) {
    logger.error('Error getting messages:', err);
@@ -316,10 +291,7 @@ async function getMessages(filter, select) {
 */
async function getMessage({ user, messageId }) {
  try {
    return await Message.findOne({
      user,
      messageId,
    }).lean();
    return await Message.findOne({ user, messageId }).lean();
  } catch (err) {
    logger.error('Error getting message:', err);
    throw err;
@@ -1,6 +1,6 @@
const { model } = require('mongoose');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
const { projectSchema } = require('@librechat/data-schemas');
const projectSchema = require('~/models/schema/projectSchema');

const Project = model('Project', projectSchema);

@@ -9,7 +9,7 @@ const Project = model('Project', projectSchema);
 *
 * @param {string} projectId - The ID of the project to find and return as a plain object.
 * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
 * @returns {Promise<IMongoProject>} A plain object representing the project document, or `null` if no project is found.
 * @returns {Promise<MongoProject>} A plain object representing the project document, or `null` if no project is found.
 */
const getProjectById = async function (projectId, fieldsToSelect = null) {
  const query = Project.findById(projectId);
@@ -27,7 +27,7 @@ const getProjectById = async function (projectId, fieldsToSelect = null) {
 *
 * @param {string} projectName - The name of the project to find or create.
 * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
 * @returns {Promise<IMongoProject>} A plain object representing the project document.
 * @returns {Promise<MongoProject>} A plain object representing the project document.
 */
const getProjectByName = async function (projectName, fieldsToSelect = null) {
  const query = { name: projectName };
@@ -47,7 +47,7 @@ const getProjectByName = async function (projectName, fieldsToSelect = null) {
 *
 * @param {string} projectId - The ID of the project to update.
 * @param {string[]} promptGroupIds - The array of prompt group IDs to add to the project.
 * @returns {Promise<IMongoProject>} The updated project document.
 * @returns {Promise<MongoProject>} The updated project document.
 */
const addGroupIdsToProject = async function (projectId, promptGroupIds) {
  return await Project.findByIdAndUpdate(
@@ -62,7 +62,7 @@ const addGroupIdsToProject = async function (projectId, promptGroupIds) {
 *
 * @param {string} projectId - The ID of the project to update.
 * @param {string[]} promptGroupIds - The array of prompt group IDs to remove from the project.
 * @returns {Promise<IMongoProject>} The updated project document.
 * @returns {Promise<MongoProject>} The updated project document.
 */
const removeGroupIdsFromProject = async function (projectId, promptGroupIds) {
  return await Project.findByIdAndUpdate(
@@ -87,7 +87,7 @@ const removeGroupFromAllProjects = async (promptGroupId) => {
 *
 * @param {string} projectId - The ID of the project to update.
 * @param {string[]} agentIds - The array of agent IDs to add to the project.
 * @returns {Promise<IMongoProject>} The updated project document.
 * @returns {Promise<MongoProject>} The updated project document.
 */
const addAgentIdsToProject = async function (projectId, agentIds) {
  return await Project.findByIdAndUpdate(
@@ -102,7 +102,7 @@ const addAgentIdsToProject = async function (projectId, agentIds) {
 *
 * @param {string} projectId - The ID of the project to update.
 * @param {string[]} agentIds - The array of agent IDs to remove from the project.
 * @returns {Promise<IMongoProject>} The updated project document.
 * @returns {Promise<MongoProject>} The updated project document.
 */
const removeAgentIdsFromProject = async function (projectId, agentIds) {
  return await Project.findByIdAndUpdate(
@@ -1,4 +1,3 @@
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { SystemRoles, SystemCategories, Constants } = require('librechat-data-provider');
const {
@@ -7,13 +6,10 @@ const {
  removeGroupIdsFromProject,
  removeGroupFromAllProjects,
} = require('./Project');
const { promptGroupSchema, promptSchema } = require('@librechat/data-schemas');
const { Prompt, PromptGroup } = require('./schema/promptSchema');
const { escapeRegExp } = require('~/server/utils');
const { logger } = require('~/config');

const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);
const Prompt = mongoose.model('Prompt', promptSchema);

/**
 * Create a pipeline for the aggregation to get prompt groups
 * @param {Object} query
@@ -1,4 +1,3 @@
const mongoose = require('mongoose');
const {
  CacheKeys,
  SystemRoles,
@@ -13,11 +12,9 @@ const {
  temporaryChatPermissionsSchema,
} = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const { roleSchema } = require('@librechat/data-schemas');
const Role = require('~/models/schema/roleSchema');
const { logger } = require('~/config');

const Role = mongoose.model('Role', roleSchema);

/**
 * Retrieve a role by name and convert the found role document to a plain object.
 * If the role with the given name doesn't exist and the name is a system defined role, create it and return the lean version.
@@ -171,7 +168,6 @@ const initializeRoles = async function () {
  }
};
module.exports = {
  Role,
  getRoleByName,
  initializeRoles,
  updateRoleByName,

@@ -8,7 +8,7 @@ const {
} = require('librechat-data-provider');
const { updateAccessPermissions, initializeRoles } = require('~/models/Role');
const getLogStores = require('~/cache/getLogStores');
const { Role } = require('~/models/Role');
const Role = require('~/models/schema/roleSchema');

// Mock the cache
jest.mock('~/cache/getLogStores', () => {
@@ -1,7 +1,7 @@
const mongoose = require('mongoose');
const signPayload = require('~/server/services/signPayload');
const { hashToken } = require('~/server/utils/crypto');
const { sessionSchema } = require('@librechat/data-schemas');
const sessionSchema = require('./schema/session');
const { logger } = require('~/config');

const Session = mongoose.model('Session', sessionSchema);

@@ -1,9 +1,7 @@
const mongoose = require('mongoose');
const { nanoid } = require('nanoid');
const { Constants } = require('librechat-data-provider');
const { Conversation } = require('~/models/Conversation');
const { shareSchema } = require('@librechat/data-schemas');
const SharedLink = mongoose.model('SharedLink', shareSchema);
const SharedLink = require('./schema/shareSchema');
const { getMessages } = require('./Message');
const logger = require('~/config/winston');

@@ -1,6 +1,6 @@
const mongoose = require('mongoose');
const { encryptV2 } = require('~/server/utils/crypto');
const { tokenSchema } = require('@librechat/data-schemas');
const tokenSchema = require('./schema/tokenSchema');
const { logger } = require('~/config');

/**
@@ -1,11 +1,9 @@
const mongoose = require('mongoose');
const { toolCallSchema } = require('@librechat/data-schemas');
const ToolCall = mongoose.model('ToolCall', toolCallSchema);
const ToolCall = require('./schema/toolCallSchema');

/**
 * Create a new tool call
 * @param {IToolCallData} toolCallData - The tool call data
 * @returns {Promise<IToolCallData>} The created tool call document
 * @param {ToolCallData} toolCallData - The tool call data
 * @returns {Promise<ToolCallData>} The created tool call document
 */
async function createToolCall(toolCallData) {
  try {
@@ -18,7 +16,7 @@ async function createToolCall(toolCallData) {
/**
 * Get a tool call by ID
 * @param {string} id - The tool call document ID
 * @returns {Promise<IToolCallData|null>} The tool call document or null if not found
 * @returns {Promise<ToolCallData|null>} The tool call document or null if not found
 */
async function getToolCallById(id) {
  try {
@@ -46,7 +44,7 @@ async function getToolCallsByMessage(messageId, userId) {
 * Get tool calls by conversation ID and user
 * @param {string} conversationId - The conversation ID
 * @param {string} userId - The user's ObjectId
 * @returns {Promise<IToolCallData[]>} Array of tool call documents
 * @returns {Promise<ToolCallData[]>} Array of tool call documents
 */
async function getToolCallsByConvo(conversationId, userId) {
  try {
@@ -59,8 +57,8 @@ async function getToolCallsByConvo(conversationId, userId) {
/**
 * Update a tool call
 * @param {string} id - The tool call document ID
 * @param {Partial<IToolCallData>} updateData - The data to update
 * @returns {Promise<IToolCallData|null>} The updated tool call document or null if not found
 * @param {Partial<ToolCallData>} updateData - The data to update
 * @returns {Promise<ToolCallData|null>} The updated tool call document or null if not found
 */
async function updateToolCall(id, updateData) {
  try {
@@ -1,144 +1,11 @@
const mongoose = require('mongoose');
const { transactionSchema } = require('@librechat/data-schemas');
const { getBalanceConfig } = require('~/server/services/Config');
const { isEnabled } = require('~/server/utils/handleText');
const transactionSchema = require('./schema/transaction');
const { getMultiplier, getCacheMultiplier } = require('./tx');
const { logger } = require('~/config');
const Balance = require('./Balance');

const cancelRate = 1.15;

/**
 * Updates a user's token balance based on a transaction using optimistic concurrency control
 * without schema changes. Compatible with DocumentDB.
 * @async
 * @function
 * @param {Object} params - The function parameters.
 * @param {string|mongoose.Types.ObjectId} params.user - The user ID.
 * @param {number} params.incrementValue - The value to increment the balance by (can be negative).
 * @param {import('mongoose').UpdateQuery<import('@librechat/data-schemas').IBalance>['$set']} [params.setValues] - Optional additional fields to set.
 * @returns {Promise<Object>} Returns the updated balance document (lean).
 * @throws {Error} Throws an error if the update fails after multiple retries.
 */
const updateBalance = async ({ user, incrementValue, setValues }) => {
  let maxRetries = 10; // Number of times to retry on conflict
  let delay = 50; // Initial retry delay in ms
  let lastError = null;

  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    let currentBalanceDoc;
    try {
      // 1. Read the current document state
      currentBalanceDoc = await Balance.findOne({ user }).lean();
      const currentCredits = currentBalanceDoc ? currentBalanceDoc.tokenCredits : 0;

      // 2. Calculate the desired new state
      const potentialNewCredits = currentCredits + incrementValue;
      const newCredits = Math.max(0, potentialNewCredits); // Ensure balance doesn't go below zero

      // 3. Prepare the update payload
      const updatePayload = {
        $set: {
          tokenCredits: newCredits,
          ...(setValues || {}), // Merge other values to set
        },
      };

      // 4. Attempt the conditional update or upsert
      let updatedBalance = null;
      if (currentBalanceDoc) {
        // --- Document Exists: Perform Conditional Update ---
        // Try to update only if the tokenCredits match the value we read (currentCredits)
        updatedBalance = await Balance.findOneAndUpdate(
          {
            user: user,
            tokenCredits: currentCredits, // Optimistic lock: condition based on the read value
          },
          updatePayload,
          {
            new: true, // Return the modified document
            // lean: true, // .lean() is applied after query execution in Mongoose >= 6
          },
        ).lean(); // Use lean() for plain JS object

        if (updatedBalance) {
          // Success! The update was applied based on the expected current state.
          return updatedBalance;
        }
        // If updatedBalance is null, it means tokenCredits changed between read and write (conflict).
        lastError = new Error(`Concurrency conflict for user ${user} on attempt ${attempt}.`);
        // Proceed to retry logic below.
      } else {
        // --- Document Does Not Exist: Perform Conditional Upsert ---
        // Try to insert the document, but only if it still doesn't exist.
        // Using tokenCredits: {$exists: false} helps prevent race conditions where
        // another process creates the doc between our findOne and findOneAndUpdate.
        try {
          updatedBalance = await Balance.findOneAndUpdate(
            {
              user: user,
              // Attempt to match only if the document doesn't exist OR was just created
              // without tokenCredits (less likely but possible). A simple { user } filter
              // might also work, relying on the retry for conflicts.
              // Let's use a simpler filter and rely on retry for races.
              // tokenCredits: { $exists: false } // This condition might be too strict if doc exists with 0 credits
            },
            updatePayload,
            {
              upsert: true, // Create if doesn't exist
              new: true, // Return the created/updated document
              // setDefaultsOnInsert: true, // Ensure schema defaults are applied on insert
              // lean: true,
            },
          ).lean();

          if (updatedBalance) {
            // Upsert succeeded (likely created the document)
            return updatedBalance;
          }
          // If null, potentially a rare race condition during upsert. Retry should handle it.
          lastError = new Error(
            `Upsert race condition suspected for user ${user} on attempt ${attempt}.`,
          );
        } catch (error) {
          if (error.code === 11000) {
            // E11000 duplicate key error on index
            // This means another process created the document *just* before our upsert.
            // It's a concurrency conflict during creation. We should retry.
            lastError = error; // Store the error
            // Proceed to retry logic below.
          } else {
            // Different error, rethrow
            throw error;
          }
        }
      } // End if/else (document exists?)
    } catch (error) {
      // Catch errors from findOne or unexpected findOneAndUpdate errors
      logger.error(`[updateBalance] Error during attempt ${attempt} for user ${user}:`, error);
      lastError = error; // Store the error
      // Consider stopping retries for non-transient errors, but for now, we retry.
    }

    // If we reached here, it means the update failed (conflict or error), wait and retry
    if (attempt < maxRetries) {
      const jitter = Math.random() * delay * 0.5; // Add jitter to delay
      await new Promise((resolve) => setTimeout(resolve, delay + jitter));
      delay = Math.min(delay * 2, 2000); // Exponential backoff with cap
    }
  } // End for loop (retries)

  // If loop finishes without success, throw the last encountered error or a generic one
  logger.error(
    `[updateBalance] Failed to update balance for user ${user} after ${maxRetries} attempts.`,
  );
  throw (
    lastError ||
    new Error(
      `Failed to update balance for user ${user} after maximum retries due to persistent conflicts.`,
    )
  );
};
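A usage sketch for the optimistic-concurrency helper above, inside an async context (someUserId and the amount are placeholders):

// Deduct 500 token credits; on a write conflict the helper re-reads the document
// and retries with exponential backoff, so the caller just awaits the final state.
const updated = await updateBalance({ user: someUserId, incrementValue: -500 });
console.log(updated.tokenCredits);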
/** Method to calculate and set the tokenValue for a transaction */
transactionSchema.methods.calculateTokenValue = function () {
  if (!this.valueKey || !this.tokenType) {
@@ -154,39 +21,6 @@ transactionSchema.methods.calculateTokenValue = function () {
  }
};

/**
 * New static method to create an auto-refill transaction that does NOT trigger a balance update.
 * @param {object} txData - Transaction data.
 * @param {string} txData.user - The user ID.
 * @param {string} txData.tokenType - The type of token.
 * @param {string} txData.context - The context of the transaction.
 * @param {number} txData.rawAmount - The raw amount of tokens.
 * @returns {Promise<object>} - The created transaction.
 */
transactionSchema.statics.createAutoRefillTransaction = async function (txData) {
  if (txData.rawAmount != null && isNaN(txData.rawAmount)) {
    return;
  }
  const transaction = new this(txData);
  transaction.endpointTokenConfig = txData.endpointTokenConfig;
  transaction.calculateTokenValue();
  await transaction.save();

  const balanceResponse = await updateBalance({
    user: transaction.user,
    incrementValue: txData.rawAmount,
    setValues: { lastRefill: new Date() },
  });
  const result = {
    rate: transaction.rate,
    user: transaction.user.toString(),
    balance: balanceResponse.tokenCredits,
  };
  logger.debug('[Balance.check] Auto-refill performed', result);
  result.transaction = transaction;
  return result;
};

/**
 * Static method to create a transaction and update the balance
 * @param {txData} txData - Transaction data.
@@ -203,22 +37,27 @@ transactionSchema.statics.create = async function (txData) {

  await transaction.save();

  const balance = await getBalanceConfig();
  if (!balance?.enabled) {
  if (!isEnabled(process.env.CHECK_BALANCE)) {
    return;
  }

  let balance = await Balance.findOne({ user: transaction.user }).lean();
  let incrementValue = transaction.tokenValue;

  const balanceResponse = await updateBalance({
    user: transaction.user,
    incrementValue,
  });
  if (balance && balance?.tokenCredits + incrementValue < 0) {
    incrementValue = -balance.tokenCredits;
  }

  balance = await Balance.findOneAndUpdate(
    { user: transaction.user },
    { $inc: { tokenCredits: incrementValue } },
    { upsert: true, new: true },
  ).lean();

  return {
    rate: transaction.rate,
    user: transaction.user.toString(),
    balance: balanceResponse.tokenCredits,
    balance: balance.tokenCredits,
    [transaction.tokenType]: incrementValue,
  };
};
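A worked trace of the clamping branch introduced above, with purely illustrative numbers: if the stored balance is 100 tokenCredits and the transaction's tokenValue is -150, the increment is clamped so the balance lands at exactly 0 instead of -50.

// Hypothetical values, tracing Transaction.create:
// balance.tokenCredits = 100, transaction.tokenValue = -150
// 100 + (-150) < 0  ->  incrementValue = -100
// $inc: { tokenCredits: -100 }  ->  final balance = 0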
@@ -239,22 +78,27 @@ transactionSchema.statics.createStructured = async function (txData) {

  await transaction.save();

  const balance = await getBalanceConfig();
  if (!balance?.enabled) {
  if (!isEnabled(process.env.CHECK_BALANCE)) {
    return;
  }

  let balance = await Balance.findOne({ user: transaction.user }).lean();
  let incrementValue = transaction.tokenValue;

  const balanceResponse = await updateBalance({
    user: transaction.user,
    incrementValue,
  });
  if (balance && balance?.tokenCredits + incrementValue < 0) {
    incrementValue = -balance.tokenCredits;
  }

  balance = await Balance.findOneAndUpdate(
    { user: transaction.user },
    { $inc: { tokenCredits: incrementValue } },
    { upsert: true, new: true },
  ).lean();

  return {
    rate: transaction.rate,
    user: transaction.user.toString(),
    balance: balanceResponse.tokenCredits,
    balance: balance.tokenCredits,
    [transaction.tokenType]: incrementValue,
  };
};
@@ -1,13 +1,9 @@
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { spendTokens, spendStructuredTokens } = require('./spendTokens');
const { getBalanceConfig } = require('~/server/services/Config');
const { getMultiplier, getCacheMultiplier } = require('./tx');
const { Transaction } = require('./Transaction');
const Balance = require('./Balance');

// Mock the custom config module so we can control the balance flag.
jest.mock('~/server/services/Config');
const { spendTokens, spendStructuredTokens } = require('./spendTokens');
const { getMultiplier, getCacheMultiplier } = require('./tx');

let mongoServer;

@@ -24,8 +20,6 @@ afterAll(async () => {

beforeEach(async () => {
  await mongoose.connection.dropDatabase();
  // Default: enable balance updates in tests.
  getBalanceConfig.mockResolvedValue({ enabled: true });
});

describe('Regular Token Spending Tests', () => {
@@ -50,22 +44,34 @@ describe('Regular Token Spending Tests', () => {
    };

    // Act
    process.env.CHECK_BALANCE = 'true';
    await spendTokens(txData, tokenUsage);

    // Assert
    console.log('Initial Balance:', initialBalance);

    const updatedBalance = await Balance.findOne({ user: userId });
    console.log('Updated Balance:', updatedBalance.tokenCredits);

    const promptMultiplier = getMultiplier({ model, tokenType: 'prompt' });
    const completionMultiplier = getMultiplier({ model, tokenType: 'completion' });
    const expectedTotalCost = 100 * promptMultiplier + 50 * completionMultiplier;

    const expectedPromptCost = tokenUsage.promptTokens * promptMultiplier;
    const expectedCompletionCost = tokenUsage.completionTokens * completionMultiplier;
    const expectedTotalCost = expectedPromptCost + expectedCompletionCost;
    const expectedBalance = initialBalance - expectedTotalCost;

    expect(updatedBalance.tokenCredits).toBeLessThan(initialBalance);
    expect(updatedBalance.tokenCredits).toBeCloseTo(expectedBalance, 0);

    console.log('Expected Total Cost:', expectedTotalCost);
    console.log('Actual Balance Decrease:', initialBalance - updatedBalance.tokenCredits);
  });

  test('spendTokens should handle zero completion tokens', async () => {
    // Arrange
    const userId = new mongoose.Types.ObjectId();
    const initialBalance = 10000000;
    const initialBalance = 10000000; // $10.00
    await Balance.create({ user: userId, tokenCredits: initialBalance });

    const model = 'gpt-3.5-turbo';
@@ -83,19 +89,24 @@ describe('Regular Token Spending Tests', () => {
    };

    // Act
    process.env.CHECK_BALANCE = 'true';
    await spendTokens(txData, tokenUsage);

    // Assert
    const updatedBalance = await Balance.findOne({ user: userId });

    const promptMultiplier = getMultiplier({ model, tokenType: 'prompt' });
    const expectedCost = 100 * promptMultiplier;
    const expectedCost = tokenUsage.promptTokens * promptMultiplier;
    expect(updatedBalance.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);

    console.log('Initial Balance:', initialBalance);
    console.log('Updated Balance:', updatedBalance.tokenCredits);
    console.log('Expected Cost:', expectedCost);
  });

  test('spendTokens should handle undefined token counts', async () => {
    // Arrange
    const userId = new mongoose.Types.ObjectId();
    const initialBalance = 10000000;
    const initialBalance = 10000000; // $10.00
    await Balance.create({ user: userId, tokenCredits: initialBalance });

    const model = 'gpt-3.5-turbo';
@@ -109,17 +120,14 @@ describe('Regular Token Spending Tests', () => {

    const tokenUsage = {};

    // Act
    const result = await spendTokens(txData, tokenUsage);

    // Assert: No transaction should be created
    expect(result).toBeUndefined();
  });

  test('spendTokens should handle only prompt tokens', async () => {
    // Arrange
    const userId = new mongoose.Types.ObjectId();
    const initialBalance = 10000000;
    const initialBalance = 10000000; // $10.00
    await Balance.create({ user: userId, tokenCredits: initialBalance });

    const model = 'gpt-3.5-turbo';
@@ -133,44 +141,14 @@ describe('Regular Token Spending Tests', () => {

    const tokenUsage = { promptTokens: 100 };

    // Act
    await spendTokens(txData, tokenUsage);

    // Assert
    const updatedBalance = await Balance.findOne({ user: userId });

    const promptMultiplier = getMultiplier({ model, tokenType: 'prompt' });
    const expectedCost = 100 * promptMultiplier;
    expect(updatedBalance.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
  });

  test('spendTokens should not update balance when balance feature is disabled', async () => {
    // Arrange: Override the config to disable balance updates.
    getBalanceConfig.mockResolvedValue({ balance: { enabled: false } });
    const userId = new mongoose.Types.ObjectId();
    const initialBalance = 10000000;
    await Balance.create({ user: userId, tokenCredits: initialBalance });

    const model = 'gpt-3.5-turbo';
    const txData = {
      user: userId,
      conversationId: 'test-conversation-id',
      model,
      context: 'test',
      endpointTokenConfig: null,
    };

    const tokenUsage = {
      promptTokens: 100,
      completionTokens: 50,
    };

    // Act
    await spendTokens(txData, tokenUsage);

    // Assert: Balance should remain unchanged.
    const updatedBalance = await Balance.findOne({ user: userId });
    expect(updatedBalance.tokenCredits).toBe(initialBalance);
  });
});

describe('Structured Token Spending Tests', () => {
|
||||
conversationId: 'c23a18da-706c-470a-ac28-ec87ed065199',
|
||||
model,
|
||||
context: 'message',
|
||||
endpointTokenConfig: null,
|
||||
endpointTokenConfig: null, // We'll use the default rates
|
||||
};
|
||||
|
||||
const tokenUsage = {
|
||||
@@ -198,15 +176,28 @@ describe('Structured Token Spending Tests', () => {
|
||||
completionTokens: 5,
|
||||
};
|
||||
|
||||
// Get the actual multipliers
|
||||
const promptMultiplier = getMultiplier({ model, tokenType: 'prompt' });
|
||||
const completionMultiplier = getMultiplier({ model, tokenType: 'completion' });
|
||||
const writeMultiplier = getCacheMultiplier({ model, cacheType: 'write' });
|
||||
const readMultiplier = getCacheMultiplier({ model, cacheType: 'read' });
|
||||
|
||||
console.log('Multipliers:', {
|
||||
promptMultiplier,
|
||||
completionMultiplier,
|
||||
writeMultiplier,
|
||||
readMultiplier,
|
||||
});
|
||||
|
||||
// Act
|
||||
process.env.CHECK_BALANCE = 'true';
|
||||
const result = await spendStructuredTokens(txData, tokenUsage);
|
||||
|
||||
// Calculate expected costs.
|
||||
// Assert
|
||||
console.log('Initial Balance:', initialBalance);
|
||||
console.log('Updated Balance:', result.completion.balance);
|
||||
console.log('Transaction Result:', result);
|
||||
|
||||
const expectedPromptCost =
|
||||
tokenUsage.promptTokens.input * promptMultiplier +
|
||||
tokenUsage.promptTokens.write * writeMultiplier +
|
||||
@@ -215,21 +206,37 @@ describe('Structured Token Spending Tests', () => {
|
||||
const expectedTotalCost = expectedPromptCost + expectedCompletionCost;
|
||||
const expectedBalance = initialBalance - expectedTotalCost;
|
||||
|
||||
// Assert
|
||||
console.log('Expected Cost:', expectedTotalCost);
|
||||
console.log('Expected Balance:', expectedBalance);
|
||||
|
||||
expect(result.completion.balance).toBeLessThan(initialBalance);
|
||||
|
||||
// Allow for a small difference (e.g., 100 token credits, which is $0.0001)
|
||||
const allowedDifference = 100;
|
||||
expect(Math.abs(result.completion.balance - expectedBalance)).toBeLessThan(allowedDifference);
|
||||
|
||||
// Check if the decrease is approximately as expected
|
||||
const balanceDecrease = initialBalance - result.completion.balance;
|
||||
expect(balanceDecrease).toBeCloseTo(expectedTotalCost, 0);
|
||||
|
||||
const expectedPromptTokenValue = -expectedPromptCost;
|
||||
const expectedCompletionTokenValue = -expectedCompletionCost;
|
||||
// Check token values
|
||||
const expectedPromptTokenValue = -(
|
||||
tokenUsage.promptTokens.input * promptMultiplier +
|
||||
tokenUsage.promptTokens.write * writeMultiplier +
|
||||
tokenUsage.promptTokens.read * readMultiplier
|
||||
);
|
||||
const expectedCompletionTokenValue = -tokenUsage.completionTokens * completionMultiplier;
|
||||
|
||||
expect(result.prompt.prompt).toBeCloseTo(expectedPromptTokenValue, 1);
|
||||
expect(result.completion.completion).toBe(expectedCompletionTokenValue);
|
||||
|
||||
console.log('Expected prompt tokenValue:', expectedPromptTokenValue);
|
||||
console.log('Actual prompt tokenValue:', result.prompt.prompt);
|
||||
console.log('Expected completion tokenValue:', expectedCompletionTokenValue);
|
||||
console.log('Actual completion tokenValue:', result.completion.completion);
|
||||
});
|
||||
|
||||
test('should handle zero completion tokens in structured spending', async () => {
|
||||
// Arrange
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const initialBalance = 17613154.55;
|
||||
await Balance.create({ user: userId, tokenCredits: initialBalance });
|
||||
@@ -251,17 +258,15 @@ describe('Structured Token Spending Tests', () => {
|
||||
completionTokens: 0,
|
||||
};
|
||||
|
||||
// Act
|
||||
process.env.CHECK_BALANCE = 'true';
|
||||
const result = await spendStructuredTokens(txData, tokenUsage);
|
||||
|
||||
// Assert
|
||||
expect(result.prompt).toBeDefined();
|
||||
expect(result.completion).toBeUndefined();
|
||||
expect(result.prompt.prompt).toBeLessThan(0);
|
||||
});
|
||||
|
||||
test('should handle only prompt tokens in structured spending', async () => {
|
||||
// Arrange
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const initialBalance = 17613154.55;
|
||||
await Balance.create({ user: userId, tokenCredits: initialBalance });
|
||||
@@ -282,17 +287,15 @@ describe('Structured Token Spending Tests', () => {
|
||||
},
|
||||
};
|
||||
|
||||
// Act
|
||||
process.env.CHECK_BALANCE = 'true';
|
||||
const result = await spendStructuredTokens(txData, tokenUsage);
|
||||
|
||||
// Assert
|
||||
expect(result.prompt).toBeDefined();
|
||||
expect(result.completion).toBeUndefined();
|
||||
expect(result.prompt.prompt).toBeLessThan(0);
|
||||
});
|
||||
|
||||
test('should handle undefined token counts in structured spending', async () => {
|
||||
// Arrange
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const initialBalance = 17613154.55;
|
||||
await Balance.create({ user: userId, tokenCredits: initialBalance });
|
||||
@@ -307,10 +310,9 @@ describe('Structured Token Spending Tests', () => {
|
||||
|
||||
const tokenUsage = {};
|
||||
|
||||
// Act
|
||||
process.env.CHECK_BALANCE = 'true';
|
||||
const result = await spendStructuredTokens(txData, tokenUsage);
|
||||
|
||||
// Assert
|
||||
expect(result).toEqual({
|
||||
prompt: undefined,
|
||||
completion: undefined,
|
||||
@@ -318,7 +320,6 @@ describe('Structured Token Spending Tests', () => {
|
||||
});
|
||||
|
||||
test('should handle incomplete context for completion tokens', async () => {
|
||||
// Arrange
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const initialBalance = 17613154.55;
|
||||
await Balance.create({ user: userId, tokenCredits: initialBalance });
|
||||
@@ -340,18 +341,15 @@ describe('Structured Token Spending Tests', () => {
|
||||
completionTokens: 50,
|
||||
};
|
||||
|
||||
// Act
|
||||
process.env.CHECK_BALANCE = 'true';
|
||||
const result = await spendStructuredTokens(txData, tokenUsage);
|
||||
|
||||
// Assert:
|
||||
// (Assuming a multiplier for completion of 15 and a cancel rate of 1.15 as noted in the original test.)
|
||||
expect(result.completion.completion).toBeCloseTo(-50 * 15 * 1.15, 0);
|
||||
expect(result.completion.completion).toBeCloseTo(-50 * 15 * 1.15, 0); // Assuming multiplier is 15 and cancelRate is 1.15
|
||||
});
|
||||
});
|
||||
|
||||
describe('NaN Handling Tests', () => {
|
||||
test('should skip transaction creation when rawAmount is NaN', async () => {
|
||||
// Arrange
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const initialBalance = 10000000;
|
||||
await Balance.create({ user: userId, tokenCredits: initialBalance });
|
||||
@@ -367,11 +365,9 @@ describe('NaN Handling Tests', () => {
|
||||
tokenType: 'prompt',
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = await Transaction.create(txData);
|
||||
|
||||
// Assert: No transaction should be created and balance remains unchanged.
|
||||
expect(result).toBeUndefined();
|
||||
|
||||
const balance = await Balance.findOne({ user: userId });
|
||||
expect(balance.tokenCredits).toBe(initialBalance);
|
||||
});
|
||||
|
||||
@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
const { userSchema } = require('@librechat/data-schemas');
const userSchema = require('~/models/schema/userSchema');

const User = mongoose.model('User', userSchema);

@@ -1,156 +0,0 @@
const { ViolationTypes } = require('librechat-data-provider');
const { Transaction } = require('./Transaction');
const { logViolation } = require('~/cache');
const { getMultiplier } = require('./tx');
const { logger } = require('~/config');
const Balance = require('./Balance');

function isInvalidDate(date) {
  return isNaN(date);
}

/**
 * Simple check method that calculates token cost and returns balance info.
 * The auto-refill logic has been moved to balanceMethods.js to prevent circular dependencies.
 */
const checkBalanceRecord = async function ({
  user,
  model,
  endpoint,
  valueKey,
  tokenType,
  amount,
  endpointTokenConfig,
}) {
  const multiplier = getMultiplier({ valueKey, tokenType, model, endpoint, endpointTokenConfig });
  const tokenCost = amount * multiplier;

  // Retrieve the balance record
  let record = await Balance.findOne({ user }).lean();
  if (!record) {
    logger.debug('[Balance.check] No balance record found for user', { user });
    return {
      canSpend: false,
      balance: 0,
      tokenCost,
    };
  }
  let balance = record.tokenCredits;

  logger.debug('[Balance.check] Initial state', {
    user,
    model,
    endpoint,
    valueKey,
    tokenType,
    amount,
    balance,
    multiplier,
    endpointTokenConfig: !!endpointTokenConfig,
  });

  // Only perform auto-refill if spending would bring the balance to 0 or below
  if (balance - tokenCost <= 0 && record.autoRefillEnabled && record.refillAmount > 0) {
    const lastRefillDate = new Date(record.lastRefill);
    const now = new Date();
    if (
      isInvalidDate(lastRefillDate) ||
      now >=
        addIntervalToDate(lastRefillDate, record.refillIntervalValue, record.refillIntervalUnit)
    ) {
      try {
        /** @type {{ rate: number, user: string, balance: number, transaction: import('@librechat/data-schemas').ITransaction}} */
        const result = await Transaction.createAutoRefillTransaction({
          user: user,
          tokenType: 'credits',
          context: 'autoRefill',
          rawAmount: record.refillAmount,
        });
        balance = result.balance;
      } catch (error) {
        logger.error('[Balance.check] Failed to record transaction for auto-refill', error);
      }
    }
  }

  logger.debug('[Balance.check] Token cost', { tokenCost });
  return { canSpend: balance >= tokenCost, balance, tokenCost };
};

/**
 * Adds a time interval to a given date.
 * @param {Date} date - The starting date.
 * @param {number} value - The numeric value of the interval.
 * @param {'seconds'|'minutes'|'hours'|'days'|'weeks'|'months'} unit - The unit of time.
 * @returns {Date} A new Date representing the starting date plus the interval.
 */
const addIntervalToDate = (date, value, unit) => {
  const result = new Date(date);
  switch (unit) {
    case 'seconds':
      result.setSeconds(result.getSeconds() + value);
      break;
    case 'minutes':
      result.setMinutes(result.getMinutes() + value);
      break;
    case 'hours':
      result.setHours(result.getHours() + value);
      break;
    case 'days':
      result.setDate(result.getDate() + value);
      break;
    case 'weeks':
      result.setDate(result.getDate() + value * 7);
      break;
    case 'months':
      result.setMonth(result.getMonth() + value);
      break;
    default:
      break;
  }
  return result;
};

/**
 * Checks the balance for a user and determines if they can spend a certain amount.
 * If the user cannot spend the amount, it logs a violation and denies the request.
 *
 * @async
 * @function
 * @param {Object} params - The function parameters.
 * @param {Express.Request} params.req - The Express request object.
 * @param {Express.Response} params.res - The Express response object.
 * @param {Object} params.txData - The transaction data.
 * @param {string} params.txData.user - The user ID or identifier.
 * @param {('prompt' | 'completion')} params.txData.tokenType - The type of token.
 * @param {number} params.txData.amount - The amount of tokens.
 * @param {string} params.txData.model - The model name or identifier.
 * @param {string} [params.txData.endpointTokenConfig] - The token configuration for the endpoint.
 * @returns {Promise<boolean>} Throws error if the user cannot spend the amount.
 * @throws {Error} Throws an error if there's an issue with the balance check.
 */
const checkBalance = async ({ req, res, txData }) => {
  const { canSpend, balance, tokenCost } = await checkBalanceRecord(txData);
  if (canSpend) {
    return true;
  }

  const type = ViolationTypes.TOKEN_BALANCE;
  const errorMessage = {
    type,
    balance,
    tokenCost,
    promptTokens: txData.amount,
  };

  if (txData.generations && txData.generations.length > 0) {
    errorMessage.generations = txData.generations;
  }

  await logViolation(req, res, type, errorMessage, 0);
  throw new Error(JSON.stringify(errorMessage));
};

module.exports = {
  checkBalance,
};
45
api/models/checkBalance.js
Normal file
@@ -0,0 +1,45 @@
const { ViolationTypes } = require('librechat-data-provider');
const { logViolation } = require('~/cache');
const Balance = require('./Balance');
/**
 * Checks the balance for a user and determines if they can spend a certain amount.
 * If the user cannot spend the amount, it logs a violation and denies the request.
 *
 * @async
 * @function
 * @param {Object} params - The function parameters.
 * @param {Express.Request} params.req - The Express request object.
 * @param {Express.Response} params.res - The Express response object.
 * @param {Object} params.txData - The transaction data.
 * @param {string} params.txData.user - The user ID or identifier.
 * @param {('prompt' | 'completion')} params.txData.tokenType - The type of token.
 * @param {number} params.txData.amount - The amount of tokens.
 * @param {string} params.txData.model - The model name or identifier.
 * @param {string} [params.txData.endpointTokenConfig] - The token configuration for the endpoint.
 * @returns {Promise<boolean>} Returns true if the user can spend the amount, otherwise denies the request.
 * @throws {Error} Throws an error if there's an issue with the balance check.
 */
const checkBalance = async ({ req, res, txData }) => {
  const { canSpend, balance, tokenCost } = await Balance.check(txData);

  if (canSpend) {
    return true;
  }

  const type = ViolationTypes.TOKEN_BALANCE;
  const errorMessage = {
    type,
    balance,
    tokenCost,
    promptTokens: txData.amount,
  };

  if (txData.generations && txData.generations.length > 0) {
    errorMessage.generations = txData.generations;
  }

  await logViolation(req, res, type, errorMessage, 0);
  throw new Error(JSON.stringify(errorMessage));
};

module.exports = checkBalance;
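A hypothetical call site for this new module, e.g. guarding a completion request before tokens are spent (the wrapper name and arguments are assumptions, not code from this changeset):

const checkBalance = require('~/models/checkBalance');

async function guardSpend(req, res, promptTokens, model) {
  // Resolves true when the user can cover the cost; otherwise logs a
  // TOKEN_BALANCE violation and throws with the balance details.
  return await checkBalance({
    req,
    res,
    txData: { user: req.user.id, tokenType: 'prompt', amount: promptTokens, model },
  });
}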
@@ -1,32 +1,12 @@
|
||||
const _ = require('lodash');
|
||||
const mongoose = require('mongoose');
|
||||
const { MeiliSearch } = require('meilisearch');
|
||||
const { parseTextParts, ContentTypes } = require('librechat-data-provider');
|
||||
const { cleanUpPrimaryKeyValue } = require('~/lib/utils/misc');
|
||||
const logger = require('~/config/meiliLogger');
|
||||
|
||||
// Environment flags
|
||||
/**
|
||||
* Flag to indicate if search is enabled based on environment variables.
|
||||
* @type {boolean}
|
||||
*/
|
||||
const searchEnabled = process.env.SEARCH && process.env.SEARCH.toLowerCase() === 'true';
|
||||
|
||||
/**
|
||||
* Flag to indicate if MeiliSearch is enabled based on required environment variables.
|
||||
* @type {boolean}
|
||||
*/
|
||||
const meiliEnabled = process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY && searchEnabled;
|
||||
|
||||
/**
|
||||
* Validates the required options for configuring the mongoMeili plugin.
|
||||
*
|
||||
* @param {Object} options - The configuration options.
|
||||
* @param {string} options.host - The MeiliSearch host.
|
||||
* @param {string} options.apiKey - The MeiliSearch API key.
|
||||
* @param {string} options.indexName - The name of the index.
|
||||
* @throws {Error} Throws an error if any required option is missing.
|
||||
*/
|
||||
const validateOptions = function (options) {
|
||||
const requiredKeys = ['host', 'apiKey', 'indexName'];
|
||||
requiredKeys.forEach((key) => {
|
||||
@@ -36,64 +16,53 @@ const validateOptions = function (options) {
|
||||
});
|
||||
};

/**
 * Factory function to create a MeiliMongooseModel class which extends a Mongoose model.
 * This class contains static and instance methods to synchronize and manage the MeiliSearch index
 * corresponding to the MongoDB collection.
 *
 * @param {Object} config - Configuration object.
 * @param {Object} config.index - The MeiliSearch index object.
 * @param {Array<string>} config.attributesToIndex - List of attributes to index.
 * @returns {Function} A class definition that will be loaded into the Mongoose schema.
 */
// const createMeiliMongooseModel = function ({ index, indexName, client, attributesToIndex }) {
const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
  // The primary key is assumed to be the first attribute in the attributesToIndex array.
  const primaryKey = attributesToIndex[0];

  // MeiliMongooseModel is of type Mongoose.Model
  class MeiliMongooseModel {
    /**
     * Synchronizes the data between the MongoDB collection and the MeiliSearch index.
     * `syncWithMeili`: synchronizes the data between a MongoDB collection and a MeiliSearch index,
     * only triggered if there's ever a discrepancy determined by `api\lib\db\indexSync.js`.
     *
     * The synchronization process involves:
     * 1. Fetching all documents from the MongoDB collection and MeiliSearch index.
     * 2. Comparing documents from both sources.
     * 3. Deleting documents from MeiliSearch that no longer exist in MongoDB.
     * 4. Adding documents to MeiliSearch that exist in MongoDB but not in the index.
     * 5. Updating documents in MeiliSearch if key fields (such as `text` or `title`) differ.
     * 6. Updating the `_meiliIndex` field in MongoDB to indicate the indexing status.
     * 1. Fetches all documents from the MongoDB collection and the MeiliSearch index.
     * 2. Compares the documents from both sources.
     * 3. If a document exists in MeiliSearch but not in MongoDB, it's deleted from MeiliSearch.
     * 4. If a document exists in MongoDB but not in MeiliSearch, it's added to MeiliSearch.
     * 5. If a document exists in both but has different `text` or `title` fields (depending on the `primaryKey`), it's updated in MeiliSearch.
     * 6. After all operations, it updates the `_meiliIndex` field in MongoDB to indicate whether the document is indexed in MeiliSearch.
     *
     * Note: The function processes documents in batches because MeiliSearch's
     * `index.getDocuments` requires an exact limit and `index.addDocuments` does not handle
     * partial failures in a batch.
     * Note: This strategy does not use batch operations for Meilisearch as the `index.addDocuments` will discard
     * the entire batch if there's an error with one document, and will not throw an error if there's an issue.
     * Also, `index.getDocuments` needs an exact limit on the amount of documents to return, so we build the map in batches.
     *
     * @returns {Promise<void>} Resolves when the synchronization is complete.
     * @returns {Promise} A promise that resolves when the synchronization is complete.
     *
     * @throws {Error} Throws an error if there's an issue with adding a document to MeiliSearch.
     */
    static async syncWithMeili() {
      try {
        let moreDocuments = true;
        // Retrieve all MongoDB documents from the collection as plain JavaScript objects.
        const mongoDocuments = await this.find().lean();
        const format = (doc) => _.pick(doc, attributesToIndex);

        // Helper function to format a document by selecting only the attributes to index
        // and omitting keys starting with '$'.
        const format = (doc) =>
          _.omitBy(_.pick(doc, attributesToIndex), (v, k) => k.startsWith('$'));

        // Build a map of MongoDB documents for quick lookup based on the primary key.
        // Prepare for comparison
        const mongoMap = new Map(mongoDocuments.map((doc) => [doc[primaryKey], format(doc)]));
        const indexMap = new Map();
        let offset = 0;
        const batchSize = 1000;

        // Fetch documents from the MeiliSearch index in batches.
        while (moreDocuments) {
          const batch = await index.getDocuments({ limit: batchSize, offset });

          if (batch.results.length === 0) {
            moreDocuments = false;
          }

          for (const doc of batch.results) {
            indexMap.set(doc[primaryKey], format(doc));
          }

          offset += batchSize;
        }

@@ -101,12 +70,13 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {

        const updateOps = [];

        // Process documents present in the MeiliSearch index.
        // Iterate over Meili index documents
        for (const [id, doc] of indexMap) {
          const update = {};
          update[primaryKey] = id;
          if (mongoMap.has(id)) {
            // If document exists in MongoDB, check for discrepancies in key fields.
            // Case: Update
            // If document also exists in MongoDB, would be update case
            if (
              (doc.text && doc.text !== mongoMap.get(id).text) ||
              (doc.title && doc.title !== mongoMap.get(id).title)
@@ -122,7 +92,8 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
              await index.addDocuments([doc]);
            }
          } else {
            // If the document does not exist in MongoDB, delete it from MeiliSearch.
            // Case: Delete
            // If document does not exist in MongoDB, it's a delete case from meili index
            await index.deleteDocument(id);
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: false } } },
@@ -130,25 +101,24 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
          }
        }

        // Process documents present in MongoDB.
        // Iterate over MongoDB documents
        for (const [id, doc] of mongoMap) {
          const update = {};
          update[primaryKey] = id;
          // If the document is missing in the Meili index, add it.
          // Case: Insert
          // If document does not exist in Meili Index, it's an insert case
          if (!indexMap.has(id)) {
            await index.addDocuments([doc]);
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
            });
          } else if (doc._meiliIndex === false) {
            // If the document exists but is marked as not indexed, update the flag.
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
            });
          }
        }

        // Execute bulk update operations in MongoDB to update the _meiliIndex flags.
        if (updateOps.length > 0) {
          await this.collection.bulkWrite(updateOps);
          logger.debug(
@@ -162,47 +132,34 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
      }
    }
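
    // Standalone sketch of the pagination pattern used above: MeiliSearch's
    // `getDocuments` returns at most `limit` results per call, so the sync walks
    // the index page by page until an empty batch comes back. Host and key are
    // placeholders; assumes the official `meilisearch` JS client.
    // const buildIndexMapExample = async (indexName, primaryKey) => {
    //   const { MeiliSearch: MeiliClient } = require('meilisearch');
    //   const client = new MeiliClient({ host: 'http://localhost:7700', apiKey: 'masterKey' });
    //   const idx = client.index(indexName);
    //   const map = new Map();
    //   let offset = 0;
    //   let moreDocuments = true;
    //   while (moreDocuments) {
    //     const batch = await idx.getDocuments({ limit: 1000, offset });
    //     if (batch.results.length === 0) {
    //       moreDocuments = false; // empty page: the whole index has been read
    //     }
    //     for (const doc of batch.results) {
    //       map.set(doc[primaryKey], doc);
    //     }
    //     offset += 1000;
    //   }
    //   return map;
    // };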

    /**
     * Updates settings for the MeiliSearch index.
     *
     * @param {Object} settings - The settings to update on the MeiliSearch index.
     * @returns {Promise<Object>} Promise resolving to the update result.
     */
    // Set one or more settings of the meili index
    static async setMeiliIndexSettings(settings) {
      return await index.updateSettings(settings);
    }
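
    // Usage sketch for the wrapper above (the attribute names are illustrative
    // and `Conversation` stands in for any model with this plugin applied):
    // await Conversation.setMeiliIndexSettings({
    //   searchableAttributes: ['title', 'text'],
    //   filterableAttributes: ['user'],
    // });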

    /**
     * Searches the MeiliSearch index and optionally populates the results with data from MongoDB.
     *
     * @param {string} q - The search query.
     * @param {Object} params - Additional search parameters for MeiliSearch.
     * @param {boolean} populate - Whether to populate search hits with full MongoDB documents.
     * @returns {Promise<Object>} The search results with populated hits if requested.
     */
    // Search the index
    static async meiliSearch(q, params, populate) {
      const data = await index.search(q, params);

      // Populate hits with content from mongodb
      if (populate) {
        // Build a query using the primary key values from the search hits.
        // Find objects into mongodb matching `objectID` from Meili search
        const query = {};
        // query[primaryKey] = { $in: _.map(data.hits, primaryKey) };
        query[primaryKey] = _.map(data.hits, (hit) => cleanUpPrimaryKeyValue(hit[primaryKey]));
        // logger.debug('query', query);
        const hitsFromMongoose = await this.find(
          query,
          _.reduce(
            this.schema.obj,
            function (results, value, key) {
              return { ...results, [key]: 1 };
            },
            { _id: 1, __v: 1 },
          ),
        ).lean();

        // Build a projection object, including only keys that do not start with '$'.
        const projection = Object.keys(this.schema.obj).reduce(
          (results, key) => {
            if (!key.startsWith('$')) {
              results[key] = 1;
            }
            return results;
          },
          { _id: 1, __v: 1 },
        );

        // Retrieve the full documents from MongoDB.
        const hitsFromMongoose = await this.find(query, projection).lean();

        // Merge the MongoDB documents with the search hits.
        // Add additional data from mongodb into Meili search hits
        const populatedHits = data.hits.map(function (hit) {
          const query = {};
          query[primaryKey] = hit[primaryKey];
@@ -219,80 +176,51 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
      return data;
    }
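
    // Search usage sketch: passing `populate: true` merges full MongoDB
    // documents into the Meili hits while keeping Meili's ranking order.
    // The query and params are illustrative; `Message` stands in for any
    // model using this plugin.
    // const results = await Message.meiliSearch('quarterly report', { limit: 20 }, true);
    // for (const hit of results.hits) {
    //   console.log(hit.messageId, hit.text); // hits now carry MongoDB fields too
    // }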

    /**
     * Preprocesses the current document for indexing.
     *
     * This method:
     * - Picks only the defined attributes to index.
     * - Omits any keys starting with '$'.
     * - Replaces pipe characters ('|') in `conversationId` with '--'.
     * - Extracts and concatenates text from an array of content items.
     *
     * @returns {Object} The preprocessed object ready for indexing.
     */
    preprocessObjectForIndex() {
      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
        k.startsWith('$'),
      );
      const object = _.pick(this.toJSON(), attributesToIndex);
      // NOTE: MeiliSearch does not allow | in primary key, so we replace it with - for Bing convoIds
      // object.conversationId = object.conversationId.replace(/\|/g, '-');
      if (object.conversationId && object.conversationId.includes('|')) {
        object.conversationId = object.conversationId.replace(/\|/g, '--');
      }

      if (object.content && Array.isArray(object.content)) {
        object.text = parseTextParts(object.content);
        object.text = object.content
          .filter((item) => item.type === 'text' && item.text && item.text.value)
          .map((item) => item.text.value)
          .join(' ');
        delete object.content;
      }

      return object;
    }
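
    // Quick illustration of the pipe replacement above, which matters because
    // MeiliSearch rejects '|' in primary key values (Bing-style conversation
    // ids contain them):
    // const bingStyleId = 'abc|123|xyz';
    // console.log(bingStyleId.replace(/\|/g, '--')); // 'abc--123--xyz'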

    /**
     * Adds the current document to the MeiliSearch index.
     *
     * The method preprocesses the document, adds it to MeiliSearch, and then updates
     * the MongoDB document's `_meiliIndex` flag to true.
     *
     * @returns {Promise<void>}
     */
    // Push new document to Meili
    async addObjectToMeili() {
      const object = this.preprocessObjectForIndex();
      try {
        // logger.debug('Adding document to Meili', object);
        await index.addDocuments([object]);
      } catch (error) {
        // Error handling can be enhanced as needed.
        logger.error('[addObjectToMeili] Error adding document to Meili', error);
        // logger.debug('Error adding document to Meili');
        // logger.error(error);
      }

      await this.collection.updateMany({ _id: this._id }, { $set: { _meiliIndex: true } });
    }

    /**
     * Updates the current document in the MeiliSearch index.
     *
     * @returns {Promise<void>}
     */
    // Update an existing document in Meili
    async updateObjectToMeili() {
      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
        k.startsWith('$'),
      );
      const object = _.pick(this.toJSON(), attributesToIndex);
      await index.updateDocuments([object]);
    }

    /**
     * Deletes the current document from the MeiliSearch index.
     *
     * @returns {Promise<void>}
     */
    // Delete a document from Meili
    async deleteObjectFromMeili() {
      await index.deleteDocument(this._id);
    }

    /**
     * Post-save hook to synchronize the document with MeiliSearch.
     *
     * If the document is already indexed (i.e. `_meiliIndex` is true), it updates it;
     * otherwise, it adds the document to the index.
     */
    // * schema.post('save')
    postSaveHook() {
      if (this._meiliIndex) {
        this.updateObjectToMeili();
@@ -301,24 +229,14 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
      }
    }

    /**
     * Post-update hook to update the document in MeiliSearch.
     *
     * This hook is triggered after a document update, ensuring that changes are
     * propagated to the MeiliSearch index if the document is indexed.
     */
    // * schema.post('update')
    postUpdateHook() {
      if (this._meiliIndex) {
        this.updateObjectToMeili();
      }
    }

    /**
     * Post-remove hook to delete the document from MeiliSearch.
     *
     * This hook is triggered after a document is removed, ensuring that the document
     * is also removed from the MeiliSearch index if it was previously indexed.
     */
    // * schema.post('remove')
    postRemoveHook() {
      if (this._meiliIndex) {
        this.deleteObjectFromMeili();
@@ -329,27 +247,11 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
  return MeiliMongooseModel;
};

/**
 * Mongoose plugin to synchronize MongoDB collections with a MeiliSearch index.
 *
 * This plugin:
 * - Validates the provided options.
 * - Adds a `_meiliIndex` field to the schema to track indexing status.
 * - Sets up a MeiliSearch client and creates an index if it doesn't already exist.
 * - Loads class methods for syncing, searching, and managing documents in MeiliSearch.
 * - Registers Mongoose hooks (post-save, post-update, post-remove, etc.) to maintain index consistency.
 *
 * @param {mongoose.Schema} schema - The Mongoose schema to which the plugin is applied.
 * @param {Object} options - Configuration options.
 * @param {string} options.host - The MeiliSearch host.
 * @param {string} options.apiKey - The MeiliSearch API key.
 * @param {string} options.indexName - The name of the MeiliSearch index.
 * @param {string} options.primaryKey - The primary key field for indexing.
 */
module.exports = function mongoMeili(schema, options) {
  // Validate options for mongoMeili
  validateOptions(options);

  // Add _meiliIndex field to the schema to track if a document has been indexed in MeiliSearch.
  // Add meiliIndex to schema
  schema.add({
    _meiliIndex: {
      type: Boolean,
@@ -361,77 +263,69 @@ module.exports = function mongoMeili(schema, options) {

  const { host, apiKey, indexName, primaryKey } = options;

  // Setup the MeiliSearch client.
  // Setup MeiliSearch Client
  const client = new MeiliSearch({ host, apiKey });

  // Create the index asynchronously if it doesn't exist.
  // Asynchronously create the index
  client.createIndex(indexName, { primaryKey });

  // Setup the MeiliSearch index for this schema.
  // Setup the index to search for this schema
  const index = client.index(indexName);

  // Collect attributes from the schema that should be indexed.
  const attributesToIndex = [
    ..._.reduce(
      schema.obj,
      function (results, value, key) {
        return value.meiliIndex ? [...results, key] : results;
        // }, []), '_id'];
      },
      [],
    ),
  ];

  // Load the class methods into the schema.
  schema.loadClass(createMeiliMongooseModel({ index, indexName, client, attributesToIndex }));

  // Register Mongoose hooks to synchronize with MeiliSearch.

  // Post-save: synchronize after a document is saved.
  // Register hooks
  schema.post('save', function (doc) {
    doc.postSaveHook();
  });

  // Post-update: synchronize after a document is updated.
  schema.post('update', function (doc) {
    doc.postUpdateHook();
  });

  // Post-remove: synchronize after a document is removed.
  schema.post('remove', function (doc) {
    doc.postRemoveHook();
  });

  // Pre-deleteMany hook: remove corresponding documents from MeiliSearch when multiple documents are deleted.
  schema.pre('deleteMany', async function (next) {
    if (!meiliEnabled) {
      return next();
      next();
    }

    try {
      // Check if the schema has a "messages" field to determine if it's a conversation schema.
      if (Object.prototype.hasOwnProperty.call(schema.obj, 'messages')) {
        const convoIndex = client.index('convos');
        const deletedConvos = await mongoose.model('Conversation').find(this._conditions).lean();
        const promises = deletedConvos.map((convo) =>
          convoIndex.deleteDocument(convo.conversationId),
        );
        let promises = [];
        for (const convo of deletedConvos) {
          promises.push(convoIndex.deleteDocument(convo.conversationId));
        }
        await Promise.all(promises);
      }

      // Check if the schema has a "messageId" field to determine if it's a message schema.
      if (Object.prototype.hasOwnProperty.call(schema.obj, 'messageId')) {
        const messageIndex = client.index('messages');
        const deletedMessages = await mongoose.model('Message').find(this._conditions).lean();
        const promises = deletedMessages.map((message) =>
          messageIndex.deleteDocument(message.messageId),
        );
        let promises = [];
        for (const message of deletedMessages) {
          promises.push(messageIndex.deleteDocument(message.messageId));
        }
        await Promise.all(promises);
      }
      return next();
    } catch (error) {
      if (meiliEnabled) {
        logger.error(
          '[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion. Next startup may be slow due to syncing.',
          '[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion, next startup may be slow due to syncing',
          error,
        );
      }
@@ -439,19 +333,17 @@ module.exports = function mongoMeili(schema, options) {
    }
  });

  // Post-findOneAndUpdate hook: update MeiliSearch index after a document is updated via findOneAndUpdate.
  schema.post('findOneAndUpdate', async function (doc) {
    if (!meiliEnabled) {
      return;
    }

    // If the document is unfinished, do not update the index.
    if (doc.unfinished) {
      return;
    }

    let meiliDoc;
    // For conversation documents, try to fetch the document from the "convos" index.
    // Doc is a Conversation
    if (doc.messages) {
      try {
        meiliDoc = await client.index('convos').getDocument(doc.conversationId);
@@ -464,12 +356,10 @@ module.exports = function mongoMeili(schema, options) {
      }
    }

    // If the MeiliSearch document exists and the title is unchanged, do nothing.
    if (meiliDoc && meiliDoc.title === doc.title) {
      return;
    }

    // Otherwise, trigger a post-save hook to synchronize the document.
    doc.postSaveHook();
  });
};
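
// Wiring sketch: how a schema opts into the plugin, mirroring the convo and
// message schemas later in this diff. The first field marked `meiliIndex`
// doubles as the Meili primary key (per `attributesToIndex[0]` above); the
// require path and index name are illustrative.
// const mongooseLib = require('mongoose');
// const mongoMeiliPlugin = require('./mongoMeili');
// const exampleSchema = mongooseLib.Schema({
//   docId: { type: String, meiliIndex: true },
//   title: { type: String, meiliIndex: true },
// });
// exampleSchema.plugin(mongoMeiliPlugin, {
//   host: process.env.MEILI_HOST,
//   apiKey: process.env.MEILI_MASTER_KEY,
//   indexName: 'examples',
//   primaryKey: 'docId',
// });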

60  api/models/schema/action.js  Normal file
@@ -0,0 +1,60 @@
const mongoose = require('mongoose');

const { Schema } = mongoose;

const AuthSchema = new Schema(
  {
    authorization_type: String,
    custom_auth_header: String,
    type: {
      type: String,
      enum: ['service_http', 'oauth', 'none'],
    },
    authorization_content_type: String,
    authorization_url: String,
    client_url: String,
    scope: String,
    token_exchange_method: {
      type: String,
      enum: ['default_post', 'basic_auth_header', null],
    },
  },
  { _id: false },
);

const actionSchema = new Schema({
  user: {
    type: mongoose.Schema.Types.ObjectId,
    ref: 'User',
    index: true,
    required: true,
  },
  action_id: {
    type: String,
    index: true,
    required: true,
  },
  type: {
    type: String,
    default: 'action_prototype',
  },
  settings: Schema.Types.Mixed,
  agent_id: String,
  assistant_id: String,
  metadata: {
    api_key: String, // private, encrypted
    auth: AuthSchema,
    domain: {
      type: String,
      required: true,
    },
    // json_schema: Schema.Types.Mixed,
    privacy_policy_url: String,
    raw_spec: String,
    oauth_client_id: String, // private, encrypted
    oauth_client_secret: String, // private, encrypted
  },
});
// }, { minimize: false }); // Prevent removal of empty objects

module.exports = actionSchema;
@@ -1,34 +1,6 @@
import { Schema, Document, Types } from 'mongoose';
export interface IAgent extends Omit<Document, 'model'> {
  id: string;
  name?: string;
  description?: string;
  instructions?: string;
  avatar?: {
    filepath: string;
    source: string;
  };
  provider: string;
  model: string;
  model_parameters?: Record<string, unknown>;
  artifacts?: string;
  access_level?: number;
  recursion_limit?: number;
  tools?: string[];
  tool_kwargs?: Array<unknown>;
  actions?: string[];
  author: Types.ObjectId;
  authorName?: string;
  hide_sequential_outputs?: boolean;
  end_after_tools?: boolean;
  agent_ids?: string[];
  isCollaborative?: boolean;
  conversation_starters?: string[];
  tool_resources?: unknown;
  projectIds?: Types.ObjectId[];
}
const mongoose = require('mongoose');

const agentSchema = new Schema<IAgent>(
const agentSchema = mongoose.Schema(
  {
    id: {
      type: String,
@@ -46,7 +18,10 @@ const agentSchema = new Schema<IAgent>(
      type: String,
    },
    avatar: {
      type: Schema.Types.Mixed,
      type: {
        filepath: String,
        source: String,
      },
      default: undefined,
    },
    provider: {
@@ -66,22 +41,19 @@ const agentSchema = new Schema<IAgent>(
    access_level: {
      type: Number,
    },
    recursion_limit: {
      type: Number,
    },
    tools: {
      type: [String],
      default: undefined,
    },
    tool_kwargs: {
      type: [{ type: Schema.Types.Mixed }],
      type: [{ type: mongoose.Schema.Types.Mixed }],
    },
    actions: {
      type: [String],
      default: undefined,
    },
    author: {
      type: Schema.Types.ObjectId,
      type: mongoose.Schema.Types.ObjectId,
      ref: 'User',
      required: true,
    },
@@ -107,11 +79,11 @@ const agentSchema = new Schema<IAgent>(
      default: [],
    },
    tool_resources: {
      type: Schema.Types.Mixed,
      type: mongoose.Schema.Types.Mixed,
      default: {},
    },
    projectIds: {
      type: [Schema.Types.ObjectId],
      type: [mongoose.Schema.Types.ObjectId],
      ref: 'Project',
      index: true,
    },
@@ -121,4 +93,4 @@ const agentSchema = new Schema<IAgent>(
  },
);

export default agentSchema;
module.exports = agentSchema;
@@ -1,23 +1,9 @@
import { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');

export interface IAssistant extends Document {
  user: Types.ObjectId;
  assistant_id: string;
  avatar?: {
    filepath: string;
    source: string;
  };
  conversation_starters?: string[];
  access_level?: number;
  file_ids?: string[];
  actions?: string[];
  append_current_datetime?: boolean;
}

const assistantSchema = new Schema<IAssistant>(
const assistantSchema = mongoose.Schema(
  {
    user: {
      type: Schema.Types.ObjectId,
      type: mongoose.Schema.Types.ObjectId,
      ref: 'User',
      required: true,
    },
@@ -27,7 +13,10 @@ const assistantSchema = new Schema<IAssistant>(
      required: true,
    },
    avatar: {
      type: Schema.Types.Mixed,
      type: {
        filepath: String,
        source: String,
      },
      default: undefined,
    },
    conversation_starters: {
@@ -49,4 +38,4 @@ const assistantSchema = new Schema<IAssistant>(
  },
);

export default assistantSchema;
module.exports = assistantSchema;
17  api/models/schema/balance.js  Normal file
@@ -0,0 +1,17 @@
const mongoose = require('mongoose');

const balanceSchema = mongoose.Schema({
  user: {
    type: mongoose.Schema.Types.ObjectId,
    ref: 'User',
    index: true,
    required: true,
  },
  // 1000 tokenCredits = 1 mill ($0.001 USD)
  tokenCredits: {
    type: Number,
    default: 0,
  },
});

module.exports = balanceSchema;
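
// Arithmetic behind the comment above: 1000 tokenCredits = 1 mill = $0.001,
// so a balance of 20,000 credits works out to two US cents.
// const tokenCredits = 20000;
// const usd = (tokenCredits / 1000) * 0.001;
// console.log(usd); // 0.02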
@@ -1,15 +1,6 @@
import { Schema, Document } from 'mongoose';
const mongoose = require('mongoose');

export interface IBanner extends Document {
  bannerId: string;
  message: string;
  displayFrom: Date;
  displayTo?: Date;
  type: 'banner' | 'popup';
  isPublic: boolean;
}

const bannerSchema = new Schema<IBanner>(
const bannerSchema = mongoose.Schema(
  {
    bannerId: {
      type: String,
@@ -37,7 +28,9 @@ const bannerSchema = new Schema<IBanner>(
      default: false,
    },
  },

  { timestamps: true },
);

export default bannerSchema;
const Banner = mongoose.model('Banner', bannerSchema);
module.exports = Banner;
19  api/models/schema/categories.js  Normal file
@@ -0,0 +1,19 @@
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

const categoriesSchema = new Schema({
  label: {
    type: String,
    required: true,
    unique: true,
  },
  value: {
    type: String,
    required: true,
    unique: true,
  },
});

const categories = mongoose.model('categories', categoriesSchema);

module.exports = { Categories: categories };
32  api/models/schema/conversationTagSchema.js  Normal file
@@ -0,0 +1,32 @@
const mongoose = require('mongoose');

const conversationTagSchema = mongoose.Schema(
  {
    tag: {
      type: String,
      index: true,
    },
    user: {
      type: String,
      index: true,
    },
    description: {
      type: String,
      index: true,
    },
    count: {
      type: Number,
      default: 0,
    },
    position: {
      type: Number,
      default: 0,
      index: true,
    },
  },
  { timestamps: true },
);

conversationTagSchema.index({ tag: 1, user: 1 }, { unique: true });

module.exports = mongoose.model('ConversationTag', conversationTagSchema);
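
// Behavior sketch for the compound unique index above: the same tag name may
// exist for different users, but a duplicate (tag, user) pair is rejected
// with MongoDB's duplicate-key error. Usage is illustrative.
// await ConversationTag.create({ tag: 'work', user: 'user-a' });
// await ConversationTag.create({ tag: 'work', user: 'user-b' }); // ok: different user
// try {
//   await ConversationTag.create({ tag: 'work', user: 'user-a' }); // duplicate pair
// } catch (err) {
//   console.log(err.code); // 11000
// }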
@@ -1,7 +1,46 @@
const mongoose = require('mongoose');
const mongoMeili = require('../plugins/mongoMeili');

const { convoSchema } = require('@librechat/data-schemas');
const { conversationPreset } = require('./defaults');
const convoSchema = mongoose.Schema(
  {
    conversationId: {
      type: String,
      unique: true,
      required: true,
      index: true,
      meiliIndex: true,
    },
    title: {
      type: String,
      default: 'New Chat',
      meiliIndex: true,
    },
    user: {
      type: String,
      index: true,
    },
    messages: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Message' }],
    agentOptions: {
      type: mongoose.Schema.Types.Mixed,
    },
    ...conversationPreset,
    agent_id: {
      type: String,
    },
    tags: {
      type: [String],
      default: [],
      meiliIndex: true,
    },
    files: {
      type: [String],
    },
    expiredAt: {
      type: Date,
    },
  },
  { timestamps: true },
);

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  convoSchema.plugin(mongoMeili, {
@@ -13,6 +52,10 @@ if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  });
}

convoSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
convoSchema.index({ createdAt: 1, updatedAt: 1 });
convoSchema.index({ conversationId: 1, user: 1 }, { unique: true });

const Conversation = mongoose.models.Conversation || mongoose.model('Conversation', convoSchema);

module.exports = Conversation;
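
// TTL sketch: `expireAfterSeconds: 0` tells MongoDB's TTL monitor to remove a
// conversation once the clock passes its `expiredAt` value; documents without
// `expiredAt` are never expired. The 30-day window below is arbitrary.
// await Conversation.updateOne(
//   { conversationId: 'abc123' },
//   { $set: { expiredAt: new Date(Date.now() + 30 * 24 * 60 * 60 * 1000) } },
// );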

@@ -1,7 +1,6 @@
import { Schema } from 'mongoose';
const mongoose = require('mongoose');

// @ts-ignore
export const conversationPreset = {
const conversationPreset = {
  // endpoint: [azureOpenAI, openAI, anthropic, chatGPTBrowser]
  endpoint: {
    type: String,
@@ -27,7 +26,7 @@ export const conversationPreset = {
    required: false,
  },
  // for google only
  examples: { type: [{ type: Schema.Types.Mixed }], default: undefined },
  examples: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
  modelLabel: {
    type: String,
    required: false,
@@ -136,3 +135,7 @@ export const conversationPreset = {
    type: String,
  },
};

module.exports = {
  conversationPreset,
};
111  api/models/schema/fileSchema.js  Normal file
@@ -0,0 +1,111 @@
const { FileSources } = require('librechat-data-provider');
const mongoose = require('mongoose');

/**
 * @typedef {Object} MongoFile
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {number} [__v] - MongoDB Version Key
 * @property {ObjectId} user - User ID
 * @property {string} [conversationId] - Optional conversation ID
 * @property {string} file_id - File identifier
 * @property {string} [temp_file_id] - Temporary File identifier
 * @property {number} bytes - Size of the file in bytes
 * @property {string} filename - Name of the file
 * @property {string} filepath - Location of the file
 * @property {'file'} object - Type of object, always 'file'
 * @property {string} type - Type of file
 * @property {number} [usage=0] - Number of uses of the file
 * @property {string} [context] - Context of the file origin
 * @property {boolean} [embedded=false] - Whether or not the file is embedded in vector db
 * @property {string} [model] - The model to identify the group region of the file (for Azure OpenAI hosting)
 * @property {string} [source] - The source of the file (e.g., from FileSources)
 * @property {number} [width] - Optional width of the file
 * @property {number} [height] - Optional height of the file
 * @property {Object} [metadata] - Metadata related to the file
 * @property {string} [metadata.fileIdentifier] - Unique identifier for the file in metadata
 * @property {Date} [expiresAt] - Optional expiration date of the file
 * @property {Date} [createdAt] - Date when the file was created
 * @property {Date} [updatedAt] - Date when the file was updated
 */

/** @type {MongooseSchema<MongoFile>} */
const fileSchema = mongoose.Schema(
  {
    user: {
      type: mongoose.Schema.Types.ObjectId,
      ref: 'User',
      index: true,
      required: true,
    },
    conversationId: {
      type: String,
      ref: 'Conversation',
      index: true,
    },
    file_id: {
      type: String,
      // required: true,
      index: true,
    },
    temp_file_id: {
      type: String,
      // required: true,
    },
    bytes: {
      type: Number,
      required: true,
    },
    filename: {
      type: String,
      required: true,
    },
    filepath: {
      type: String,
      required: true,
    },
    object: {
      type: String,
      required: true,
      default: 'file',
    },
    embedded: {
      type: Boolean,
    },
    type: {
      type: String,
      required: true,
    },
    context: {
      type: String,
      // required: true,
    },
    usage: {
      type: Number,
      required: true,
      default: 0,
    },
    source: {
      type: String,
      default: FileSources.local,
    },
    model: {
      type: String,
    },
    width: Number,
    height: Number,
    metadata: {
      fileIdentifier: String,
    },
    expiresAt: {
      type: Date,
      expires: 3600, // 1 hour in seconds
    },
  },
  {
    timestamps: true,
  },
);

fileSchema.index({ createdAt: 1, updatedAt: 1 });

module.exports = fileSchema;
@@ -1,13 +1,6 @@
import mongoose, { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');

export interface IKey extends Document {
  userId: Types.ObjectId;
  name: string;
  value: string;
  expiresAt?: Date;
}

const keySchema: Schema<IKey> = new Schema({
const keySchema = mongoose.Schema({
  userId: {
    type: mongoose.Schema.Types.ObjectId,
    ref: 'User',
@@ -28,4 +21,4 @@ const keySchema: Schema<IKey> = new Schema({

keySchema.index({ expiresAt: 1 }, { expireAfterSeconds: 0 });

export default keySchema;
module.exports = keySchema;
@@ -1,6 +1,157 @@
const mongoose = require('mongoose');
const mongoMeili = require('~/models/plugins/mongoMeili');
const { messageSchema } = require('@librechat/data-schemas');
const messageSchema = mongoose.Schema(
  {
    messageId: {
      type: String,
      unique: true,
      required: true,
      index: true,
      meiliIndex: true,
    },
    conversationId: {
      type: String,
      index: true,
      required: true,
      meiliIndex: true,
    },
    user: {
      type: String,
      index: true,
      required: true,
      default: null,
    },
    model: {
      type: String,
      default: null,
    },
    endpoint: {
      type: String,
    },
    conversationSignature: {
      type: String,
    },
    clientId: {
      type: String,
    },
    invocationId: {
      type: Number,
    },
    parentMessageId: {
      type: String,
    },
    tokenCount: {
      type: Number,
    },
    summaryTokenCount: {
      type: Number,
    },
    sender: {
      type: String,
      meiliIndex: true,
    },
    text: {
      type: String,
      meiliIndex: true,
    },
    summary: {
      type: String,
    },
    isCreatedByUser: {
      type: Boolean,
      required: true,
      default: false,
    },
    unfinished: {
      type: Boolean,
      default: false,
    },
    error: {
      type: Boolean,
      default: false,
    },
    finish_reason: {
      type: String,
    },
    _meiliIndex: {
      type: Boolean,
      required: false,
      select: false,
      default: false,
    },
    files: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
    plugin: {
      type: {
        latest: {
          type: String,
          required: false,
        },
        inputs: {
          type: [mongoose.Schema.Types.Mixed],
          required: false,
          default: undefined,
        },
        outputs: {
          type: String,
          required: false,
        },
      },
      default: undefined,
    },
    plugins: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
    content: {
      type: [{ type: mongoose.Schema.Types.Mixed }],
      default: undefined,
      meiliIndex: true,
    },
    thread_id: {
      type: String,
    },
    /* frontend components */
    iconURL: {
      type: String,
    },
    attachments: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
    /*
    attachments: {
      type: [
        {
          file_id: String,
          filename: String,
          filepath: String,
          expiresAt: Date,
          width: Number,
          height: Number,
          type: String,
          conversationId: String,
          messageId: {
            type: String,
            required: true,
          },
          toolCallId: String,
        },
      ],
      default: undefined,
    },
    */
    expiredAt: {
      type: Date,
    },
    iv: {
      type: String,
      default: null,
    },
    authTag: {
      type: String,
      default: null,
    },
    encryptedKey: {
      type: String,
      default: null,
    },
  },
  { timestamps: true },
);

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  messageSchema.plugin(mongoMeili, {
@@ -10,7 +161,11 @@ if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
    primaryKey: 'messageId',
  });
}
messageSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
messageSchema.index({ createdAt: 1 });
messageSchema.index({ messageId: 1, user: 1 }, { unique: true });

/** @type {mongoose.Model<TMessage>} */
const Message = mongoose.models.Message || mongoose.model('Message', messageSchema);

module.exports = Message;
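
// The new `iv`, `authTag`, and `encryptedKey` fields fit an AES-GCM envelope
// scheme, but how they are populated is not shown in this diff; the following
// is only a plausible sketch using Node's built-in crypto. `recipientKey` is
// a hypothetical 32-byte key.
// const crypto = require('crypto');
// function encryptMessageTextExample(text, recipientKey) {
//   const contentKey = crypto.randomBytes(32); // fresh per-message key
//   const iv = crypto.randomBytes(12); // 96-bit IV, conventional for GCM
//   const cipher = crypto.createCipheriv('aes-256-gcm', contentKey, iv);
//   const ciphertext = Buffer.concat([cipher.update(text, 'utf8'), cipher.final()]);
//   const authTag = cipher.getAuthTag();
//   // Wrap the content key for the recipient (also AES-GCM here, for simplicity)
//   const keyIv = crypto.randomBytes(12);
//   const wrap = crypto.createCipheriv('aes-256-gcm', recipientKey, keyIv);
//   const wrappedKey = Buffer.concat([wrap.update(contentKey), wrap.final()]);
//   return {
//     text: ciphertext.toString('base64'),
//     iv: iv.toString('base64'),
//     authTag: authTag.toString('base64'),
//     encryptedKey: Buffer.concat([keyIv, wrap.getAuthTag(), wrappedKey]).toString('base64'),
//   };
// }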

@@ -1,5 +1,25 @@
const mongoose = require('mongoose');
const { pluginAuthSchema } = require('@librechat/data-schemas');

const pluginAuthSchema = mongoose.Schema(
  {
    authField: {
      type: String,
      required: true,
    },
    value: {
      type: String,
      required: true,
    },
    userId: {
      type: String,
      required: true,
    },
    pluginKey: {
      type: String,
    },
  },
  { timestamps: true },
);

const PluginAuth = mongoose.models.Plugin || mongoose.model('PluginAuth', pluginAuthSchema);


@@ -1,5 +1,36 @@
const mongoose = require('mongoose');
const { presetSchema } = require('@librechat/data-schemas');
const { conversationPreset } = require('./defaults');
const presetSchema = mongoose.Schema(
  {
    presetId: {
      type: String,
      unique: true,
      required: true,
      index: true,
    },
    title: {
      type: String,
      default: 'New Chat',
      meiliIndex: true,
    },
    user: {
      type: String,
      default: null,
    },
    defaultPreset: {
      type: Boolean,
    },
    order: {
      type: Number,
    },
    ...conversationPreset,
    agentOptions: {
      type: mongoose.Schema.Types.Mixed,
      default: null,
    },
  },
  { timestamps: true },
);

const Preset = mongoose.models.Preset || mongoose.model('Preset', presetSchema);


35  api/models/schema/projectSchema.js  Normal file
@@ -0,0 +1,35 @@
const { Schema } = require('mongoose');

/**
 * @typedef {Object} MongoProject
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {string} name - The name of the project
 * @property {ObjectId[]} promptGroupIds - Array of PromptGroup IDs associated with the project
 * @property {Date} [createdAt] - Date when the project was created (added by timestamps)
 * @property {Date} [updatedAt] - Date when the project was last updated (added by timestamps)
 */

const projectSchema = new Schema(
  {
    name: {
      type: String,
      required: true,
      index: true,
    },
    promptGroupIds: {
      type: [Schema.Types.ObjectId],
      ref: 'PromptGroup',
      default: [],
    },
    agentIds: {
      type: [String],
      ref: 'Agent',
      default: [],
    },
  },
  {
    timestamps: true,
  },
);

module.exports = projectSchema;
118  api/models/schema/promptSchema.js  Normal file
@@ -0,0 +1,118 @@
const mongoose = require('mongoose');
const { Constants } = require('librechat-data-provider');
const Schema = mongoose.Schema;

/**
 * @typedef {Object} MongoPromptGroup
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {string} name - The name of the prompt group
 * @property {ObjectId} author - The author of the prompt group
 * @property {ObjectId} [projectId=null] - The project ID of the prompt group
 * @property {ObjectId} [productionId=null] - The ID of the production prompt for the group
 * @property {string} authorName - The name of the author of the prompt group
 * @property {number} [numberOfGenerations=0] - Number of generations the prompt group has
 * @property {string} [oneliner=''] - Oneliner description of the prompt group
 * @property {string} [category=''] - Category of the prompt group
 * @property {string} [command] - Command for the prompt group
 * @property {Date} [createdAt] - Date when the prompt group was created (added by timestamps)
 * @property {Date} [updatedAt] - Date when the prompt group was last updated (added by timestamps)
 */

const promptGroupSchema = new Schema(
  {
    name: {
      type: String,
      required: true,
      index: true,
    },
    numberOfGenerations: {
      type: Number,
      default: 0,
    },
    oneliner: {
      type: String,
      default: '',
    },
    category: {
      type: String,
      default: '',
      index: true,
    },
    projectIds: {
      type: [Schema.Types.ObjectId],
      ref: 'Project',
      index: true,
    },
    productionId: {
      type: Schema.Types.ObjectId,
      ref: 'Prompt',
      required: true,
      index: true,
    },
    author: {
      type: Schema.Types.ObjectId,
      ref: 'User',
      required: true,
      index: true,
    },
    authorName: {
      type: String,
      required: true,
    },
    command: {
      type: String,
      index: true,
      validate: {
        validator: function (v) {
          return v === undefined || v === null || v === '' || /^[a-z0-9-]+$/.test(v);
        },
        message: (props) =>
          `${props.value} is not a valid command. Only lowercase alphanumeric characters and hyphens (-) are allowed.`,
      },
      maxlength: [
        Constants.COMMANDS_MAX_LENGTH,
        `Command cannot be longer than ${Constants.COMMANDS_MAX_LENGTH} characters`,
      ],
    },
  },
  {
    timestamps: true,
  },
);

const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);

const promptSchema = new Schema(
  {
    groupId: {
      type: Schema.Types.ObjectId,
      ref: 'PromptGroup',
      required: true,
      index: true,
    },
    author: {
      type: Schema.Types.ObjectId,
      ref: 'User',
      required: true,
    },
    prompt: {
      type: String,
      required: true,
    },
    type: {
      type: String,
      enum: ['text', 'chat'],
      required: true,
    },
  },
  {
    timestamps: true,
  },
);

const Prompt = mongoose.model('Prompt', promptSchema);

promptSchema.index({ createdAt: 1, updatedAt: 1 });
promptGroupSchema.index({ createdAt: 1, updatedAt: 1 });

module.exports = { Prompt, PromptGroup };
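
// Quick check of the `command` validator's regex on its own: empty values are
// allowed; otherwise only lowercase alphanumerics and hyphens pass.
// const isValidCommand = (v) =>
//   v === undefined || v === null || v === '' || /^[a-z0-9-]+$/.test(v);
// console.log(isValidCommand('daily-standup')); // true
// console.log(isValidCommand('Daily_Standup')); // false: uppercase and underscore
// console.log(isValidCommand(''));              // true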
@@ -1,33 +1,7 @@
import { Schema, Document } from 'mongoose';
import { PermissionTypes, Permissions } from 'librechat-data-provider';
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const mongoose = require('mongoose');

export interface IRole extends Document {
  name: string;
  [PermissionTypes.BOOKMARKS]?: {
    [Permissions.USE]?: boolean;
  };
  [PermissionTypes.PROMPTS]?: {
    [Permissions.SHARED_GLOBAL]?: boolean;
    [Permissions.USE]?: boolean;
    [Permissions.CREATE]?: boolean;
  };
  [PermissionTypes.AGENTS]?: {
    [Permissions.SHARED_GLOBAL]?: boolean;
    [Permissions.USE]?: boolean;
    [Permissions.CREATE]?: boolean;
  };
  [PermissionTypes.MULTI_CONVO]?: {
    [Permissions.USE]?: boolean;
  };
  [PermissionTypes.TEMPORARY_CHAT]?: {
    [Permissions.USE]?: boolean;
  };
  [PermissionTypes.RUN_CODE]?: {
    [Permissions.USE]?: boolean;
  };
}

const roleSchema: Schema<IRole> = new Schema({
const roleSchema = new mongoose.Schema({
  name: {
    type: String,
    required: true,
@@ -88,4 +62,6 @@ const roleSchema: Schema<IRole> = new Schema({
  },
});

export default roleSchema;
const Role = mongoose.model('Role', roleSchema);

module.exports = Role;
20  api/models/schema/session.js  Normal file
@@ -0,0 +1,20 @@
const mongoose = require('mongoose');

const sessionSchema = mongoose.Schema({
  refreshTokenHash: {
    type: String,
    required: true,
  },
  expiration: {
    type: Date,
    required: true,
    expires: 0,
  },
  user: {
    type: mongoose.Schema.Types.ObjectId,
    ref: 'User',
    required: true,
  },
});

module.exports = sessionSchema;
@@ -1,17 +1,6 @@
import mongoose, { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');

export interface ISharedLink extends Document {
  conversationId: string;
  title?: string;
  user?: string;
  messages?: Types.ObjectId[];
  shareId?: string;
  isPublic: boolean;
  createdAt?: Date;
  updatedAt?: Date;
}

const shareSchema: Schema<ISharedLink> = new Schema(
const shareSchema = mongoose.Schema(
  {
    conversationId: {
      type: String,
@@ -38,4 +27,4 @@ const shareSchema: Schema<ISharedLink> = new Schema(
  { timestamps: true },
);

export default shareSchema;
module.exports = mongoose.model('SharedLink', shareSchema);
@@ -1,17 +1,7 @@
import { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

export interface IToken extends Document {
  userId: Types.ObjectId;
  email?: string;
  type?: string;
  identifier?: string;
  token: string;
  createdAt: Date;
  expiresAt: Date;
  metadata?: Map<string, unknown>;
}

const tokenSchema: Schema<IToken> = new Schema({
const tokenSchema = new Schema({
  userId: {
    type: Schema.Types.ObjectId,
    required: true,
@@ -20,9 +10,7 @@ const tokenSchema: Schema<IToken> = new Schema({
  email: {
    type: String,
  },
  type: {
    type: String,
  },
  type: String,
  identifier: {
    type: String,
  },
@@ -47,4 +35,4 @@ const tokenSchema: Schema<IToken> = new Schema({

tokenSchema.index({ expiresAt: 1 }, { expireAfterSeconds: 0 });

export default tokenSchema;
module.exports = tokenSchema;
54  api/models/schema/toolCallSchema.js  Normal file
@@ -0,0 +1,54 @@
const mongoose = require('mongoose');

/**
 * @typedef {Object} ToolCallData
 * @property {string} conversationId - The ID of the conversation
 * @property {string} messageId - The ID of the message
 * @property {string} toolId - The ID of the tool
 * @property {string | ObjectId} user - The user's ObjectId
 * @property {unknown} [result] - Optional result data
 * @property {TAttachment[]} [attachments] - Optional attachments data
 * @property {number} [blockIndex] - Optional code block index
 * @property {number} [partIndex] - Optional part index
 */

/** @type {MongooseSchema<ToolCallData>} */
const toolCallSchema = mongoose.Schema(
  {
    conversationId: {
      type: String,
      required: true,
    },
    messageId: {
      type: String,
      required: true,
    },
    toolId: {
      type: String,
      required: true,
    },
    user: {
      type: mongoose.Schema.Types.ObjectId,
      ref: 'User',
      required: true,
    },
    result: {
      type: mongoose.Schema.Types.Mixed,
    },
    attachments: {
      type: mongoose.Schema.Types.Mixed,
    },
    blockIndex: {
      type: Number,
    },
    partIndex: {
      type: Number,
    },
  },
  { timestamps: true },
);

toolCallSchema.index({ messageId: 1, user: 1 });
toolCallSchema.index({ conversationId: 1, user: 1 });

module.exports = mongoose.model('ToolCall', toolCallSchema);
@@ -1,24 +1,6 @@
import mongoose, { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');

// @ts-ignore
export interface ITransaction extends Document {
  user: Types.ObjectId;
  conversationId?: string;
  tokenType: 'prompt' | 'completion' | 'credits';
  model?: string;
  context?: string;
  valueKey?: string;
  rate?: number;
  rawAmount?: number;
  tokenValue?: number;
  inputTokens?: number;
  writeTokens?: number;
  readTokens?: number;
  createdAt?: Date;
  updatedAt?: Date;
}

const transactionSchema: Schema<ITransaction> = new Schema(
const transactionSchema = mongoose.Schema(
  {
    user: {
      type: mongoose.Schema.Types.ObjectId,
@@ -57,4 +39,4 @@ const transactionSchema: Schema<ITransaction> = new Schema(
  },
);

export default transactionSchema;
module.exports = transactionSchema;
171  api/models/schema/userSchema.js  Normal file
@@ -0,0 +1,171 @@
const mongoose = require('mongoose');
const { SystemRoles } = require('librechat-data-provider');

/**
 * @typedef {Object} MongoSession
 * @property {string} [refreshToken] - The refresh token
 */

/**
 * @typedef {Object} MongoUser
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {string} [name] - The user's name
 * @property {string} [username] - The user's username, in lowercase
 * @property {string} email - The user's email address
 * @property {boolean} emailVerified - Whether the user's email is verified
 * @property {string} [password] - The user's password, trimmed with 8-128 characters
 * @property {string} [avatar] - The URL of the user's avatar
 * @property {string} provider - The provider of the user's account (e.g., 'local', 'google')
 * @property {string} [role='USER'] - The role of the user
 * @property {string} [googleId] - Optional Google ID for the user
 * @property {string} [facebookId] - Optional Facebook ID for the user
 * @property {string} [openidId] - Optional OpenID ID for the user
 * @property {string} [ldapId] - Optional LDAP ID for the user
 * @property {string} [githubId] - Optional GitHub ID for the user
 * @property {string} [discordId] - Optional Discord ID for the user
 * @property {string} [appleId] - Optional Apple ID for the user
 * @property {Array} [plugins=[]] - List of plugins used by the user
 * @property {Array.<MongoSession>} [refreshToken] - List of sessions with refresh tokens
 * @property {Date} [expiresAt] - Optional expiration date of the user record
 * @property {string} [encryptionPublicKey] - The user's encryption public key
 * @property {string} [encryptedPrivateKey] - The user's encrypted private key
 * @property {string} [encryptionSalt] - The salt used for key derivation (e.g., PBKDF2)
 * @property {string} [encryptionIV] - The IV used for encrypting the private key
 * @property {Date} [createdAt] - Date when the user was created (added by timestamps)
 * @property {Date} [updatedAt] - Date when the user was last updated (added by timestamps)
 */

/** @type {MongooseSchema<MongoSession>} */
const Session = mongoose.Schema({
  refreshToken: {
    type: String,
    default: '',
  },
});

const backupCodeSchema = mongoose.Schema({
  codeHash: { type: String, required: true },
  used: { type: Boolean, default: false },
  usedAt: { type: Date, default: null },
});

/** @type {MongooseSchema<MongoUser>} */
const userSchema = mongoose.Schema(
  {
    name: {
      type: String,
    },
    username: {
      type: String,
      lowercase: true,
      default: '',
    },
    email: {
      type: String,
      required: [true, 'can\'t be blank'],
      lowercase: true,
      unique: true,
      match: [/\S+@\S+\.\S+/, 'is invalid'],
      index: true,
    },
    emailVerified: {
      type: Boolean,
      required: true,
      default: false,
    },
    password: {
      type: String,
      trim: true,
      minlength: 8,
      maxlength: 128,
    },
    avatar: {
      type: String,
      required: false,
    },
    provider: {
      type: String,
      required: true,
      default: 'local',
    },
    role: {
      type: String,
      default: SystemRoles.USER,
    },
    googleId: {
      type: String,
      unique: true,
      sparse: true,
    },
    facebookId: {
      type: String,
      unique: true,
      sparse: true,
    },
    openidId: {
      type: String,
      unique: true,
      sparse: true,
    },
    ldapId: {
      type: String,
      unique: true,
      sparse: true,
    },
    githubId: {
      type: String,
      unique: true,
      sparse: true,
    },
    discordId: {
      type: String,
      unique: true,
      sparse: true,
    },
    appleId: {
      type: String,
      unique: true,
      sparse: true,
    },
    plugins: {
      type: Array,
    },
    totpSecret: {
      type: String,
    },
    backupCodes: {
      type: [backupCodeSchema],
    },
    refreshToken: {
      type: [Session],
    },
    expiresAt: {
      type: Date,
      expires: 604800, // 7 days in seconds
    },
    termsAccepted: {
      type: Boolean,
      default: false,
    },
    encryptionPublicKey: {
      type: String,
      default: null,
    },
    encryptedPrivateKey: {
      type: String,
      default: null,
    },
    encryptionSalt: {
      type: String,
      default: null,
    },
    encryptionIV: {
      type: String,
      default: null,
    },
  },

  { timestamps: true },
);

module.exports = userSchema;
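
// `encryptionSalt` plus `encryptedPrivateKey` suggest the private key is
// wrapped with a password-derived key (the JSDoc above names PBKDF2). A
// derivation sketch; the iteration count and digest are assumptions, not
// taken from this diff.
// const { pbkdf2Sync } = require('crypto');
// function deriveWrappingKeyExample(password, saltBase64) {
//   const salt = Buffer.from(saltBase64, 'base64');
//   return pbkdf2Sync(password, salt, 600000, 32, 'sha256'); // 32 bytes for AES-256
// }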
@@ -36,7 +36,7 @@ const spendTokens = async (txData, tokenUsage) => {
    prompt = await Transaction.create({
      ...txData,
      tokenType: 'prompt',
      rawAmount: promptTokens === 0 ? 0 : -Math.max(promptTokens, 0),
      rawAmount: -Math.max(promptTokens, 0),
    });
  }

@@ -44,7 +44,7 @@ const spendTokens = async (txData, tokenUsage) => {
    completion = await Transaction.create({
      ...txData,
      tokenType: 'completion',
      rawAmount: completionTokens === 0 ? 0 : -Math.max(completionTokens, 0),
      rawAmount: -Math.max(completionTokens, 0),
    });
  }
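
// Subtlety behind the removed ternary above: -Math.max(0, 0) evaluates to -0
// in JavaScript. The old guard normalized it to 0; without it, a zero-token
// transaction records -0, which is numerically equal to 0 but distinguishable
// via Object.is (the spec change below mocks exactly this -0 case).
// console.log(-Math.max(0, 0));  // -0
// console.log(-0 === 0);         // true
// console.log(Object.is(-0, 0)); // false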
|
||||
|
||||
|
||||
@@ -1,10 +1,17 @@
 const mongoose = require('mongoose');
-const { MongoMemoryServer } = require('mongodb-memory-server');
+const { Transaction } = require('./Transaction');
+const Balance = require('./Balance');
+const { spendTokens, spendStructuredTokens } = require('./spendTokens');

 // Mock the logger to prevent console output during tests
+jest.mock('./Transaction', () => ({
+  Transaction: {
+    create: jest.fn(),
+    createStructured: jest.fn(),
+  },
+}));
+
+jest.mock('./Balance', () => ({
+  findOne: jest.fn(),
+  findOneAndUpdate: jest.fn(),
+}));
+
 jest.mock('~/config', () => ({
   logger: {
     debug: jest.fn(),
@@ -12,46 +19,19 @@ jest.mock('~/config', () => ({
   },
 }));

-// Mock the Config service
-const { getBalanceConfig } = require('~/server/services/Config');
-jest.mock('~/server/services/Config');
-
-// Import after mocking
-const { spendTokens, spendStructuredTokens } = require('./spendTokens');
-const { Transaction } = require('./Transaction');
-const Balance = require('./Balance');
 describe('spendTokens', () => {
-  let mongoServer;
-  let userId;
-
-  beforeAll(async () => {
-    mongoServer = await MongoMemoryServer.create();
-    const mongoUri = mongoServer.getUri();
-    await mongoose.connect(mongoUri);
-  });
-
-  afterAll(async () => {
-    await mongoose.disconnect();
-    await mongoServer.stop();
-  });
-
-  beforeEach(async () => {
-    // Clear collections before each test
-    await Transaction.deleteMany({});
-    await Balance.deleteMany({});
-
-    // Create a new user ID for each test
-    userId = new mongoose.Types.ObjectId();
-
-    // Mock the balance config to be enabled by default
-    getBalanceConfig.mockResolvedValue({ enabled: true });
+  beforeEach(() => {
+    jest.clearAllMocks();
+    process.env.CHECK_BALANCE = 'true';
   });

   it('should create transactions for both prompt and completion tokens', async () => {
-    // Create a balance for the user
-    await Balance.create({
-      user: userId,
-      tokenCredits: 10000,
-    });
-
     const txData = {
-      user: userId,
+      user: new mongoose.Types.ObjectId(),
       conversationId: 'test-convo',
       model: 'gpt-3.5-turbo',
       context: 'test',
@@ -61,35 +41,31 @@ describe('spendTokens', () => {
       completionTokens: 50,
     };

+    Transaction.create.mockResolvedValueOnce({ tokenType: 'prompt', rawAmount: -100 });
+    Transaction.create.mockResolvedValueOnce({ tokenType: 'completion', rawAmount: -50 });
+    Balance.findOne.mockResolvedValue({ tokenCredits: 10000 });
+    Balance.findOneAndUpdate.mockResolvedValue({ tokenCredits: 9850 });
+
     await spendTokens(txData, tokenUsage);

-    // Verify transactions were created
-    const transactions = await Transaction.find({ user: userId }).sort({ tokenType: 1 });
-    expect(transactions).toHaveLength(2);
-
-    // Check completion transaction
-    expect(transactions[0].tokenType).toBe('completion');
-    expect(transactions[0].rawAmount).toBe(-50);
-
-    // Check prompt transaction
-    expect(transactions[1].tokenType).toBe('prompt');
-    expect(transactions[1].rawAmount).toBe(-100);
-
-    // Verify balance was updated
-    const balance = await Balance.findOne({ user: userId });
-    expect(balance).toBeDefined();
-    expect(balance.tokenCredits).toBeLessThan(10000); // Balance should be reduced
+    expect(Transaction.create).toHaveBeenCalledTimes(2);
+    expect(Transaction.create).toHaveBeenCalledWith(
+      expect.objectContaining({
+        tokenType: 'prompt',
+        rawAmount: -100,
+      }),
+    );
+    expect(Transaction.create).toHaveBeenCalledWith(
+      expect.objectContaining({
+        tokenType: 'completion',
+        rawAmount: -50,
+      }),
+    );
   });

   it('should handle zero completion tokens', async () => {
-    // Create a balance for the user
-    await Balance.create({
-      user: userId,
-      tokenCredits: 10000,
-    });
-
     const txData = {
-      user: userId,
+      user: new mongoose.Types.ObjectId(),
       conversationId: 'test-convo',
       model: 'gpt-3.5-turbo',
       context: 'test',
@@ -99,26 +75,31 @@ describe('spendTokens', () => {
       completionTokens: 0,
     };

+    Transaction.create.mockResolvedValueOnce({ tokenType: 'prompt', rawAmount: -100 });
+    Transaction.create.mockResolvedValueOnce({ tokenType: 'completion', rawAmount: -0 });
+    Balance.findOne.mockResolvedValue({ tokenCredits: 10000 });
+    Balance.findOneAndUpdate.mockResolvedValue({ tokenCredits: 9850 });
+
     await spendTokens(txData, tokenUsage);

-    // Verify transactions were created
-    const transactions = await Transaction.find({ user: userId }).sort({ tokenType: 1 });
-    expect(transactions).toHaveLength(2);
-
-    // Check completion transaction
-    expect(transactions[0].tokenType).toBe('completion');
-    // In JavaScript -0 and 0 are different but functionally equivalent
-    // Use Math.abs to handle both 0 and -0
-    expect(Math.abs(transactions[0].rawAmount)).toBe(0);
-
-    // Check prompt transaction
-    expect(transactions[1].tokenType).toBe('prompt');
-    expect(transactions[1].rawAmount).toBe(-100);
+    expect(Transaction.create).toHaveBeenCalledTimes(2);
+    expect(Transaction.create).toHaveBeenCalledWith(
+      expect.objectContaining({
+        tokenType: 'prompt',
+        rawAmount: -100,
+      }),
+    );
+    expect(Transaction.create).toHaveBeenCalledWith(
+      expect.objectContaining({
+        tokenType: 'completion',
+        rawAmount: -0, // Changed from 0 to -0
+      }),
+    );
   });

   it('should handle undefined token counts', async () => {
     const txData = {
-      user: userId,
+      user: new mongoose.Types.ObjectId(),
       conversationId: 'test-convo',
       model: 'gpt-3.5-turbo',
       context: 'test',
@@ -127,22 +108,13 @@ describe('spendTokens', () => {

     await spendTokens(txData, tokenUsage);

-    // Verify no transactions were created
-    const transactions = await Transaction.find({ user: userId });
-    expect(transactions).toHaveLength(0);
+    expect(Transaction.create).not.toHaveBeenCalled();
   });

-  it('should not update balance when the balance feature is disabled', async () => {
-    // Override configuration: disable balance updates
-    getBalanceConfig.mockResolvedValue({ enabled: false });
-    // Create a balance for the user
-    await Balance.create({
-      user: userId,
-      tokenCredits: 10000,
-    });
-
+  it('should not update balance when CHECK_BALANCE is false', async () => {
+    process.env.CHECK_BALANCE = 'false';
     const txData = {
-      user: userId,
+      user: new mongoose.Types.ObjectId(),
       conversationId: 'test-convo',
       model: 'gpt-3.5-turbo',
       context: 'test',
@@ -152,529 +124,19 @@ describe('spendTokens', () => {
       completionTokens: 50,
     };

     await spendTokens(txData, tokenUsage);

-    // Verify transactions were created
-    const transactions = await Transaction.find({ user: userId });
-    expect(transactions).toHaveLength(2);
-
-    // Verify balance was not updated (should still be 10000)
-    const balance = await Balance.findOne({ user: userId });
-    expect(balance.tokenCredits).toBe(10000);
-  });
-
-  it('should not allow balance to go below zero when spending tokens', async () => {
-    // Create a balance with a low amount
-    await Balance.create({
-      user: userId,
-      tokenCredits: 5000,
-    });
-
-    const txData = {
-      user: userId,
-      conversationId: 'test-convo',
-      model: 'gpt-4', // Using a more expensive model
-      context: 'test',
-    };
-
-    // Spending more tokens than the user has balance for
-    const tokenUsage = {
-      promptTokens: 1000,
-      completionTokens: 500,
-    };
-    Transaction.create.mockResolvedValueOnce({ tokenType: 'prompt', rawAmount: -100 });
-    Transaction.create.mockResolvedValueOnce({ tokenType: 'completion', rawAmount: -50 });
-
-    await spendTokens(txData, tokenUsage);
-
-    // Verify transactions were created
-    const transactions = await Transaction.find({ user: userId }).sort({ tokenType: 1 });
-    expect(transactions).toHaveLength(2);
-
-    // Verify balance was reduced to exactly 0, not negative
-    const balance = await Balance.findOne({ user: userId });
-    expect(balance).toBeDefined();
-    expect(balance.tokenCredits).toBe(0);
-
-    // Check that the transaction records show the adjusted values
-    const transactionResults = await Promise.all(
-      transactions.map((t) =>
-        Transaction.create({
-          ...txData,
-          tokenType: t.tokenType,
-          rawAmount: t.rawAmount,
-        }),
-      ),
-    );
-
-    // The second transaction should have an adjusted value since balance is already 0
-    expect(transactionResults[1]).toEqual(
-      expect.objectContaining({
-        balance: 0,
-      }),
-    );
-  });
-
-  it('should handle multiple transactions in sequence with low balance and not increase balance', async () => {
-    // This test is specifically checking for the issue reported in production
-    // where the balance increases after a transaction when it should remain at 0
-    // Create a balance with a very low amount
-    await Balance.create({
-      user: userId,
-      tokenCredits: 100,
-    });
-
-    // First transaction - should reduce balance to 0
-    const txData1 = {
-      user: userId,
-      conversationId: 'test-convo-1',
-      model: 'gpt-4',
-      context: 'test',
-    };
-
-    const tokenUsage1 = {
-      promptTokens: 100,
-      completionTokens: 50,
-    };
-
-    await spendTokens(txData1, tokenUsage1);
-
-    // Check balance after first transaction
-    let balance = await Balance.findOne({ user: userId });
-    expect(balance.tokenCredits).toBe(0);
-
-    // Second transaction - should keep balance at 0, not make it negative or increase it
-    const txData2 = {
-      user: userId,
-      conversationId: 'test-convo-2',
-      model: 'gpt-4',
-      context: 'test',
-    };
-
-    const tokenUsage2 = {
-      promptTokens: 200,
-      completionTokens: 100,
-    };
-
-    await spendTokens(txData2, tokenUsage2);
-
-    // Check balance after second transaction - should still be 0
-    balance = await Balance.findOne({ user: userId });
-    expect(balance.tokenCredits).toBe(0);
-
-    // Verify all transactions were created
-    const transactions = await Transaction.find({ user: userId });
-    expect(transactions).toHaveLength(4); // 2 transactions (prompt+completion) for each call
-
-    // Let's examine the actual transaction records to see what's happening
-    const transactionDetails = await Transaction.find({ user: userId }).sort({ createdAt: 1 });
-
-    // Log the transaction details for debugging
-    console.log('Transaction details:');
-    transactionDetails.forEach((tx, i) => {
-      console.log(`Transaction ${i + 1}:`, {
-        tokenType: tx.tokenType,
-        rawAmount: tx.rawAmount,
-        tokenValue: tx.tokenValue,
-        model: tx.model,
-      });
-    });
-
-    // Check the return values from Transaction.create directly
-    // This is to verify that the incrementValue is not becoming positive
-    const directResult = await Transaction.create({
-      user: userId,
-      conversationId: 'test-convo-3',
-      model: 'gpt-4',
-      tokenType: 'completion',
-      rawAmount: -100,
-      context: 'test',
-    });
-
-    console.log('Direct Transaction.create result:', directResult);
-
-    // The completion value should never be positive
-    expect(directResult.completion).not.toBeGreaterThan(0);
-  });
-
-  it('should ensure tokenValue is always negative for spending tokens', async () => {
-    // Create a balance for the user
-    await Balance.create({
-      user: userId,
-      tokenCredits: 10000,
-    });
-
-    // Test with various models to check multiplier calculations
-    const models = ['gpt-3.5-turbo', 'gpt-4', 'claude-3-5-sonnet'];
-
-    for (const model of models) {
-      const txData = {
-        user: userId,
-        conversationId: `test-convo-${model}`,
-        model,
-        context: 'test',
-      };
-
-      const tokenUsage = {
-        promptTokens: 100,
-        completionTokens: 50,
-      };
-
-      await spendTokens(txData, tokenUsage);
-
-      // Get the transactions for this model
-      const transactions = await Transaction.find({
-        user: userId,
-        model,
-      });
-
-      // Verify tokenValue is negative for all transactions
-      transactions.forEach((tx) => {
-        console.log(`Model ${model}, Type ${tx.tokenType}: tokenValue = ${tx.tokenValue}`);
-        expect(tx.tokenValue).toBeLessThan(0);
-      });
-    }
-  });
-
-  it('should handle structured transactions in sequence with low balance', async () => {
-    // Create a balance with a very low amount
-    await Balance.create({
-      user: userId,
-      tokenCredits: 100,
-    });
-
-    // First transaction - should reduce balance to 0
-    const txData1 = {
-      user: userId,
-      conversationId: 'test-convo-1',
-      model: 'claude-3-5-sonnet',
-      context: 'test',
-    };
-
-    const tokenUsage1 = {
-      promptTokens: {
-        input: 10,
-        write: 100,
-        read: 5,
-      },
-      completionTokens: 50,
-    };
-
-    await spendStructuredTokens(txData1, tokenUsage1);
-
-    // Check balance after first transaction
-    let balance = await Balance.findOne({ user: userId });
-    expect(balance.tokenCredits).toBe(0);
-
-    // Second transaction - should keep balance at 0, not make it negative or increase it
-    const txData2 = {
-      user: userId,
-      conversationId: 'test-convo-2',
-      model: 'claude-3-5-sonnet',
-      context: 'test',
-    };
-
-    const tokenUsage2 = {
-      promptTokens: {
-        input: 20,
-        write: 200,
-        read: 10,
-      },
-      completionTokens: 100,
-    };
-
-    await spendStructuredTokens(txData2, tokenUsage2);
-
-    // Check balance after second transaction - should still be 0
-    balance = await Balance.findOne({ user: userId });
-    expect(balance.tokenCredits).toBe(0);
-
-    // Verify all transactions were created
-    const transactions = await Transaction.find({ user: userId });
-    expect(transactions).toHaveLength(4); // 2 transactions (prompt+completion) for each call
-
-    // Let's examine the actual transaction records to see what's happening
-    const transactionDetails = await Transaction.find({ user: userId }).sort({ createdAt: 1 });
-
-    // Log the transaction details for debugging
-    console.log('Structured transaction details:');
-    transactionDetails.forEach((tx, i) => {
-      console.log(`Transaction ${i + 1}:`, {
-        tokenType: tx.tokenType,
-        rawAmount: tx.rawAmount,
-        tokenValue: tx.tokenValue,
-        inputTokens: tx.inputTokens,
-        writeTokens: tx.writeTokens,
-        readTokens: tx.readTokens,
-        model: tx.model,
-      });
-    });
-  });
-
-  it('should not allow balance to go below zero when spending structured tokens', async () => {
-    // Create a balance with a low amount
-    await Balance.create({
-      user: userId,
-      tokenCredits: 5000,
-    });
-
-    const txData = {
-      user: userId,
-      conversationId: 'test-convo',
-      model: 'claude-3-5-sonnet', // Using a model that supports structured tokens
-      context: 'test',
-    };
-
-    // Spending more tokens than the user has balance for
-    const tokenUsage = {
-      promptTokens: {
-        input: 100,
-        write: 1000,
-        read: 50,
-      },
-      completionTokens: 500,
-    };
-
-    const result = await spendStructuredTokens(txData, tokenUsage);
-
-    // Verify transactions were created
-    const transactions = await Transaction.find({ user: userId }).sort({ tokenType: 1 });
-    expect(transactions).toHaveLength(2);
-
-    // Verify balance was reduced to exactly 0, not negative
-    const balance = await Balance.findOne({ user: userId });
-    expect(balance).toBeDefined();
-    expect(balance.tokenCredits).toBe(0);
-
-    // The result should show the adjusted values
-    expect(result).toEqual({
-      prompt: expect.objectContaining({
-        user: userId.toString(),
-        balance: expect.any(Number),
-      }),
-      completion: expect.objectContaining({
-        user: userId.toString(),
-        balance: 0, // Final balance should be 0
-      }),
-    });
-  });
-
-  it('should handle multiple concurrent transactions correctly with a high balance', async () => {
-    // Create a balance with a high amount
-    const initialBalance = 10000000;
-    await Balance.create({
-      user: userId,
-      tokenCredits: initialBalance,
-    });
-
-    // Simulate the recordCollectedUsage function from the production code
-    const conversationId = 'test-concurrent-convo';
-    const context = 'message';
-    const model = 'gpt-4';
-
-    const amount = 50;
-    // Create `amount` of usage records to simulate multiple transactions
-    const collectedUsage = Array.from({ length: amount }, (_, i) => ({
-      model,
-      input_tokens: 100 + i * 10, // Increasing input tokens
-      output_tokens: 50 + i * 5, // Increasing output tokens
-      input_token_details: {
-        cache_creation: i % 2 === 0 ? 20 : 0, // Some have cache creation
-        cache_read: i % 3 === 0 ? 10 : 0, // Some have cache read
-      },
-    }));
-
-    // Process all transactions concurrently to simulate race conditions
-    const promises = [];
-    let expectedTotalSpend = 0;
-
-    for (let i = 0; i < collectedUsage.length; i++) {
-      const usage = collectedUsage[i];
-      if (!usage) {
-        continue;
-      }
-
-      const cache_creation = Number(usage.input_token_details?.cache_creation) || 0;
-      const cache_read = Number(usage.input_token_details?.cache_read) || 0;
-
-      const txMetadata = {
-        context,
-        conversationId,
-        user: userId,
-        model: usage.model,
-      };
-
-      // Calculate expected spend for this transaction
-      const promptTokens = usage.input_tokens;
-      const completionTokens = usage.output_tokens;
-
-      // For regular transactions
-      if (cache_creation === 0 && cache_read === 0) {
-        // Add to expected spend using the correct multipliers from tx.js
-        // For gpt-4, the multipliers are: prompt=30, completion=60
-        expectedTotalSpend += promptTokens * 30; // gpt-4 prompt rate is 30
-        expectedTotalSpend += completionTokens * 60; // gpt-4 completion rate is 60
-
-        promises.push(
-          spendTokens(txMetadata, {
-            promptTokens,
-            completionTokens,
-          }),
-        );
-      } else {
-        // For structured transactions with cache operations
-        // The multipliers for claude models with cache operations are different
-        // But since we're using gpt-4 in the test, we need to use appropriate values
-        expectedTotalSpend += promptTokens * 30; // Base prompt rate for gpt-4
-        // Since gpt-4 doesn't have cache multipliers defined, we'll use the prompt rate
-        expectedTotalSpend += cache_creation * 30; // Write rate (using prompt rate as fallback)
-        expectedTotalSpend += cache_read * 30; // Read rate (using prompt rate as fallback)
-        expectedTotalSpend += completionTokens * 60; // Completion rate for gpt-4
-
-        promises.push(
-          spendStructuredTokens(txMetadata, {
-            promptTokens: {
-              input: promptTokens,
-              write: cache_creation,
-              read: cache_read,
-            },
-            completionTokens,
-          }),
-        );
-      }
-    }
-
-    // Wait for all transactions to complete
-    await Promise.all(promises);
-
-    // Verify final balance
-    const finalBalance = await Balance.findOne({ user: userId });
-    expect(finalBalance).toBeDefined();
-
-    // The final balance should be the initial balance minus the expected total spend
-    const expectedFinalBalance = initialBalance - expectedTotalSpend;
-
-    console.log('Initial balance:', initialBalance);
-    console.log('Expected total spend:', expectedTotalSpend);
-    console.log('Expected final balance:', expectedFinalBalance);
-    console.log('Actual final balance:', finalBalance.tokenCredits);
-
-    // Allow for small rounding differences
-    expect(finalBalance.tokenCredits).toBeCloseTo(expectedFinalBalance, 0);
-
-    // Verify all transactions were created
-    const transactions = await Transaction.find({
-      user: userId,
-      conversationId,
-    });
-
-    // We should have 2 transactions (prompt + completion) for each usage record
-    // Some might be structured, some regular
-    expect(transactions.length).toBeGreaterThanOrEqual(collectedUsage.length);
-
-    // Log transaction details for debugging
-    console.log('Transaction summary:');
-    let totalTokenValue = 0;
-    transactions.forEach((tx) => {
-      console.log(`${tx.tokenType}: rawAmount=${tx.rawAmount}, tokenValue=${tx.tokenValue}`);
-      totalTokenValue += tx.tokenValue;
-    });
-    console.log('Total token value from transactions:', totalTokenValue);
-
-    // The difference between expected and actual is significant
-    // This is likely due to the multipliers being different in the test environment
-    // Let's adjust our expectation based on the actual transactions
-    const actualSpend = initialBalance - finalBalance.tokenCredits;
-    console.log('Actual spend:', actualSpend);
-
-    // Instead of checking the exact balance, let's verify that:
-    // 1. The balance was reduced (tokens were spent)
-    expect(finalBalance.tokenCredits).toBeLessThan(initialBalance);
-    // 2. The total token value from transactions matches the actual spend
-    expect(Math.abs(totalTokenValue)).toBeCloseTo(actualSpend, -3); // Allow for larger differences
-  });
-
-  // Add this new test case
-  it('should handle multiple concurrent balance increases correctly', async () => {
-    // Start with zero balance
-    const initialBalance = 0;
-    await Balance.create({
-      user: userId,
-      tokenCredits: initialBalance,
-    });
-
-    const numberOfRefills = 25;
-    const refillAmount = 1000;
-
-    const promises = [];
-    for (let i = 0; i < numberOfRefills; i++) {
-      promises.push(
-        Transaction.createAutoRefillTransaction({
-          user: userId,
-          tokenType: 'credits',
-          context: 'concurrent-refill-test',
-          rawAmount: refillAmount,
-        }),
-      );
-    }
-
-    // Wait for all refill transactions to complete
-    const results = await Promise.all(promises);
-
-    // Verify final balance
-    const finalBalance = await Balance.findOne({ user: userId });
-    expect(finalBalance).toBeDefined();
-
-    // The final balance should be the initial balance plus the sum of all refills
-    const expectedFinalBalance = initialBalance + numberOfRefills * refillAmount;
-
-    console.log('Initial balance (Increase Test):', initialBalance);
-    console.log(`Performed ${numberOfRefills} refills of ${refillAmount} each.`);
-    console.log('Expected final balance (Increase Test):', expectedFinalBalance);
-    console.log('Actual final balance (Increase Test):', finalBalance.tokenCredits);
-
-    // Use toBeCloseTo for safety, though toBe should work for integer math
-    expect(finalBalance.tokenCredits).toBeCloseTo(expectedFinalBalance, 0);
-
-    // Verify all transactions were created
-    const transactions = await Transaction.find({
-      user: userId,
-      context: 'concurrent-refill-test',
-    });
-
-    // We should have one transaction for each refill attempt
-    expect(transactions.length).toBe(numberOfRefills);
-
-    // Optional: Verify the sum of increments from the results matches the balance change
-    const totalIncrementReported = results.reduce((sum, result) => {
-      // Assuming createAutoRefillTransaction returns an object with the increment amount
-      // Adjust this based on the actual return structure.
-      // Let's assume it returns { balance: newBalance, transaction: { rawAmount: ... } }
-      // Or perhaps we check the transaction.rawAmount directly
-      return sum + (result?.transaction?.rawAmount || 0);
-    }, 0);
-    console.log('Total increment reported by results:', totalIncrementReported);
-    expect(totalIncrementReported).toBe(expectedFinalBalance - initialBalance);
-
-    // Optional: Check the sum of tokenValue from saved transactions
-    let totalTokenValueFromDb = 0;
-    transactions.forEach((tx) => {
-      // For refills, rawAmount is positive, and tokenValue might be calculated based on it
-      // Let's assume tokenValue directly reflects the increment for simplicity here
-      // If calculation is involved, adjust accordingly
-      totalTokenValueFromDb += tx.rawAmount; // Or tx.tokenValue if that holds the increment
-    });
-    console.log('Total rawAmount from DB transactions:', totalTokenValueFromDb);
-    expect(totalTokenValueFromDb).toBeCloseTo(expectedFinalBalance - initialBalance, 0);
+    expect(Transaction.create).toHaveBeenCalledTimes(2);
+    expect(Balance.findOne).not.toHaveBeenCalled();
+    expect(Balance.findOneAndUpdate).not.toHaveBeenCalled();
   });

   it('should create structured transactions for both prompt and completion tokens', async () => {
-    // Create a balance for the user
-    await Balance.create({
-      user: userId,
-      tokenCredits: 10000,
-    });
-
     const txData = {
-      user: userId,
+      user: new mongoose.Types.ObjectId(),
       conversationId: 'test-convo',
       model: 'claude-3-5-sonnet',
       context: 'test',
@@ -688,37 +150,48 @@ describe('spendTokens', () => {
       completionTokens: 50,
     };

-    const result = await spendStructuredTokens(txData, tokenUsage);
-
-    // Verify transactions were created
-    const transactions = await Transaction.find({ user: userId }).sort({ tokenType: 1 });
-    expect(transactions).toHaveLength(2);
-
-    // Check completion transaction
-    expect(transactions[0].tokenType).toBe('completion');
-    expect(transactions[0].rawAmount).toBe(-50);
-
-    // Check prompt transaction
-    expect(transactions[1].tokenType).toBe('prompt');
-    expect(transactions[1].inputTokens).toBe(-10);
-    expect(transactions[1].writeTokens).toBe(-100);
-    expect(transactions[1].readTokens).toBe(-5);
-
-    // Verify result contains transaction info
-    expect(result).toEqual({
-      prompt: expect.objectContaining({
-        user: userId.toString(),
-        prompt: expect.any(Number),
-      }),
-      completion: expect.objectContaining({
-        user: userId.toString(),
-        completion: expect.any(Number),
-      }),
-    });
+    Transaction.createStructured.mockResolvedValueOnce({
+      rate: 3.75,
+      user: txData.user.toString(),
+      balance: 9570,
+      prompt: -430,
+    });
+    Transaction.create.mockResolvedValueOnce({
+      rate: 15,
+      user: txData.user.toString(),
+      balance: 8820,
+      completion: -750,
+    });

-    // Verify balance was updated
-    const balance = await Balance.findOne({ user: userId });
-    expect(balance).toBeDefined();
-    expect(balance.tokenCredits).toBeLessThan(10000); // Balance should be reduced
+    const result = await spendStructuredTokens(txData, tokenUsage);

+    expect(Transaction.createStructured).toHaveBeenCalledWith(
+      expect.objectContaining({
+        tokenType: 'prompt',
+        inputTokens: -10,
+        writeTokens: -100,
+        readTokens: -5,
+      }),
+    );
+    expect(Transaction.create).toHaveBeenCalledWith(
+      expect.objectContaining({
+        tokenType: 'completion',
+        rawAmount: -50,
+      }),
+    );
+    expect(result).toEqual({
+      prompt: expect.objectContaining({
+        rate: 3.75,
+        user: txData.user.toString(),
+        balance: 9570,
+        prompt: -430,
+      }),
+      completion: expect.objectContaining({
+        rate: 15,
+        user: txData.user.toString(),
+        balance: 8820,
+        completion: -750,
+      }),
+    });
   });
 });
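The removed suite above repeatedly pinned down one invariant: a spend must never drive `tokenCredits` negative, and concurrent spends or refills must net out exactly. One hedged way to express that clamp (not necessarily how either branch implements it) is a MongoDB aggregation-pipeline update, which subtracts and floors at zero in a single atomic operation; the module path and function name here are assumptions:

```js
const Balance = require('~/models/Balance'); // path assumed for illustration

// Atomically subtract `cost` credits, never dropping below zero.
async function decrementBalance(userId, cost) {
  return Balance.findOneAndUpdate(
    { user: userId },
    [{ $set: { tokenCredits: { $max: [0, { $subtract: ['$tokenCredits', cost] }] } } }],
    { new: true },
  ).lean();
}
```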
@@ -61,7 +61,6 @@ const bedrockValues = {
   'amazon.nova-micro-v1:0': { prompt: 0.035, completion: 0.14 },
   'amazon.nova-lite-v1:0': { prompt: 0.06, completion: 0.24 },
   'amazon.nova-pro-v1:0': { prompt: 0.8, completion: 3.2 },
-  'deepseek.r1': { prompt: 1.35, completion: 5.4 },
 };

 /**
@@ -109,7 +108,6 @@ const tokenValues = Object.assign(
   'gemini-2.0-flash-lite': { prompt: 0.075, completion: 0.3 },
   'gemini-2.0-flash': { prompt: 0.1, completion: 0.7 },
   'gemini-2.0': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
-  'gemini-2.5': { prompt: 0, completion: 0 }, // Free for a period of time
   'gemini-1.5-flash-8b': { prompt: 0.075, completion: 0.3 },
   'gemini-1.5-flash': { prompt: 0.15, completion: 0.6 },
   'gemini-1.5': { prompt: 2.5, completion: 10 },
@@ -123,12 +121,6 @@ const tokenValues = Object.assign(
   'grok-2-latest': { prompt: 2.0, completion: 10.0 },
   'grok-2': { prompt: 2.0, completion: 10.0 },
   'grok-beta': { prompt: 5.0, completion: 15.0 },
-  'mistral-large': { prompt: 2.0, completion: 6.0 },
-  'pixtral-large': { prompt: 2.0, completion: 6.0 },
-  'mistral-saba': { prompt: 0.2, completion: 0.6 },
-  codestral: { prompt: 0.3, completion: 0.9 },
-  'ministral-8b': { prompt: 0.1, completion: 0.1 },
-  'ministral-3b': { prompt: 0.04, completion: 0.04 },
   },
   bedrockValues,
 );
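These tables map model-name keys to prompt/completion multipliers, and a spend is simply rate times token count, recorded as a negative value. A minimal illustrative sketch, with names assumed rather than taken from the file:

```js
// Illustrative only: how a multiplier table like the one above turns a raw
// token count into a (negative) credit value.
const tokenValues = {
  'grok-2': { prompt: 2.0, completion: 10.0 },
  'gemini-1.5-flash': { prompt: 0.15, completion: 0.6 },
};

function tokenCost(model, tokenType, tokens) {
  const rates = tokenValues[model] ?? { prompt: 1, completion: 1 };
  return -tokens * rates[tokenType]; // negative: spends reduce the balance
}

tokenCost('grok-2', 'completion', 50); // -500 credits
```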
@@ -288,7 +288,7 @@ describe('AWS Bedrock Model Tests', () => {
 });

 describe('Deepseek Model Tests', () => {
-  const deepseekModels = ['deepseek-chat', 'deepseek-coder', 'deepseek-reasoner', 'deepseek.r1'];
+  const deepseekModels = ['deepseek-chat', 'deepseek-coder', 'deepseek-reasoner'];

   it('should return the correct prompt multipliers for all models', () => {
     const results = deepseekModels.map((model) => {
@@ -1,6 +1,6 @@
 const bcrypt = require('bcryptjs');
-const { getBalanceConfig } = require('~/server/services/Config');
 const signPayload = require('~/server/services/signPayload');
+const { isEnabled } = require('~/server/utils/handleText');
 const Balance = require('./Balance');
 const User = require('./User');

@@ -13,9 +13,11 @@ const User = require('./User');
 */
 const getUserById = async function (userId, fieldsToSelect = null) {
   const query = User.findById(userId);

   if (fieldsToSelect) {
     query.select(fieldsToSelect);
   }

   return await query.lean();
 };

@@ -30,6 +32,7 @@ const findUser = async function (searchCriteria, fieldsToSelect = null) {
   if (fieldsToSelect) {
     query.select(fieldsToSelect);
   }

   return await query.lean();
 };

@@ -55,12 +58,11 @@ const updateUser = async function (userId, updateData) {
 * Creates a new user, optionally with a TTL of 1 week.
 * @param {MongoUser} data - The user data to be created, must contain user_id.
 * @param {boolean} [disableTTL=true] - Whether to disable the TTL. Defaults to `true`.
-* @param {boolean} [returnUser=false] - Whether to return the created user object.
-* @returns {Promise<ObjectId|MongoUser>} A promise that resolves to the created user document ID or user object.
+* @param {boolean} [returnUser=false] - Whether to return the created user object. Defaults to `false`.
+* @returns {Promise<ObjectId>} A promise that resolves to the created user document ID.
 * @throws {Error} If a user with the same user_id already exists.
 */
 const createUser = async (data, disableTTL = true, returnUser = false) => {
-  const balance = await getBalanceConfig();
   const userData = {
     ...data,
     expiresAt: disableTTL ? null : new Date(Date.now() + 604800 * 1000), // 1 week in milliseconds
@@ -72,27 +74,13 @@ const createUser = async (data, disableTTL = true, returnUser = false) => {

   const user = await User.create(userData);

-  // If balance is enabled, create or update a balance record for the user using global.interfaceConfig.balance
-  if (balance?.enabled && balance?.startBalance) {
-    const update = {
-      $inc: { tokenCredits: balance.startBalance },
-    };
-
-    if (
-      balance.autoRefillEnabled &&
-      balance.refillIntervalValue != null &&
-      balance.refillIntervalUnit != null &&
-      balance.refillAmount != null
-    ) {
-      update.$set = {
-        autoRefillEnabled: true,
-        refillIntervalValue: balance.refillIntervalValue,
-        refillIntervalUnit: balance.refillIntervalUnit,
-        refillAmount: balance.refillAmount,
-      };
-    }
-
-    await Balance.findOneAndUpdate({ user: user._id }, update, { upsert: true, new: true }).lean();
+  if (isEnabled(process.env.CHECK_BALANCE) && process.env.START_BALANCE) {
+    let incrementValue = parseInt(process.env.START_BALANCE);
+    await Balance.findOneAndUpdate(
+      { user: user._id },
+      { $inc: { tokenCredits: incrementValue } },
+      { upsert: true, new: true },
+    ).lean();
   }

   if (returnUser) {
@@ -135,7 +123,7 @@ const expires = eval(SESSION_EXPIRY) ?? 1000 * 60 * 15;
 /**
 * Generates a JWT token for a given user.
 *
-* @param {MongoUser} user - The user for whom the token is being generated.
+* @param {MongoUser} user - ID of the user for whom the token is being generated.
 * @returns {Promise<string>} A promise that resolves to a JWT token.
 */
 const generateToken = async (user) => {
@@ -158,7 +146,7 @@ const generateToken = async (user) => {
 /**
 * Compares the provided password with the user's password.
 *
-* @param {MongoUser} user - The user to compare the password for.
+* @param {MongoUser} user - the user to compare password for.
 * @param {string} candidatePassword - The password to test against the user's password.
 * @returns {Promise<boolean>} A promise that resolves to a boolean indicating if the password matches.
 */
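An illustrative call (not from the diff) showing how the `createUser` signature above is meant to be used; per the code, a Balance record is also seeded when `CHECK_BALANCE` and `START_BALANCE` are set:

```js
const { createUser } = require('~/models');

// Inside an async function:
const user = await createUser(
  { email: 'jane@example.com', username: 'jane' }, // data
  true, // disableTTL: no expiresAt, the record never auto-expires
  true, // returnUser: resolve with the full user object instead of just its _id
);
```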
@@ -1,6 +1,6 @@
 {
   "name": "@librechat/backend",
-  "version": "v0.7.7",
+  "version": "v0.7.7-rc1",
   "description": "",
   "scripts": {
     "start": "echo 'please run this from the root directory'",
@@ -35,24 +35,19 @@
   "homepage": "https://librechat.ai",
   "dependencies": {
     "@anthropic-ai/sdk": "^0.37.0",
-    "@aws-sdk/client-s3": "^3.758.0",
-    "@aws-sdk/s3-request-presigner": "^3.758.0",
     "@azure/identity": "^4.7.0",
     "@azure/search-documents": "^12.0.0",
-    "@azure/storage-blob": "^12.26.0",
     "@google/generative-ai": "^0.23.0",
     "@googleapis/youtube": "^20.0.0",
     "@keyv/mongo": "^2.1.8",
     "@keyv/redis": "^2.8.1",
-    "@langchain/community": "^0.3.34",
+    "@langchain/community": "^0.3.14",
     "@langchain/core": "^0.3.40",
-    "@langchain/google-genai": "^0.1.11",
-    "@langchain/google-vertexai": "^0.2.2",
+    "@langchain/google-genai": "^0.1.9",
+    "@langchain/google-vertexai": "^0.2.0",
     "@langchain/textsplitters": "^0.1.0",
-    "@librechat/agents": "^2.3.95",
-    "@librechat/data-schemas": "*",
+    "@librechat/agents": "^2.1.8",
     "@waylaidwanderer/fetch-event-source": "^3.0.1",
-    "axios": "^1.8.2",
+    "axios": "1.7.8",
     "bcryptjs": "^2.4.3",
     "cohere-ai": "^7.9.1",
     "compression": "^1.7.4",
@@ -79,6 +74,7 @@
     "keyv": "^4.5.4",
     "keyv-file": "^0.2.0",
     "klona": "^2.0.6",
+    "langchain": "^0.2.19",
     "librechat-data-provider": "*",
     "librechat-mcp": "*",
     "lodash": "^4.17.21",
@@ -86,7 +82,7 @@
     "memorystore": "^1.6.7",
     "mime": "^3.0.0",
     "module-alias": "^2.2.3",
-    "mongoose": "^8.12.1",
+    "mongoose": "^8.9.5",
     "multer": "^1.4.5-lts.1",
     "nanoid": "^3.3.7",
     "nodemailer": "^6.9.15",
@@ -103,16 +99,13 @@
     "passport-jwt": "^4.0.1",
     "passport-ldapauth": "^3.0.1",
     "passport-local": "^1.0.0",
-    "rate-limit-redis": "^4.2.0",
-    "sharp": "^0.33.5",
+    "socket.io": "^4.8.1",
+    "sharp": "^0.32.6",
     "tiktoken": "^1.0.15",
     "traverse": "^0.6.7",
     "ua-parser-js": "^1.0.36",
     "winston": "^3.11.0",
     "winston-daily-rotate-file": "^4.7.1",
-    "youtube-transcript": "^1.2.1",
+    "wrtc": "^0.4.7",
     "zod": "^3.22.4"
   },
   "devDependencies": {
@@ -1,9 +1,59 @@
 const { getResponseSender, Constants } = require('librechat-data-provider');
 const { createAbortController, handleAbortError } = require('~/server/middleware');
 const { sendMessage, createOnProgress } = require('~/server/utils');
-const { saveMessage } = require('~/models');
+const { saveMessage, getUserById } = require('~/models');
 const { logger } = require('~/config');

+let crypto;
+try {
+  crypto = require('crypto');
+} catch (err) {
+  logger.error('[AskController] crypto support is disabled!', err);
+}
+
+/**
+ * Helper function to encrypt plaintext using AES-256-GCM and then RSA-encrypt the AES key.
+ * @param {string} plainText - The plaintext to encrypt.
+ * @param {string} pemPublicKey - The RSA public key in PEM format.
+ * @returns {Object} An object containing the ciphertext, iv, authTag, and encryptedKey.
+ */
+function encryptText(plainText, pemPublicKey) {
+  // Generate a random 256-bit AES key and a 12-byte IV.
+  const aesKey = crypto.randomBytes(32);
+  const iv = crypto.randomBytes(12);
+
+  // Encrypt the plaintext using AES-256-GCM.
+  const cipher = crypto.createCipheriv('aes-256-gcm', aesKey, iv);
+  let ciphertext = cipher.update(plainText, 'utf8', 'base64');
+  ciphertext += cipher.final('base64');
+  const authTag = cipher.getAuthTag().toString('base64');
+
+  // Encrypt the AES key using the user's RSA public key.
+  const encryptedKey = crypto.publicEncrypt(
+    {
+      key: pemPublicKey,
+      padding: crypto.constants.RSA_PKCS1_OAEP_PADDING,
+      oaepHash: 'sha256',
+    },
+    aesKey,
+  ).toString('base64');
+
+  return {
+    ciphertext,
+    iv: iv.toString('base64'),
+    authTag,
+    encryptedKey,
+  };
+}
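For context before the hunk continues: the inverse of `encryptText` is not part of this diff, since the server only ever holds the wrapped private key. A hedged sketch of what decryption would look like for a caller that does hold the RSA private key in PEM form (the padding and hash must match the encrypt side):

```js
// Hypothetical inverse of encryptText; illustrative only.
const crypto = require('crypto');

function decryptText({ ciphertext, iv, authTag, encryptedKey }, pemPrivateKey) {
  // Unwrap the AES key with RSA-OAEP (same padding/hash as publicEncrypt above).
  const aesKey = crypto.privateDecrypt(
    {
      key: pemPrivateKey,
      padding: crypto.constants.RSA_PKCS1_OAEP_PADDING,
      oaepHash: 'sha256',
    },
    Buffer.from(encryptedKey, 'base64'),
  );

  // Decrypt the payload with AES-256-GCM and verify its auth tag.
  const decipher = crypto.createDecipheriv('aes-256-gcm', aesKey, Buffer.from(iv, 'base64'));
  decipher.setAuthTag(Buffer.from(authTag, 'base64'));
  let plainText = decipher.update(ciphertext, 'base64', 'utf8');
  plainText += decipher.final('utf8');
  return plainText;
}
```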
+/**
+ * AskController
+ * - Initializes the client.
+ * - Obtains the response from the language model.
+ * - Retrieves the full user record (to get encryption parameters).
+ * - If the user has encryption enabled (i.e. encryptionPublicKey is provided),
+ *   encrypts both the request (userMessage) and the response before saving.
+ */
 const AskController = async (req, res, next, initializeClient, addTitle) => {
   let {
     text,
@@ -32,7 +82,22 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
     modelDisplayLabel,
   });
   const newConvo = !conversationId;
-  const user = req.user.id;
+  const userId = req.user.id; // User ID from authentication
+
+  // Retrieve full user record from DB (including encryption parameters)
+  const dbUser = await getUserById(userId, 'encryptionPublicKey encryptedPrivateKey encryptionSalt encryptionIV');
+
+  // Build clientOptions including the encryptionPublicKey (if available)
+  const clientOptions = {
+    encryptionPublicKey: dbUser?.encryptionPublicKey,
+  };
+
+  // Rebuild PEM format if encryptionPublicKey is available
+  let pemPublicKey = null;
+  if (clientOptions.encryptionPublicKey && clientOptions.encryptionPublicKey.trim() !== '') {
+    const pubKeyBase64 = clientOptions.encryptionPublicKey;
+    pemPublicKey = `-----BEGIN PUBLIC KEY-----\n${pubKeyBase64.match(/.{1,64}/g).join('\n')}\n-----END PUBLIC KEY-----`;
+  }

   const getReqData = (data = {}) => {
     for (let key in data) {
@@ -52,11 +117,10 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
   };

   let getText;

   try {
-    const { client } = await initializeClient({ req, res, endpointOption });
+    // Pass clientOptions (which includes encryptionPublicKey) along with other parameters to initializeClient
+    const { client } = await initializeClient({ req, res, endpointOption, ...clientOptions });
     const { onProgress: progressCallback, getPartialText } = createOnProgress();

     getText = client.getStreamText != null ? client.getStreamText.bind(client) : getPartialText;

     const getAbortData = () => ({
@@ -74,20 +138,14 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {

     res.on('close', () => {
       logger.debug('[AskController] Request closed');
-      if (!abortController) {
-        return;
-      } else if (abortController.signal.aborted) {
-        return;
-      } else if (abortController.requestCompleted) {
-        return;
-      }
-
+      if (!abortController) { return; }
+      if (abortController.signal.aborted || abortController.requestCompleted) { return; }
       abortController.abort();
       logger.debug('[AskController] Request aborted on close');
     });

     const messageOptions = {
-      user,
+      user: userId,
       parentMessageId,
       conversationId,
       overrideParentMessageId,
@@ -95,16 +153,14 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
       onStart,
       abortController,
       progressCallback,
-      progressOptions: {
-        res,
-        // parentMessageId: overrideParentMessageId || userMessageId,
-      },
+      progressOptions: { res },
     };

     /** @type {TMessage} */
+    // Get the response from the language model client.
     let response = await client.sendMessage(text, messageOptions);
     response.endpoint = endpointOption.endpoint;

+    // Ensure the conversation has a title.
     const { conversation = {} } = await client.responsePromise;
     conversation.title =
       conversation && !conversation.title ? null : conversation?.title || 'New Chat';
@@ -115,6 +171,35 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
       delete userMessage.image_urls;
     }

+    // --- Encrypt the user message if encryption is enabled ---
+    if (pemPublicKey && userMessage && userMessage.text) {
+      try {
+        const { ciphertext, iv, authTag, encryptedKey } = encryptText(userMessage.text, pemPublicKey);
+        userMessage.text = ciphertext;
+        userMessage.iv = iv;
+        userMessage.authTag = authTag;
+        userMessage.encryptedKey = encryptedKey;
+        logger.debug('[AskController] User message encrypted.');
+      } catch (encError) {
+        logger.error('[AskController] Error encrypting user message:', encError);
+      }
+    }
+
+    // --- Encrypt the AI response if encryption is enabled ---
+    if (pemPublicKey && response.text) {
+      try {
+        const { ciphertext, iv, authTag, encryptedKey } = encryptText(response.text, pemPublicKey);
+        response.text = ciphertext;
+        response.iv = iv;
+        response.authTag = authTag;
+        response.encryptedKey = encryptedKey;
+        logger.debug('[AskController] Response message encrypted.');
+      } catch (encError) {
+        logger.error('[AskController] Error encrypting response message:', encError);
+      }
+    }
+    // --- End Encryption Branch ---

     if (!abortController.signal.aborted) {
       sendMessage(res, {
         final: true,
@@ -128,15 +213,15 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
       if (!client.savedMessageIds.has(response.messageId)) {
         await saveMessage(
           req,
-          { ...response, user },
-          { context: 'api/server/controllers/AskController.js - response end' },
+          { ...response, user: userId },
+          { context: 'AskController - response end' },
         );
       }
     }

     if (!client.skipSaveUserMessage) {
       await saveMessage(req, userMessage, {
-        context: 'api/server/controllers/AskController.js - don\'t skip saving user message',
+        context: 'AskController - save user message',
       });
     }

@@ -156,9 +241,9 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
       messageId: responseMessageId,
       parentMessageId: overrideParentMessageId ?? userMessageId ?? parentMessageId,
     }).catch((err) => {
-      logger.error('[AskController] Error in `handleAbortError`', err);
+      logger.error('[AskController] Error in handleAbortError', err);
     });
   }
 };

-module.exports = AskController;
+module.exports = AskController;
@@ -1,7 +1,6 @@
 const { CacheKeys } = require('librechat-data-provider');
 const { loadDefaultModels, loadConfigModels } = require('~/server/services/Config');
 const { getLogStores } = require('~/cache');
-const { logger } = require('~/config');

 /**
  * @param {ServerRequest} req
@@ -37,13 +36,8 @@ async function loadModels(req) {
 }

 async function modelController(req, res) {
-  try {
-    const modelConfig = await loadModels(req);
-    res.send(modelConfig);
-  } catch (error) {
-    logger.error('Error fetching models:', error);
-    res.status(500).send({ error: error.message });
-  }
+  const modelConfig = await loadModels(req);
+  res.send(modelConfig);
 }

 module.exports = { modelController, loadModels, getModelsConfig };
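With the try/catch removed, a rejection from `loadModels` is no longer answered with a 500 by the controller itself; under Express 4 an async handler's rejection must be forwarded explicitly. A hedged sketch of the kind of wrapper this change presumably relies on (names and route are illustrative, not from the diff):

```js
// Forwards async rejections to the Express error middleware.
const wrapAsync = (fn) => (req, res, next) => Promise.resolve(fn(req, res, next)).catch(next);

// router.get('/api/models', wrapAsync(modelController));
```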
@@ -1,81 +1,66 @@
 const {
-  generateTOTPSecret,
-  generateBackupCodes,
   verifyTOTP,
   verifyBackupCode,
+  generateTOTPSecret,
+  generateBackupCodes,
   getTOTPSecret,
 } = require('~/server/services/twoFactorService');
 const { updateUser, getUserById } = require('~/models');
 const { logger } = require('~/config');
-const { encryptV3 } = require('~/server/utils/crypto');
+const { encryptV2 } = require('~/server/utils/crypto');

-const safeAppTitle = (process.env.APP_TITLE || 'LibreChat').replace(/\s+/g, '');
+const enable2FAController = async (req, res) => {
+  const safeAppTitle = (process.env.APP_TITLE || 'LibreChat').replace(/\s+/g, '');

-/**
- * Enable 2FA for the user by generating a new TOTP secret and backup codes.
- * The secret is encrypted and stored, and 2FA is marked as disabled until confirmed.
- */
-const enable2FA = async (req, res) => {
   try {
     const userId = req.user.id;
     const secret = generateTOTPSecret();
     const { plainCodes, codeObjects } = await generateBackupCodes();

-    // Encrypt the secret with v3 encryption before saving.
-    const encryptedSecret = encryptV3(secret);
-
-    // Update the user record: store the secret & backup codes and set twoFactorEnabled to false.
-    const user = await updateUser(userId, {
-      totpSecret: encryptedSecret,
-      backupCodes: codeObjects,
-      twoFactorEnabled: false,
-    });
+    const encryptedSecret = await encryptV2(secret);
+    const user = await updateUser(userId, { totpSecret: encryptedSecret, backupCodes: codeObjects });

     const otpauthUrl = `otpauth://totp/${safeAppTitle}:${user.email}?secret=${secret}&issuer=${safeAppTitle}`;

-    return res.status(200).json({ otpauthUrl, backupCodes: plainCodes });
+    res.status(200).json({
+      otpauthUrl,
+      backupCodes: plainCodes,
+    });
   } catch (err) {
-    logger.error('[enable2FA]', err);
-    return res.status(500).json({ message: err.message });
+    logger.error('[enable2FAController]', err);
+    res.status(500).json({ message: err.message });
   }
 };

-/**
- * Verify a 2FA code (either TOTP or backup code) during setup.
- */
-const verify2FA = async (req, res) => {
+const verify2FAController = async (req, res) => {
   try {
     const userId = req.user.id;
     const { token, backupCode } = req.body;
     const user = await getUserById(userId);

     if (!user || !user.totpSecret) {
       return res.status(400).json({ message: '2FA not initiated' });
     }

-    // Retrieve the plain TOTP secret using getTOTPSecret.
     const secret = await getTOTPSecret(user.totpSecret);
-    let isVerified = false;

-    if (token) {
-      isVerified = await verifyTOTP(secret, token);
-    } else if (backupCode) {
-      isVerified = await verifyBackupCode({ user, backupCode });
-    }
-
-    if (isVerified) {
+    if (token && (await verifyTOTP(secret, token))) {
       return res.status(200).json();
+    } else if (backupCode) {
+      const verified = await verifyBackupCode({ user, backupCode });
+      if (verified) {
+        return res.status(200).json();
+      }
     }
-    return res.status(400).json({ message: 'Invalid token or backup code.' });
+
+    return res.status(400).json({ message: 'Invalid token.' });
   } catch (err) {
-    logger.error('[verify2FA]', err);
-    return res.status(500).json({ message: err.message });
+    logger.error('[verify2FAController]', err);
+    res.status(500).json({ message: err.message });
   }
 };

-/**
- * Confirm and enable 2FA after a successful verification.
- */
-const confirm2FA = async (req, res) => {
+const confirm2FAController = async (req, res) => {
   try {
     const userId = req.user.id;
     const { token } = req.body;
@@ -85,54 +70,50 @@ const confirm2FA = async (req, res) => {
       return res.status(400).json({ message: '2FA not initiated' });
     }

-    // Retrieve the plain TOTP secret using getTOTPSecret.
     const secret = await getTOTPSecret(user.totpSecret);

     if (await verifyTOTP(secret, token)) {
       await updateUser(userId, { twoFactorEnabled: true });
       return res.status(200).json();
     }

     return res.status(400).json({ message: 'Invalid token.' });
   } catch (err) {
-    logger.error('[confirm2FA]', err);
-    return res.status(500).json({ message: err.message });
+    logger.error('[confirm2FAController]', err);
+    res.status(500).json({ message: err.message });
   }
 };

-/**
- * Disable 2FA by clearing the stored secret and backup codes.
- */
-const disable2FA = async (req, res) => {
+const disable2FAController = async (req, res) => {
   try {
     const userId = req.user.id;
-    await updateUser(userId, { totpSecret: null, backupCodes: [], twoFactorEnabled: false });
-    return res.status(200).json();
+    await updateUser(userId, { totpSecret: null, backupCodes: [] });
+    res.status(200).json();
   } catch (err) {
-    logger.error('[disable2FA]', err);
-    return res.status(500).json({ message: err.message });
+    logger.error('[disable2FAController]', err);
+    res.status(500).json({ message: err.message });
   }
 };

-/**
- * Regenerate backup codes for the user.
- */
-const regenerateBackupCodes = async (req, res) => {
+const regenerateBackupCodesController = async (req, res) => {
   try {
     const userId = req.user.id;
     const { plainCodes, codeObjects } = await generateBackupCodes();
     await updateUser(userId, { backupCodes: codeObjects });
-    return res.status(200).json({
+    res.status(200).json({
       backupCodes: plainCodes,
+      backupCodesHash: codeObjects,
     });
   } catch (err) {
-    logger.error('[regenerateBackupCodes]', err);
-    return res.status(500).json({ message: err.message });
+    logger.error('[regenerateBackupCodesController]', err);
+    res.status(500).json({ message: err.message });
   }
 };

 module.exports = {
-  enable2FA,
-  verify2FA,
-  confirm2FA,
-  disable2FA,
-  regenerateBackupCodes,
+  enable2FAController,
+  verify2FAController,
+  confirm2FAController,
+  disable2FAController,
+  regenerateBackupCodesController,
 };
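For orientation, a sketch of how the renamed exports would be wired into an Express router; the module path and route paths are assumptions for illustration, not taken from the diff:

```js
const express = require('express');
const {
  enable2FAController,
  verify2FAController,
  confirm2FAController,
  disable2FAController,
  regenerateBackupCodesController,
} = require('~/server/controllers/TwoFactorController'); // path assumed

const router = express.Router();
router.post('/enable', enable2FAController);
router.post('/verify', verify2FAController);
router.post('/confirm', confirm2FAController);
router.post('/disable', disable2FAController);
router.post('/backup/regenerate', regenerateBackupCodesController);

module.exports = router;
```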
@@ -1,20 +1,18 @@
 const { FileSources } = require('librechat-data-provider');
 const {
-  Balance,
   getFiles,
-  updateUser,
   deleteFiles,
   deleteConvos,
   deletePresets,
   deleteMessages,
   deleteUserById,
   deleteAllUserSessions,
+  updateUser,
 } = require('~/models');
+const User = require('~/models/User');
 const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/services/PluginService');
 const { updateUserPluginsService, deleteUserKey } = require('~/server/services/UserService');
 const { verifyEmail, resendVerificationEmail } = require('~/server/services/AuthService');
-const { needsRefresh, getNewS3URL } = require('~/server/services/Files/S3/crud');
 const { processDeleteRequest } = require('~/server/services/Files/process');
 const { deleteAllSharedLinks } = require('~/models/Share');
 const { deleteToolCalls } = require('~/models/ToolCall');
@@ -22,23 +20,8 @@ const { Transaction } = require('~/models/Transaction');
 const { logger } = require('~/config');

 const getUserController = async (req, res) => {
   /** @type {MongoUser} */
   const userData = req.user.toObject != null ? req.user.toObject() : { ...req.user };
   delete userData.totpSecret;
-  if (req.app.locals.fileStrategy === FileSources.s3 && userData.avatar) {
-    const avatarNeedsRefresh = needsRefresh(userData.avatar, 3600);
-    if (!avatarNeedsRefresh) {
-      return res.status(200).send(userData);
-    }
-    const originalAvatar = userData.avatar;
-    try {
-      userData.avatar = await getNewS3URL(userData.avatar);
-      await updateUser(userData.id, { avatar: userData.avatar });
-    } catch (error) {
-      userData.avatar = originalAvatar;
-      logger.error('Error getting new S3 URL for avatar:', error);
-    }
-  }
   res.status(200).send(userData);
 };

@@ -182,6 +165,37 @@ const resendVerificationController = async (req, res) => {
   }
 };

+const updateUserEncryptionController = async (req, res) => {
+  try {
+    const { encryptionPublicKey, encryptedPrivateKey, encryptionSalt, encryptionIV } = req.body;
+
+    // Allow disabling encryption by passing null for all fields.
+    const allNull = encryptionPublicKey === null && encryptedPrivateKey === null && encryptionSalt === null && encryptionIV === null;
+    const allPresent = encryptionPublicKey && encryptedPrivateKey && encryptionSalt && encryptionIV;
+
+    if (!allNull && !allPresent) {
+      return res.status(400).json({ message: 'Missing encryption parameters.' });
+    }
+
+    // Update the user record with the provided encryption parameters (or null to disable)
+    const updatedUser = await updateUser(req.user.id, {
+      encryptionPublicKey: encryptionPublicKey || null,
+      encryptedPrivateKey: encryptedPrivateKey || null,
+      encryptionSalt: encryptionSalt || null,
+      encryptionIV: encryptionIV || null,
+    });
+
+    if (!updatedUser) {
+      return res.status(404).json({ message: 'User not found.' });
+    }
+
+    res.status(200).json({ success: true });
+  } catch (error) {
+    logger.error('[updateUserEncryptionController]', error);
+    res.status(500).json({ message: 'Something went wrong updating encryption keys.' });
+  }
+};
+
 module.exports = {
   getUserController,
   getTermsStatusController,
@@ -190,4 +204,5 @@ module.exports = {
   verifyEmailController,
   updateUserPluginsController,
   resendVerificationController,
+  updateUserEncryptionController,
 };
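An illustrative client call for the new controller; the URL is an assumption, while the all-or-nothing payload rule comes directly from the validation above:

```js
// Enable E2EE for the current user (all four fields required together).
await fetch('/api/user/encryption', { // endpoint path assumed
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    encryptionPublicKey,  // base64 SPKI key; AskController rebuilds a PEM from it
    encryptedPrivateKey,  // private key wrapped with a password-derived AES key
    encryptionSalt,       // PBKDF2 salt used for that derivation
    encryptionIV,         // IV used when wrapping the private key
  }),
});

// To disable E2EE, send all four fields as null instead; mixed payloads get a 400.
```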
@@ -10,8 +10,8 @@ const {
   ChatModelStreamHandler,
 } = require('@librechat/agents');
 const { processCodeOutput } = require('~/server/services/Files/Code/process');
-const { loadAuthValues } = require('~/server/services/Tools/credentials');
 const { saveBase64Image } = require('~/server/services/Files/process');
+const { loadAuthValues } = require('~/app/clients/tools/util');
 const { logger, sendEvent } = require('~/config');

 /** @typedef {import('@librechat/agents').Graph} Graph */
@@ -7,16 +7,7 @@
// validateVisionModel,
// mapModelToAzureConfig,
// } = require('librechat-data-provider');
require('events').EventEmitter.defaultMaxListeners = 100;
const {
  Callback,
  GraphEvents,
  formatMessage,
  formatAgentMessages,
  formatContentStrings,
  getTokenCountForMessage,
  createMetadataAggregator,
} = require('@librechat/agents');
const { Callback, createMetadataAggregator } = require('@librechat/agents');
const {
  Constants,
  VisionModes,
@@ -26,19 +17,24 @@ const {
  KnownEndpoints,
  anthropicSchema,
  isAgentsEndpoint,
  AgentCapabilities,
  bedrockInputSchema,
  removeNullishValues,
} = require('librechat-data-provider');
const { getCustomEndpointConfig, checkCapability } = require('~/server/services/Config');
const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
const {
  formatMessage,
  addCacheControl,
  formatAgentMessages,
  formatContentStrings,
  createContextHandlers,
} = require('~/app/clients/prompts');
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
const { getBufferString, HumanMessage } = require('@langchain/core/messages');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const Tokenizer = require('~/server/services/Tokenizer');
const BaseClient = require('~/app/clients/BaseClient');
const { logger, sendEvent } = require('~/config');
const { createRun } = require('./run');
const { logger } = require('~/config');

/** @typedef {import('@librechat/agents').MessageContentComplex} MessageContentComplex */
/** @typedef {import('@langchain/core/runnables').RunnableConfig} RunnableConfig */
@@ -103,8 +99,6 @@ class AgentClient extends BaseClient {
    this.outputTokensKey = 'output_tokens';
    /** @type {UsageMetadata} */
    this.usage;
    /** @type {Record<string, number>} */
    this.indexTokenCountMap = {};
  }

  /**
@@ -229,23 +223,14 @@ class AgentClient extends BaseClient {
    };
  }

  /**
   *
   * @param {TMessage} message
   * @param {Array<MongoFile>} attachments
   * @returns {Promise<Array<Partial<MongoFile>>>}
   */
  async addImageURLs(message, attachments) {
    const { files, text, image_urls } = await encodeAndFormat(
    const { files, image_urls } = await encodeAndFormat(
      this.options.req,
      attachments,
      this.options.agent.provider,
      VisionModes.agents,
    );
    message.image_urls = image_urls.length ? image_urls : undefined;
    if (text && text.length) {
      message.ocr = text;
    }
    return files;
  }

@@ -323,21 +308,7 @@ class AgentClient extends BaseClient {
        assistantName: this.options?.modelLabel,
      });

      if (message.ocr && i !== orderedMessages.length - 1) {
        if (typeof formattedMessage.content === 'string') {
          formattedMessage.content = message.ocr + '\n' + formattedMessage.content;
        } else {
          const textPart = formattedMessage.content.find((part) => part.type === 'text');
          textPart
            ? (textPart.text = message.ocr + '\n' + textPart.text)
            : formattedMessage.content.unshift({ type: 'text', text: message.ocr });
        }
      } else if (message.ocr && i === orderedMessages.length - 1) {
        systemContent = [systemContent, message.ocr].join('\n');
      }

      const needsTokenCount =
        (this.contextStrategy && !orderedMessages[i].tokenCount) || message.ocr;
      const needsTokenCount = this.contextStrategy && !orderedMessages[i].tokenCount;

      /* If tokens were never counted, or, is a Vision request and the message has files, count again */
      if (needsTokenCount || (this.isVisionModel && (message.image_urls || message.files))) {
@@ -383,10 +354,6 @@ class AgentClient extends BaseClient {
      }));
    }

    for (let i = 0; i < messages.length; i++) {
      this.indexTokenCountMap[i] = messages[i].tokenCount;
    }

    const result = {
      tokenCountMap,
      prompt: payload,
@@ -471,7 +438,6 @@ class AgentClient extends BaseClient {
              err,
            );
          });
          continue;
        }
        spendTokens(txMetadata, {
          promptTokens: usage.input_tokens,
@@ -633,9 +599,6 @@ class AgentClient extends BaseClient {
    // });
    // }

    /** @type {TCustomConfig['endpoints']['agents']} */
    const agentsEConfig = this.options.req.app.locals[EModelEndpoint.agents];

    /** @type {Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string; streamMode: string }} */
    const config = {
      configurable: {
@@ -643,30 +606,19 @@ class AgentClient extends BaseClient {
        last_agent_index: this.agentConfigs?.size ?? 0,
        hide_sequential_outputs: this.options.agent.hide_sequential_outputs,
      },
      recursionLimit: agentsEConfig?.recursionLimit,
      recursionLimit: this.options.req.app.locals[EModelEndpoint.agents]?.recursionLimit,
      signal: abortController.signal,
      streamMode: 'values',
      version: 'v2',
    };

    const toolSet = new Set((this.options.agent.tools ?? []).map((tool) => tool && tool.name));
    let { messages: initialMessages, indexTokenCountMap } = formatAgentMessages(
      payload,
      this.indexTokenCountMap,
      toolSet,
    );
    const initialMessages = formatAgentMessages(payload);
    if (legacyContentEndpoints.has(this.options.agent.endpoint)) {
      initialMessages = formatContentStrings(initialMessages);
      formatContentStrings(initialMessages);
    }

    /** @type {ReturnType<createRun>} */
    let run;
    const countTokens = ((text) => this.getTokenCount(text)).bind(this);

    /** @type {(message: BaseMessage) => number} */
    const tokenCounter = (message) => {
      return getTokenCountForMessage(message, countTokens);
    };

    /**
     *
@@ -674,23 +626,12 @@ class AgentClient extends BaseClient {
     * @param {BaseMessage[]} messages
     * @param {number} [i]
     * @param {TMessageContentParts[]} [contentData]
     * @param {Record<string, number>} [currentIndexCountMap]
     */
    const runAgent = async (agent, _messages, i = 0, contentData = [], _currentIndexCountMap) => {
    const runAgent = async (agent, _messages, i = 0, contentData = []) => {
      config.configurable.model = agent.model_parameters.model;
      const currentIndexCountMap = _currentIndexCountMap ?? indexTokenCountMap;
      if (i > 0) {
        this.model = agent.model_parameters.model;
      }
      if (agent.recursion_limit && typeof agent.recursion_limit === 'number') {
        config.recursionLimit = agent.recursion_limit;
      }
      if (
        agentsEConfig?.maxRecursionLimit &&
        config.recursionLimit > agentsEConfig?.maxRecursionLimit
      ) {
        config.recursionLimit = agentsEConfig?.maxRecursionLimit;
      }
      config.configurable.agent_id = agent.id;
      config.configurable.name = agent.name;
      config.configurable.agent_index = i;
@@ -753,29 +694,11 @@ class AgentClient extends BaseClient {
      }

      if (contentData.length) {
        const agentUpdate = {
          type: ContentTypes.AGENT_UPDATE,
          [ContentTypes.AGENT_UPDATE]: {
            index: contentData.length,
            runId: this.responseMessageId,
            agentId: agent.id,
          },
        };
        const streamData = {
          event: GraphEvents.ON_AGENT_UPDATE,
          data: agentUpdate,
        };
        this.options.aggregateContent(streamData);
        sendEvent(this.options.res, streamData);
        contentData.push(agentUpdate);
        run.Graph.contentData = contentData;
      }

      await run.processStream({ messages }, config, {
        keepContent: i !== 0,
        tokenCounter,
        indexTokenCountMap: currentIndexCountMap,
        maxContextTokens: agent.maxContextTokens,
        callbacks: {
          [Callback.TOOL_ERROR]: (graph, error, toolId) => {
            logger.error(
@@ -789,13 +712,9 @@ class AgentClient extends BaseClient {
    };

    await runAgent(this.options.agent, initialMessages);

    let finalContentStart = 0;
    if (
      this.agentConfigs &&
      this.agentConfigs.size > 0 &&
      (await checkCapability(this.options.req, AgentCapabilities.chain))
    ) {
      const windowSize = 5;
    if (this.agentConfigs && this.agentConfigs.size > 0) {
      let latestMessage = initialMessages.pop().content;
      if (typeof latestMessage !== 'string') {
        latestMessage = latestMessage[0].text;
@@ -803,16 +722,7 @@ class AgentClient extends BaseClient {
      let i = 1;
      let runMessages = [];

      const windowIndexCountMap = {};
      const windowMessages = initialMessages.slice(-windowSize);
      let currentIndex = 4;
      for (let i = initialMessages.length - 1; i >= 0; i--) {
        windowIndexCountMap[currentIndex] = indexTokenCountMap[i];
        currentIndex--;
        if (currentIndex < 0) {
          break;
        }
      }
      const lastFiveMessages = initialMessages.slice(-5);
      for (const [agentId, agent] of this.agentConfigs) {
        if (abortController.signal.aborted === true) {
          break;
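Note: the windowIndexCountMap loop in the hunk above walks the payload backwards so that the token counts of the final messages line up with positions 0-4 of the five-message window. A self-contained trace with made-up counts:

// Standalone illustration of the window mapping (counts are invented).
const indexTokenCountMap = { 0: 12, 1: 40, 2: 8, 3: 25, 4: 31, 5: 17 };
const initialLength = 6; // messages remaining after the latest one is popped
const windowSize = 5;
const windowIndexCountMap = {};
let currentIndex = windowSize - 1;
for (let i = initialLength - 1; i >= 0 && currentIndex >= 0; i--, currentIndex--) {
  windowIndexCountMap[currentIndex] = indexTokenCountMap[i];
}
console.log(windowIndexCountMap); // { '0': 40, '1': 8, '2': 25, '3': 31, '4': 17 }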
@@ -847,9 +757,7 @@ class AgentClient extends BaseClient {
        }
        try {
          const contextMessages = [];
          const runIndexCountMap = {};
          for (let i = 0; i < windowMessages.length; i++) {
            const message = windowMessages[i];
          for (const message of lastFiveMessages) {
            const messageType = message._getType();
            if (
              (!agent.tools || agent.tools.length === 0) &&
@@ -857,13 +765,11 @@ class AgentClient extends BaseClient {
            ) {
              continue;
            }
            runIndexCountMap[contextMessages.length] = windowIndexCountMap[i];

            contextMessages.push(message);
          }
          const bufferMessage = new HumanMessage(bufferString);
          runIndexCountMap[contextMessages.length] = tokenCounter(bufferMessage);
          const currentMessages = [...contextMessages, bufferMessage];
          await runAgent(agent, currentMessages, i, contentData, runIndexCountMap);
          const currentMessages = [...contextMessages, new HumanMessage(bufferString)];
          await runAgent(agent, currentMessages, i, contentData);
        } catch (err) {
          logger.error(
            `[api/server/controllers/agents/client.js #chatCompletion] Error running agent ${agentId} (${i})`,
@@ -874,7 +780,6 @@ class AgentClient extends BaseClient {
        }
      }

      /** Note: not implemented */
      if (config.configurable.hide_sequential_outputs !== true) {
        finalContentStart = 0;
      }
@@ -907,10 +812,7 @@ class AgentClient extends BaseClient {
        '[api/server/controllers/agents/client.js #sendCompletion] Unhandled error type',
        err,
      );
      this.contentParts.push({
        type: ContentTypes.ERROR,
        [ContentTypes.ERROR]: `An error occurred while processing the request${err?.message ? `: ${err.message}` : ''}`,
      });
      throw err;
    }
  }
}
@@ -932,14 +834,7 @@ class AgentClient extends BaseClient {
    };
    let endpointConfig = this.options.req.app.locals[this.options.agent.endpoint];
    if (!endpointConfig) {
      try {
        endpointConfig = await getCustomEndpointConfig(this.options.agent.endpoint);
      } catch (err) {
        logger.error(
          '[api/server/controllers/agents/client.js #titleConvo] Error getting custom endpoint config',
          err,
        );
      }
      endpointConfig = await getCustomEndpointConfig(this.options.agent.endpoint);
    }
    if (
      endpointConfig &&
@@ -43,12 +43,6 @@ async function createRun({
    agent.model_parameters,
  );

  /** Resolves Mistral type strictness due to new OpenAI usage field */
  if (agent.endpoint?.toLowerCase().includes(KnownEndpoints.mistral)) {
    llmConfig.streamUsage = false;
    llmConfig.usage = true;
  }

  /** @type {'reasoning_content' | 'reasoning'} */
  let reasoningKey;
  if (
@@ -57,6 +51,10 @@ async function createRun({
  ) {
    reasoningKey = 'reasoning';
  }
  if (/o1(?!-(?:mini|preview)).*$/.test(llmConfig.model)) {
    llmConfig.streaming = false;
    llmConfig.disableStreaming = true;
  }
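Note: the regex gate above disables streaming for o1-series models while leaving the -mini and -preview variants untouched; a quick check of its behavior:

const disablesStreaming = (model) => /o1(?!-(?:mini|preview)).*$/.test(model);
console.log(disablesStreaming('o1'));            // true
console.log(disablesStreaming('o1-2024-12-17')); // true
console.log(disablesStreaming('o1-mini'));       // false
console.log(disablesStreaming('o1-preview'));    // false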
  /** @type {StandardGraphConfig} */
  const graphConfig = {
@@ -70,7 +68,7 @@ async function createRun({
  };

  // TEMPORARY FOR TESTING
  if (agent.provider === Providers.ANTHROPIC || agent.provider === Providers.BEDROCK) {
  if (agent.provider === Providers.ANTHROPIC) {
    graphConfig.streamBuffer = 2000;
  }
@@ -1,12 +1,10 @@
const fs = require('fs').promises;
const { nanoid } = require('nanoid');
const {
  Tools,
  Constants,
  FileContext,
  FileSources,
  Constants,
  Tools,
  SystemRoles,
  EToolResources,
  actionDelimiter,
} = require('librechat-data-provider');
const {
@@ -18,10 +16,9 @@ const {
} = require('~/models/Agent');
const { uploadImageBuffer, filterFile } = require('~/server/services/Files/process');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { refreshS3Url } = require('~/server/services/Files/S3/crud');
const { updateAction, getActions } = require('~/models/Action');
const { updateAgentProjects } = require('~/models/Agent');
const { getProjectByName } = require('~/models/Project');
const { updateAgentProjects } = require('~/models/Agent');
const { deleteFileByFilter } = require('~/models/File');
const { logger } = require('~/config');

@@ -104,14 +101,6 @@ const getAgentHandler = async (req, res) => {
      return res.status(404).json({ error: 'Agent not found' });
    }

    if (agent.avatar && agent.avatar?.source === FileSources.s3) {
      const originalUrl = agent.avatar.filepath;
      agent.avatar.filepath = await refreshS3Url(agent.avatar);
      if (originalUrl !== agent.avatar.filepath) {
        await updateAgent({ id }, { avatar: agent.avatar });
      }
    }

    agent.author = agent.author.toString();
    agent.isCollaborative = !!agent.isCollaborative;

@@ -214,25 +203,13 @@ const duplicateAgentHandler = async (req, res) => {
    }

    const {
      id: _id,
      _id: __id,
      id: _id,
      author: _author,
      createdAt: _createdAt,
      updatedAt: _updatedAt,
      tool_resources: _tool_resources = {},
      ...cloneData
    } = agent;
    cloneData.name = `${agent.name} (${new Date().toLocaleString('en-US', {
      dateStyle: 'short',
      timeStyle: 'short',
      hour12: false,
    })})`;

    if (_tool_resources?.[EToolResources.ocr]) {
      cloneData.tool_resources = {
        [EToolResources.ocr]: _tool_resources[EToolResources.ocr],
      };
    }

    const newAgentId = `agent_${nanoid()}`;
    const newAgentData = Object.assign(cloneData, {
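Note: the rest-destructuring in duplicateAgentHandler is a compact way to clone an object while dropping unwanted fields; a standalone example of the same pattern (values invented):

const agent = { _id: 'abc123', id: 'agent_1', author: 'user_1', name: 'Helper', model: 'gpt-4o' };
const { _id: __id, id: _id, author: _author, ...cloneData } = agent;
console.log(cloneData); // { name: 'Helper', model: 'gpt-4o' }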
@@ -19,7 +19,7 @@ const {
  addThreadMetadata,
  saveAssistantMessage,
} = require('~/server/services/Threads');
const { sendResponse, sendMessage, sleep, countTokens } = require('~/server/utils');
const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
const { formatMessage, createVisionPrompt } = require('~/app/clients/prompts');
@@ -27,7 +27,7 @@ const { createRun, StreamRunManager } = require('~/server/services/Runs');
const { addTitle } = require('~/server/services/Endpoints/assistants');
const { createRunBody } = require('~/server/services/createRunBody');
const { getTransactions } = require('~/models/Transaction');
const { checkBalance } = require('~/models/balanceMethods');
const checkBalance = require('~/models/checkBalance');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
const { getModelMaxTokens } = require('~/utils');
@@ -248,8 +248,7 @@ const chatV1 = async (req, res) => {
  }

  const checkBalanceBeforeRun = async () => {
    const balance = req.app?.locals?.balance;
    if (!balance?.enabled) {
    if (!isEnabled(process.env.CHECK_BALANCE)) {
      return;
    }
    const transactions =

@@ -18,14 +18,14 @@ const {
  saveAssistantMessage,
} = require('~/server/services/Threads');
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
const { sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
const { createErrorHandler } = require('~/server/controllers/assistants/errors');
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
const { createRun, StreamRunManager } = require('~/server/services/Runs');
const { addTitle } = require('~/server/services/Endpoints/assistants');
const { sendMessage, sleep, countTokens } = require('~/server/utils');
const { createRunBody } = require('~/server/services/createRunBody');
const { getTransactions } = require('~/models/Transaction');
const { checkBalance } = require('~/models/balanceMethods');
const checkBalance = require('~/models/checkBalance');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
const { getModelMaxTokens } = require('~/utils');
@@ -124,8 +124,7 @@ const chatV2 = async (req, res) => {
  }

  const checkBalanceBeforeRun = async () => {
    const balance = req.app?.locals?.balance;
    if (!balance?.enabled) {
    if (!isEnabled(process.env.CHECK_BALANCE)) {
      return;
    }
    const transactions =
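Note: both handlers gate the balance check on isEnabled(process.env.CHECK_BALANCE) on this side of the diff rather than on app.locals.balance. For reference, a truthy-string helper in the spirit of isEnabled from ~/server/utils (a sketch; the real helper may accept a different set of values):

const isEnabled = (value) =>
  typeof value === 'string' && ['true', '1', 'yes'].includes(value.trim().toLowerCase());

isEnabled(process.env.CHECK_BALANCE); // true only when the env var opts in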
@@ -8,7 +8,7 @@ const loginController = async (req, res) => {
    return res.status(400).json({ message: 'Invalid credentials' });
  }

  if (req.user.twoFactorEnabled) {
  if (req.user.backupCodes != null && req.user.backupCodes.length > 0) {
    const tempToken = generate2FATempToken(req.user._id);
    return res.status(200).json({ twoFAPending: true, tempToken });
  }
@@ -1,17 +1,10 @@
const jwt = require('jsonwebtoken');
const {
  verifyTOTP,
  verifyBackupCode,
  getTOTPSecret,
} = require('~/server/services/twoFactorService');
const { verifyTOTP, verifyBackupCode, getTOTPSecret } = require('~/server/services/twoFactorService');
const { setAuthTokens } = require('~/server/services/AuthService');
const { getUserById } = require('~/models/userMethods');
const { logger } = require('~/config');

/**
 * Verifies the 2FA code during login using a temporary token.
 */
const verify2FAWithTempToken = async (req, res) => {
const verify2FA = async (req, res) => {
  try {
    const { tempToken, token, backupCode } = req.body;
    if (!tempToken) {
@@ -26,24 +19,29 @@ const verify2FAWithTempToken = async (req, res) => {
    }

    const user = await getUserById(payload.userId);
    if (!user || !user.twoFactorEnabled) {
    // Ensure that the user exists and has backup codes (i.e., 2FA enabled).
    if (!user || !(user.backupCodes && user.backupCodes.length > 0)) {
      return res.status(400).json({ message: '2FA is not enabled for this user' });
    }

    // Use the new getTOTPSecret function to retrieve (and decrypt if necessary) the TOTP secret.
    const secret = await getTOTPSecret(user.totpSecret);
    let isVerified = false;
    if (token) {
      isVerified = await verifyTOTP(secret, token);

    let verified = false;
    if (token && (await verifyTOTP(secret, token))) {
      verified = true;
    } else if (backupCode) {
      isVerified = await verifyBackupCode({ user, backupCode });
      verified = await verifyBackupCode({ user, backupCode });
    }

    if (!isVerified) {
    if (!verified) {
      return res.status(401).json({ message: 'Invalid 2FA code or backup code' });
    }

    // Prepare user data to return (omit sensitive fields).
    // Prepare user data for response.
    // If the user is a plain object (from lean queries), we create a shallow copy.
    const userData = user.toObject ? user.toObject() : { ...user };
    // Remove sensitive fields.
    delete userData.password;
    delete userData.__v;
    delete userData.totpSecret;
@@ -52,9 +50,9 @@ const verify2FAWithTempToken = async (req, res) => {
    const authToken = await setAuthTokens(user._id, res);
    return res.status(200).json({ token: authToken, user: userData });
  } catch (err) {
    logger.error('[verify2FAWithTempToken]', err);
    logger.error('[verify2FA]', err);
    return res.status(500).json({ message: 'Something went wrong' });
  }
};

module.exports = { verify2FAWithTempToken };
module.exports = { verify2FA };
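Note: end to end, the temp-token flow looks roughly like the sketch below: the login response signals twoFAPending with a tempToken, and the client exchanges it plus a TOTP code (or backup code) for real credentials. The verify route path here is an assumption for illustration:

async function loginWith2FA(email, password, totpCode) {
  const res = await fetch('/api/auth/login', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ email, password }),
  });
  const data = await res.json();
  if (!data.twoFAPending) {
    return data; // no 2FA on this account
  }
  // Exchange the temporary token plus the TOTP code for real credentials.
  const verifyRes = await fetch('/api/auth/2fa/verify-temp', { // path assumed
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ tempToken: data.tempToken, token: totpCode }),
  });
  return verifyRes.json(); // { token, user }
}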
@@ -10,8 +10,7 @@ const {
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { createToolCall, getToolCallsByConvo } = require('~/models/ToolCall');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { loadTools } = require('~/app/clients/tools/util');
const { loadAuthValues, loadTools } = require('~/app/clients/tools/util');
const { checkAccess } = require('~/server/middleware');
const { getMessage } = require('~/models/Message');
const { logger } = require('~/config');
@@ -4,7 +4,6 @@ require('module-alias')({ base: path.resolve(__dirname, '..') });
const cors = require('cors');
const axios = require('axios');
const express = require('express');
const { createServer } = require('http');
const compression = require('compression');
const passport = require('passport');
const mongoSanitize = require('express-mongo-sanitize');
@@ -15,8 +14,6 @@ const { connectDb, indexSync } = require('~/lib/db');
const { isEnabled } = require('~/server/utils');
const { ldapLogin } = require('~/strategies');
const { logger } = require('~/config');
const { AudioSocketModule } = require('./services/Files/Audio/AudioSocketModule');
const { SocketIOService } = require('./services/WebSocket/WebSocketServer');
const validateImageRequest = require('./middleware/validateImageRequest');
const errorController = require('./controllers/ErrorController');
const configureSocialLogins = require('./socialLogins');
@@ -31,9 +28,6 @@ const port = Number(PORT) || 3080;
const host = HOST || 'localhost';
const trusted_proxy = Number(TRUST_PROXY) || 1; /* trust first proxy by default */

let socketIOService;
let audioModule;

const startServer = async () => {
  if (typeof Bun !== 'undefined') {
    axios.defaults.headers.common['Accept-Encoding'] = 'gzip';
@@ -43,21 +37,7 @@ const startServer = async () => {
  await indexSync();

  const app = express();
  const server = createServer(app);

  app.disable('x-powered-by');
  app.use(
    cors({
      origin: true,
      credentials: true,
    }),
  );

  socketIOService = new SocketIOService(server);
  audioModule = new AudioSocketModule(socketIOService);

  logger.info('WebSocket server and Audio module initialized');

  await AppService(app);

  const indexPath = path.join(app.locals.paths.dist, 'index.html');
@@ -130,7 +110,6 @@ const startServer = async () => {
  app.use('/api/agents', routes.agents);
  app.use('/api/banner', routes.banner);
  app.use('/api/bedrock', routes.bedrock);
  app.use('/api/websocket', routes.websocket);

  app.use('/api/tags', routes.tags);

@@ -148,7 +127,7 @@ const startServer = async () => {
    res.send(updatedIndexHtml);
  });

  server.listen(port, host, () => {
  app.listen(port, host, () => {
    if (host == '0.0.0.0') {
      logger.info(
        `Server listening on all interfaces at port ${port}. Use http://localhost:${port} to access it`,
@@ -156,26 +135,11 @@ const startServer = async () => {
    } else {
      logger.info(`Server listening at http://${host == '0.0.0.0' ? 'localhost' : host}:${port}`);
    }

    logger.info(`Socket.IO endpoint: http://${host}:${port}`);
  });
};

startServer();

process.on('SIGINT', () => {
  logger.info('Shutting down server...');
  if (audioModule) {
    audioModule.cleanup();
    logger.info('Audio module cleaned up');
  }
  if (socketIOService) {
    socketIOService.shutdown();
    logger.info('WebSocket server shut down');
  }
  process.exit(0);
});

let messageCount = 0;
process.on('uncaughtException', (err) => {
  if (!err.message.includes('fetch failed')) {
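Note: on the feat/realt side, the Socket.IO service initialized above could be reached with a stock client along these lines (default socket.io path and CORS settings assumed; see SocketIOService for the actual setup):

const { io } = require('socket.io-client');

const socket = io('http://localhost:3080', { withCredentials: true });
socket.on('connect', () => console.log('connected as', socket.id));
socket.on('disconnect', (reason) => console.log('disconnected:', reason));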
@@ -120,7 +120,7 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
      { promptTokens, completionTokens },
    );

    await saveMessage(
    saveMessage(
      req,
      { ...responseMessage, user },
      { context: 'api/server/middleware/abortMiddleware.js' },
@@ -148,13 +148,6 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
  return { abortController, onStart };
};

/**
 * @param {ServerResponse} res
 * @param {ServerRequest} req
 * @param {Error | unknown} error
 * @param {Partial<TMessage> & { partialText?: string }} data
 * @returns {Promise<void>}
 */
const handleAbortError = async (res, req, error, data) => {
  if (error?.message?.includes('base64')) {
    logger.error('[handleAbortError] Error in base64 encoding', {
@@ -185,30 +178,17 @@ const handleAbortError = async (res, req, error, data) => {
    errorText = `{"type":"${ErrorTypes.NO_SYSTEM_MESSAGES}"}`;
  }

  /**
   * @param {string} partialText
   * @returns {Promise<void>}
   */
  const respondWithError = async (partialText) => {
    const endpointOption = req.body?.endpointOption;
    let options = {
      sender,
      messageId,
      conversationId,
      parentMessageId,
      text: errorText,
      user: req.user.id,
      shouldSaveMessage: true,
      spec: endpointOption?.spec,
      iconURL: endpointOption?.iconURL,
      modelLabel: endpointOption?.modelLabel,
      model: endpointOption?.modelOptions?.model || req.body?.model,
      user: req.user.id,
    };

    if (req.body?.agent_id) {
      options.agent_id = req.body.agent_id;
    }

    if (partialText) {
      options = {
        ...options,
@@ -10,6 +10,7 @@ const openAI = require('~/server/services/Endpoints/openAI');
const agents = require('~/server/services/Endpoints/agents');
const custom = require('~/server/services/Endpoints/custom');
const google = require('~/server/services/Endpoints/google');
const { getConvoFiles } = require('~/models/Conversation');
const { handleError } = require('~/server/utils');

const buildFunction = {
@@ -86,8 +87,16 @@ async function buildEndpointOption(req, res, next) {

  // TODO: use `getModelsConfig` only when necessary
  const modelsConfig = await getModelsConfig(req);
  const { resendFiles = true } = req.body.endpointOption;
  req.body.endpointOption.modelsConfig = modelsConfig;
  if (req.body.files && !isAgents) {
  if (isAgents && resendFiles && req.body.conversationId) {
    const fileIds = await getConvoFiles(req.body.conversationId);
    const requestFiles = req.body.files ?? [];
    if (requestFiles.length || fileIds.length) {
      req.body.endpointOption.attachments = processFiles(requestFiles, fileIds);
    }
  } else if (req.body.files) {
    // hold the promise
    req.body.endpointOption.attachments = processFiles(req.body.files);
  }
  next();
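Note: the new branch resends a conversation's stored files to agents, combining file IDs already linked to the conversation with files attached to the current request. processFiles (from ~/server/services/Files/process) presumably resolves and dedupes the two sources; a plausible merge, for illustration only, with assumed shapes:

// Illustration only: dedupe request files against conversation file IDs.
const mergeAttachmentIds = (requestFiles, fileIds) => {
  const seen = new Set(requestFiles.map((file) => file.file_id));
  const extra = fileIds.filter((id) => !seen.has(id));
  return [...requestFiles.map((file) => file.file_id), ...extra];
};

mergeAttachmentIds([{ file_id: 'a' }], ['a', 'b']); // ['a', 'b']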
@@ -41,7 +41,7 @@ const banResponse = async (req, res) => {
 * @function
 * @param {Object} req - Express request object.
 * @param {Object} res - Express response object.
 * @param {import('express').NextFunction} next - Next middleware function.
 * @param {Function} next - Next middleware function.
 *
 * @returns {Promise<function|Object>} - Returns a Promise which when resolved calls next middleware if user or source IP is not banned. Otherwise calls `banResponse()` and sets ban details in `banCache`.
 */