Compare commits
34 Commits
refactor/c
...
re-add-dow
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c27e8566fb | ||
|
|
4ffdefc2a8 | ||
|
|
2d5f704695 | ||
|
|
3fd25920d4 | ||
|
|
d03f8285db | ||
|
|
e565e0faab | ||
|
|
d4d56281e3 | ||
|
|
14eff23b57 | ||
|
|
18fd8f1416 | ||
|
|
ba9cb71245 | ||
|
|
e5dfa06e6c | ||
|
|
0bd59c0efe | ||
|
|
344297021f | ||
|
|
620973436c | ||
|
|
422d1a2c91 | ||
|
|
2ad097647c | ||
|
|
9e7615f832 | ||
|
|
ee4dd1b2e9 | ||
|
|
f6125ccd59 | ||
|
|
1acd47a0f6 | ||
|
|
511f1336da | ||
|
|
5d40d0a37a | ||
|
|
1c282d1517 | ||
|
|
73dbf3eb20 | ||
|
|
237a0de8b6 | ||
|
|
d5782ac66c | ||
|
|
d5d188eebf | ||
|
|
25ea3b8e98 | ||
|
|
b1ec67ea42 | ||
|
|
f1bb5fa4c5 | ||
|
|
44d8596872 | ||
|
|
32d84c85ea | ||
|
|
0a1d38e318 | ||
|
|
d839e4661c |
@@ -144,7 +144,7 @@ GOOGLE_KEY=user_provided
|
||||
#============#
|
||||
|
||||
OPENAI_API_KEY=user_provided
|
||||
# OPENAI_MODELS=gpt-4o,gpt-3.5-turbo-0125,gpt-3.5-turbo-0301,gpt-3.5-turbo,gpt-4,gpt-4-0613,gpt-4-vision-preview,gpt-3.5-turbo-0613,gpt-3.5-turbo-16k-0613,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-instruct-0914,gpt-3.5-turbo-16k
|
||||
# OPENAI_MODELS=gpt-4o,gpt-4o-mini,gpt-3.5-turbo-0125,gpt-3.5-turbo-0301,gpt-3.5-turbo,gpt-4,gpt-4-0613,gpt-4-vision-preview,gpt-3.5-turbo-0613,gpt-3.5-turbo-16k-0613,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-instruct-0914,gpt-3.5-turbo-16k
|
||||
|
||||
DEBUG_OPENAI=false
|
||||
|
||||
@@ -166,7 +166,7 @@ DEBUG_OPENAI=false
|
||||
|
||||
ASSISTANTS_API_KEY=user_provided
|
||||
# ASSISTANTS_BASE_URL=
|
||||
# ASSISTANTS_MODELS=gpt-4o,gpt-3.5-turbo-0125,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-16k,gpt-3.5-turbo,gpt-4,gpt-4-0314,gpt-4-32k-0314,gpt-4-0613,gpt-3.5-turbo-0613,gpt-3.5-turbo-1106,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview
|
||||
# ASSISTANTS_MODELS=gpt-4o,gpt-4o-mini,gpt-3.5-turbo-0125,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-16k,gpt-3.5-turbo,gpt-4,gpt-4-0314,gpt-4-32k-0314,gpt-4-0613,gpt-3.5-turbo-0613,gpt-3.5-turbo-1106,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview
|
||||
|
||||
#==========================#
|
||||
# Azure Assistants API #
|
||||
@@ -188,7 +188,7 @@ ASSISTANTS_API_KEY=user_provided
|
||||
# Plugins #
|
||||
#============#
|
||||
|
||||
# PLUGIN_MODELS=gpt-4o,gpt-4,gpt-4-turbo-preview,gpt-4-0125-preview,gpt-4-1106-preview,gpt-4-0613,gpt-3.5-turbo,gpt-3.5-turbo-0125,gpt-3.5-turbo-1106,gpt-3.5-turbo-0613
|
||||
# PLUGIN_MODELS=gpt-4o,gpt-4o-mini,gpt-4,gpt-4-turbo-preview,gpt-4-0125-preview,gpt-4-1106-preview,gpt-4-0613,gpt-3.5-turbo,gpt-3.5-turbo-0125,gpt-3.5-turbo-1106,gpt-3.5-turbo-0613
|
||||
|
||||
DEBUG_PLUGINS=true
|
||||
|
||||
@@ -374,6 +374,8 @@ LDAP_BIND_CREDENTIALS=
|
||||
LDAP_USER_SEARCH_BASE=
|
||||
LDAP_SEARCH_FILTER=mail={{username}}
|
||||
LDAP_CA_CERT_PATH=
|
||||
# LDAP_TLS_REJECT_UNAUTHORIZED=
|
||||
# LDAP_LOGIN_USES_USERNAME=true
|
||||
# LDAP_ID=
|
||||
# LDAP_USERNAME=
|
||||
# LDAP_FULL_NAME=
|
||||
|
||||
@@ -50,7 +50,7 @@
|
||||
- 🔄 Edit, Resubmit, and Continue Messages with Conversation branching
|
||||
- 🌿 Fork Messages & Conversations for Advanced Context control
|
||||
- 💬 Multimodal Chat:
|
||||
- Upload and analyze images with Claude 3, GPT-4 (including `gpt-4o`), and Gemini Vision 📸
|
||||
- Upload and analyze images with Claude 3, GPT-4 (including `gpt-4o` and `gpt-4o-mini`), and Gemini Vision 📸
|
||||
- Chat with Files using Custom Endpoints, OpenAI, Azure, Anthropic, & Google. 🗃️
|
||||
- Advanced Agents with Files, Code Interpreter, Tools, and API Actions 🔦
|
||||
- Available through the [OpenAI Assistants API](https://platform.openai.com/docs/assistants/overview) 🌤️
|
||||
|
||||
@@ -2,8 +2,10 @@ const Anthropic = require('@anthropic-ai/sdk');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
|
||||
const {
|
||||
getResponseSender,
|
||||
Constants,
|
||||
EModelEndpoint,
|
||||
anthropicSettings,
|
||||
getResponseSender,
|
||||
validateVisionModel,
|
||||
} = require('librechat-data-provider');
|
||||
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
|
||||
@@ -16,6 +18,7 @@ const {
|
||||
} = require('./prompts');
|
||||
const spendTokens = require('~/models/spendTokens');
|
||||
const { getModelMaxTokens } = require('~/utils');
|
||||
const { sleep } = require('~/server/utils');
|
||||
const BaseClient = require('./BaseClient');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
@@ -29,6 +32,8 @@ function delayBeforeRetry(attempts, baseDelay = 1000) {
|
||||
return new Promise((resolve) => setTimeout(resolve, baseDelay * attempts));
|
||||
}
|
||||
|
||||
const { legacy } = anthropicSettings;
|
||||
|
||||
class AnthropicClient extends BaseClient {
|
||||
constructor(apiKey, options = {}) {
|
||||
super(apiKey, options);
|
||||
@@ -61,15 +66,20 @@ class AnthropicClient extends BaseClient {
|
||||
const modelOptions = this.options.modelOptions || {};
|
||||
this.modelOptions = {
|
||||
...modelOptions,
|
||||
// set some good defaults (check for undefined in some cases because they may be 0)
|
||||
model: modelOptions.model || 'claude-1',
|
||||
temperature: typeof modelOptions.temperature === 'undefined' ? 1 : modelOptions.temperature, // 0 - 1, 1 is default
|
||||
topP: typeof modelOptions.topP === 'undefined' ? 0.7 : modelOptions.topP, // 0 - 1, default: 0.7
|
||||
topK: typeof modelOptions.topK === 'undefined' ? 40 : modelOptions.topK, // 1-40, default: 40
|
||||
stop: modelOptions.stop, // no stop method for now
|
||||
model: modelOptions.model || anthropicSettings.model.default,
|
||||
};
|
||||
|
||||
this.isClaude3 = this.modelOptions.model.includes('claude-3');
|
||||
this.isLegacyOutput = !this.modelOptions.model.includes('claude-3-5-sonnet');
|
||||
|
||||
if (
|
||||
this.isLegacyOutput &&
|
||||
this.modelOptions.maxOutputTokens &&
|
||||
this.modelOptions.maxOutputTokens > legacy.maxOutputTokens.default
|
||||
) {
|
||||
this.modelOptions.maxOutputTokens = legacy.maxOutputTokens.default;
|
||||
}
|
||||
|
||||
this.useMessages = this.isClaude3 || !!this.options.attachments;
|
||||
|
||||
this.defaultVisionModel = this.options.visionModel ?? 'claude-3-sonnet-20240229';
|
||||
@@ -119,10 +129,11 @@ class AnthropicClient extends BaseClient {
|
||||
|
||||
/**
|
||||
* Get the initialized Anthropic client.
|
||||
* @param {Partial<Anthropic.ClientOptions>} requestOptions - The options for the client.
|
||||
* @returns {Anthropic} The Anthropic client instance.
|
||||
*/
|
||||
getClient() {
|
||||
/** @type {Anthropic.default.RequestOptions} */
|
||||
getClient(requestOptions) {
|
||||
/** @type {Anthropic.ClientOptions} */
|
||||
const options = {
|
||||
fetch: this.fetch,
|
||||
apiKey: this.apiKey,
|
||||
@@ -136,6 +147,12 @@ class AnthropicClient extends BaseClient {
|
||||
options.baseURL = this.options.reverseProxyUrl;
|
||||
}
|
||||
|
||||
if (requestOptions?.model && requestOptions.model.includes('claude-3-5-sonnet')) {
|
||||
options.defaultHeaders = {
|
||||
'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
|
||||
};
|
||||
}
|
||||
|
||||
return new Anthropic(options);
|
||||
}
|
||||
|
||||
@@ -556,8 +573,6 @@ class AnthropicClient extends BaseClient {
|
||||
}
|
||||
|
||||
logger.debug('modelOptions', { modelOptions });
|
||||
|
||||
const client = this.getClient();
|
||||
const metadata = {
|
||||
user_id: this.user,
|
||||
};
|
||||
@@ -585,7 +600,7 @@ class AnthropicClient extends BaseClient {
|
||||
|
||||
if (this.useMessages) {
|
||||
requestOptions.messages = payload;
|
||||
requestOptions.max_tokens = maxOutputTokens || 1500;
|
||||
requestOptions.max_tokens = maxOutputTokens || legacy.maxOutputTokens.default;
|
||||
} else {
|
||||
requestOptions.prompt = payload;
|
||||
requestOptions.max_tokens_to_sample = maxOutputTokens || 1500;
|
||||
@@ -605,12 +620,14 @@ class AnthropicClient extends BaseClient {
|
||||
};
|
||||
|
||||
const maxRetries = 3;
|
||||
const streamRate = this.options.streamRate ?? Constants.DEFAULT_STREAM_RATE;
|
||||
async function processResponse() {
|
||||
let attempts = 0;
|
||||
|
||||
while (attempts < maxRetries) {
|
||||
let response;
|
||||
try {
|
||||
const client = this.getClient(requestOptions);
|
||||
response = await this.createResponse(client, requestOptions);
|
||||
|
||||
signal.addEventListener('abort', () => {
|
||||
@@ -627,6 +644,8 @@ class AnthropicClient extends BaseClient {
|
||||
} else if (completion.completion) {
|
||||
handleChunk(completion.completion);
|
||||
}
|
||||
|
||||
await sleep(streamRate);
|
||||
}
|
||||
|
||||
// Successful processing, exit loop
|
||||
@@ -737,7 +756,11 @@ class AnthropicClient extends BaseClient {
|
||||
};
|
||||
|
||||
try {
|
||||
const response = await this.createResponse(this.getClient(), requestOptions, true);
|
||||
const response = await this.createResponse(
|
||||
this.getClient(requestOptions),
|
||||
requestOptions,
|
||||
true,
|
||||
);
|
||||
let promptTokens = response?.usage?.input_tokens;
|
||||
let completionTokens = response?.usage?.output_tokens;
|
||||
if (!promptTokens) {
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
const crypto = require('crypto');
|
||||
const fetch = require('node-fetch');
|
||||
const { supportsBalanceCheck, Constants } = require('librechat-data-provider');
|
||||
const { supportsBalanceCheck, Constants, CacheKeys, Time } = require('librechat-data-provider');
|
||||
const { getMessages, saveMessage, updateMessage, saveConvo } = require('~/models');
|
||||
const { addSpaceIfNeeded, isEnabled } = require('~/server/utils');
|
||||
const checkBalance = require('~/models/checkBalance');
|
||||
const { getFiles } = require('~/models/File');
|
||||
const { getLogStores } = require('~/cache');
|
||||
const TextStream = require('./TextStream');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
@@ -539,7 +540,19 @@ class BaseClient {
|
||||
const completionTokens = this.getTokenCount(completion);
|
||||
await this.recordTokenUsage({ promptTokens, completionTokens });
|
||||
}
|
||||
if (this.userMessagePromise) {
|
||||
await this.userMessagePromise;
|
||||
}
|
||||
this.responsePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user);
|
||||
const messageCache = getLogStores(CacheKeys.MESSAGES);
|
||||
messageCache.set(
|
||||
responseMessageId,
|
||||
{
|
||||
text: responseMessage.text,
|
||||
complete: true,
|
||||
},
|
||||
Time.FIVE_MINUTES,
|
||||
);
|
||||
delete responseMessage.tokenCount;
|
||||
return responseMessage;
|
||||
}
|
||||
@@ -598,28 +611,41 @@ class BaseClient {
|
||||
* @param {string | null} user
|
||||
*/
|
||||
async saveMessageToDatabase(message, endpointOptions, user = null) {
|
||||
const savedMessage = await saveMessage({
|
||||
...message,
|
||||
endpoint: this.options.endpoint,
|
||||
unfinished: false,
|
||||
user,
|
||||
});
|
||||
if (this.user && user !== this.user) {
|
||||
throw new Error('User mismatch.');
|
||||
}
|
||||
|
||||
const savedMessage = await saveMessage(
|
||||
this.options.req,
|
||||
{
|
||||
...message,
|
||||
endpoint: this.options.endpoint,
|
||||
unfinished: false,
|
||||
user,
|
||||
},
|
||||
{ context: 'api/app/clients/BaseClient.js - saveMessageToDatabase #saveMessage' },
|
||||
);
|
||||
|
||||
if (this.skipSaveConvo) {
|
||||
return { message: savedMessage };
|
||||
}
|
||||
const conversation = await saveConvo(user, {
|
||||
conversationId: message.conversationId,
|
||||
endpoint: this.options.endpoint,
|
||||
endpointType: this.options.endpointType,
|
||||
...endpointOptions,
|
||||
});
|
||||
|
||||
const conversation = await saveConvo(
|
||||
this.options.req,
|
||||
{
|
||||
conversationId: message.conversationId,
|
||||
endpoint: this.options.endpoint,
|
||||
endpointType: this.options.endpointType,
|
||||
...endpointOptions,
|
||||
},
|
||||
{ context: 'api/app/clients/BaseClient.js - saveMessageToDatabase #saveConvo' },
|
||||
);
|
||||
|
||||
return { message: savedMessage, conversation };
|
||||
}
|
||||
|
||||
async updateMessageInDatabase(message) {
|
||||
await updateMessage(message);
|
||||
await updateMessage(this.options.req, message);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -13,10 +13,12 @@ const {
|
||||
endpointSettings,
|
||||
EModelEndpoint,
|
||||
VisionModes,
|
||||
Constants,
|
||||
AuthKeys,
|
||||
} = require('librechat-data-provider');
|
||||
const { encodeAndFormat } = require('~/server/services/Files/images');
|
||||
const { getModelMaxTokens } = require('~/utils');
|
||||
const { sleep } = require('~/server/utils');
|
||||
const { logger } = require('~/config');
|
||||
const {
|
||||
formatMessage,
|
||||
@@ -620,8 +622,9 @@ class GoogleClient extends BaseClient {
|
||||
}
|
||||
|
||||
async getCompletion(_payload, options = {}) {
|
||||
const { onProgress, abortController } = options;
|
||||
const { parameters, instances } = _payload;
|
||||
const { onProgress, abortController } = options;
|
||||
const streamRate = this.options.streamRate ?? Constants.DEFAULT_STREAM_RATE;
|
||||
const { messages: _messages, context, examples: _examples } = instances?.[0] ?? {};
|
||||
|
||||
let examples;
|
||||
@@ -701,6 +704,7 @@ class GoogleClient extends BaseClient {
|
||||
delay,
|
||||
});
|
||||
reply += chunkText;
|
||||
await sleep(streamRate);
|
||||
}
|
||||
return reply;
|
||||
}
|
||||
@@ -712,10 +716,17 @@ class GoogleClient extends BaseClient {
|
||||
safetySettings: safetySettings,
|
||||
});
|
||||
|
||||
let delay = this.isGenerativeModel ? 12 : 8;
|
||||
if (modelName.includes('flash')) {
|
||||
delay = 5;
|
||||
let delay = this.options.streamRate || 8;
|
||||
|
||||
if (!this.options.streamRate) {
|
||||
if (this.isGenerativeModel) {
|
||||
delay = 12;
|
||||
}
|
||||
if (modelName.includes('flash')) {
|
||||
delay = 5;
|
||||
}
|
||||
}
|
||||
|
||||
for await (const chunk of stream) {
|
||||
const chunkText = chunk?.content ?? chunk;
|
||||
await this.generateTextStream(chunkText, onProgress, {
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
const { z } = require('zod');
|
||||
const axios = require('axios');
|
||||
const { Ollama } = require('ollama');
|
||||
const { Constants } = require('librechat-data-provider');
|
||||
const { deriveBaseURL } = require('~/utils');
|
||||
const { sleep } = require('~/server/utils');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const ollamaPayloadSchema = z.object({
|
||||
@@ -40,6 +42,7 @@ const getValidBase64 = (imageUrl) => {
|
||||
class OllamaClient {
|
||||
constructor(options = {}) {
|
||||
const host = deriveBaseURL(options.baseURL ?? 'http://localhost:11434');
|
||||
this.streamRate = options.streamRate ?? Constants.DEFAULT_STREAM_RATE;
|
||||
/** @type {Ollama} */
|
||||
this.client = new Ollama({ host });
|
||||
}
|
||||
@@ -136,6 +139,8 @@ class OllamaClient {
|
||||
stream.controller.abort();
|
||||
break;
|
||||
}
|
||||
|
||||
await sleep(this.streamRate);
|
||||
}
|
||||
}
|
||||
// TODO: regular completion
|
||||
|
||||
@@ -1182,8 +1182,10 @@ ${convo}
|
||||
});
|
||||
}
|
||||
|
||||
const streamRate = this.options.streamRate ?? Constants.DEFAULT_STREAM_RATE;
|
||||
|
||||
if (this.message_file_map && this.isOllama) {
|
||||
const ollamaClient = new OllamaClient({ baseURL });
|
||||
const ollamaClient = new OllamaClient({ baseURL, streamRate });
|
||||
return await ollamaClient.chatCompletion({
|
||||
payload: modelOptions,
|
||||
onProgress,
|
||||
@@ -1221,8 +1223,6 @@ ${convo}
|
||||
}
|
||||
});
|
||||
|
||||
const azureDelay = this.modelOptions.model?.includes('gpt-4') ? 30 : 17;
|
||||
|
||||
for await (const chunk of stream) {
|
||||
const token = chunk.choices[0]?.delta?.content || '';
|
||||
intermediateReply += token;
|
||||
@@ -1232,9 +1232,7 @@ ${convo}
|
||||
break;
|
||||
}
|
||||
|
||||
if (this.azure) {
|
||||
await sleep(azureDelay);
|
||||
}
|
||||
await sleep(streamRate);
|
||||
}
|
||||
|
||||
if (!UnexpectedRoleError) {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const OpenAIClient = require('./OpenAIClient');
|
||||
const { CallbackManager } = require('langchain/callbacks');
|
||||
const { CacheKeys, Time } = require('librechat-data-provider');
|
||||
const { BufferMemory, ChatMessageHistory } = require('langchain/memory');
|
||||
const { initializeCustomAgent, initializeFunctionsAgent } = require('./agents');
|
||||
const { addImages, buildErrorInput, buildPromptPrefix } = require('./output_parsers');
|
||||
@@ -11,6 +12,7 @@ const { SelfReflectionTool } = require('./tools');
|
||||
const { isEnabled } = require('~/server/utils');
|
||||
const { extractBaseURL } = require('~/utils');
|
||||
const { loadTools } = require('./tools/util');
|
||||
const { getLogStores } = require('~/cache');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
class PluginsClient extends OpenAIClient {
|
||||
@@ -220,6 +222,13 @@ class PluginsClient extends OpenAIClient {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {TMessage} responseMessage
|
||||
* @param {Partial<TMessage>} saveOptions
|
||||
* @param {string} user
|
||||
* @returns
|
||||
*/
|
||||
async handleResponseMessage(responseMessage, saveOptions, user) {
|
||||
const { output, errorMessage, ...result } = this.result;
|
||||
logger.debug('[PluginsClient][handleResponseMessage] Output:', {
|
||||
@@ -239,6 +248,15 @@ class PluginsClient extends OpenAIClient {
|
||||
}
|
||||
|
||||
this.responsePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user);
|
||||
const messageCache = getLogStores(CacheKeys.MESSAGES);
|
||||
messageCache.set(
|
||||
responseMessage.messageId,
|
||||
{
|
||||
text: responseMessage.text,
|
||||
complete: true,
|
||||
},
|
||||
Time.FIVE_MINUTES,
|
||||
);
|
||||
delete responseMessage.tokenCount;
|
||||
return { ...responseMessage, ...result };
|
||||
}
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
const AnthropicClient = require('../AnthropicClient');
|
||||
const { anthropicSettings } = require('librechat-data-provider');
|
||||
const AnthropicClient = require('~/app/clients/AnthropicClient');
|
||||
|
||||
const HUMAN_PROMPT = '\n\nHuman:';
|
||||
const AI_PROMPT = '\n\nAssistant:';
|
||||
|
||||
@@ -22,7 +24,7 @@ describe('AnthropicClient', () => {
|
||||
const options = {
|
||||
modelOptions: {
|
||||
model,
|
||||
temperature: 0.7,
|
||||
temperature: anthropicSettings.temperature.default,
|
||||
},
|
||||
};
|
||||
client = new AnthropicClient('test-api-key');
|
||||
@@ -33,7 +35,42 @@ describe('AnthropicClient', () => {
|
||||
it('should set the options correctly', () => {
|
||||
expect(client.apiKey).toBe('test-api-key');
|
||||
expect(client.modelOptions.model).toBe(model);
|
||||
expect(client.modelOptions.temperature).toBe(0.7);
|
||||
expect(client.modelOptions.temperature).toBe(anthropicSettings.temperature.default);
|
||||
});
|
||||
|
||||
it('should set legacy maxOutputTokens for non-Claude-3 models', () => {
|
||||
const client = new AnthropicClient('test-api-key');
|
||||
client.setOptions({
|
||||
modelOptions: {
|
||||
model: 'claude-2',
|
||||
maxOutputTokens: anthropicSettings.maxOutputTokens.default,
|
||||
},
|
||||
});
|
||||
expect(client.modelOptions.maxOutputTokens).toBe(
|
||||
anthropicSettings.legacy.maxOutputTokens.default,
|
||||
);
|
||||
});
|
||||
it('should not set maxOutputTokens if not provided', () => {
|
||||
const client = new AnthropicClient('test-api-key');
|
||||
client.setOptions({
|
||||
modelOptions: {
|
||||
model: 'claude-3',
|
||||
},
|
||||
});
|
||||
expect(client.modelOptions.maxOutputTokens).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not set legacy maxOutputTokens for Claude-3 models', () => {
|
||||
const client = new AnthropicClient('test-api-key');
|
||||
client.setOptions({
|
||||
modelOptions: {
|
||||
model: 'claude-3-opus-20240229',
|
||||
maxOutputTokens: anthropicSettings.legacy.maxOutputTokens.default,
|
||||
},
|
||||
});
|
||||
expect(client.modelOptions.maxOutputTokens).toBe(
|
||||
anthropicSettings.legacy.maxOutputTokens.default,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -136,4 +173,57 @@ describe('AnthropicClient', () => {
|
||||
expect(prompt).toContain('You are Claude-2');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getClient', () => {
|
||||
it('should set legacy maxOutputTokens for non-Claude-3 models', () => {
|
||||
const client = new AnthropicClient('test-api-key');
|
||||
client.setOptions({
|
||||
modelOptions: {
|
||||
model: 'claude-2',
|
||||
maxOutputTokens: anthropicSettings.legacy.maxOutputTokens.default,
|
||||
},
|
||||
});
|
||||
expect(client.modelOptions.maxOutputTokens).toBe(
|
||||
anthropicSettings.legacy.maxOutputTokens.default,
|
||||
);
|
||||
});
|
||||
|
||||
it('should not set legacy maxOutputTokens for Claude-3 models', () => {
|
||||
const client = new AnthropicClient('test-api-key');
|
||||
client.setOptions({
|
||||
modelOptions: {
|
||||
model: 'claude-3-opus-20240229',
|
||||
maxOutputTokens: anthropicSettings.legacy.maxOutputTokens.default,
|
||||
},
|
||||
});
|
||||
expect(client.modelOptions.maxOutputTokens).toBe(
|
||||
anthropicSettings.legacy.maxOutputTokens.default,
|
||||
);
|
||||
});
|
||||
|
||||
it('should add beta header for claude-3-5-sonnet model', () => {
|
||||
const client = new AnthropicClient('test-api-key');
|
||||
const modelOptions = {
|
||||
model: 'claude-3-5-sonnet-20240307',
|
||||
};
|
||||
client.setOptions({ modelOptions });
|
||||
const anthropicClient = client.getClient(modelOptions);
|
||||
expect(anthropicClient._options.defaultHeaders).toBeDefined();
|
||||
expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
|
||||
expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
|
||||
'max-tokens-3-5-sonnet-2024-07-15',
|
||||
);
|
||||
});
|
||||
|
||||
it('should not add beta header for other models', () => {
|
||||
const client = new AnthropicClient('test-api-key');
|
||||
client.setOptions({
|
||||
modelOptions: {
|
||||
model: 'claude-2',
|
||||
},
|
||||
});
|
||||
const anthropicClient = client.getClient();
|
||||
expect(anthropicClient.defaultHeaders).not.toHaveProperty('anthropic-beta');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
const { Constants } = require('librechat-data-provider');
|
||||
const { initializeFakeClient } = require('./FakeClient');
|
||||
|
||||
jest.mock('../../../lib/db/connectDb');
|
||||
jest.mock('~/lib/db/connectDb');
|
||||
jest.mock('~/models', () => ({
|
||||
User: jest.fn(),
|
||||
Key: jest.fn(),
|
||||
@@ -631,5 +631,32 @@ describe('BaseClient', () => {
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
test('userMessagePromise is awaited before saving response message', async () => {
|
||||
// Mock the saveMessageToDatabase method
|
||||
TestClient.saveMessageToDatabase = jest.fn().mockImplementation(() => {
|
||||
return new Promise((resolve) => setTimeout(resolve, 100)); // Simulate a delay
|
||||
});
|
||||
|
||||
// Send a message
|
||||
const messagePromise = TestClient.sendMessage('Hello, world!');
|
||||
|
||||
// Wait a short time to ensure the user message save has started
|
||||
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||
|
||||
// Check that saveMessageToDatabase has been called once (for the user message)
|
||||
expect(TestClient.saveMessageToDatabase).toHaveBeenCalledTimes(1);
|
||||
|
||||
// Wait for the message to be fully processed
|
||||
await messagePromise;
|
||||
|
||||
// Check that saveMessageToDatabase has been called twice (once for user message, once for response)
|
||||
expect(TestClient.saveMessageToDatabase).toHaveBeenCalledTimes(2);
|
||||
|
||||
// Check the order of calls
|
||||
const calls = TestClient.saveMessageToDatabase.mock.calls;
|
||||
expect(calls[0][0].isCreatedByUser).toBe(true); // First call should be for user message
|
||||
expect(calls[1][0].isCreatedByUser).toBe(false); // Second call should be for response message
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
25
api/cache/getLogStores.js
vendored
25
api/cache/getLogStores.js
vendored
@@ -1,13 +1,11 @@
|
||||
const Keyv = require('keyv');
|
||||
const { CacheKeys, ViolationTypes } = require('librechat-data-provider');
|
||||
const { CacheKeys, ViolationTypes, Time } = require('librechat-data-provider');
|
||||
const { logFile, violationFile } = require('./keyvFiles');
|
||||
const { math, isEnabled } = require('~/server/utils');
|
||||
const keyvRedis = require('./keyvRedis');
|
||||
const keyvMongo = require('./keyvMongo');
|
||||
|
||||
const { BAN_DURATION, USE_REDIS } = process.env ?? {};
|
||||
const THIRTY_MINUTES = 1800000;
|
||||
const TEN_MINUTES = 600000;
|
||||
|
||||
const duration = math(BAN_DURATION, 7200000);
|
||||
|
||||
@@ -29,17 +27,21 @@ const roles = isEnabled(USE_REDIS)
|
||||
? new Keyv({ store: keyvRedis })
|
||||
: new Keyv({ namespace: CacheKeys.ROLES });
|
||||
|
||||
const audioRuns = isEnabled(USE_REDIS) // ttl: 30 minutes
|
||||
? new Keyv({ store: keyvRedis, ttl: TEN_MINUTES })
|
||||
: new Keyv({ namespace: CacheKeys.AUDIO_RUNS, ttl: TEN_MINUTES });
|
||||
const audioRuns = isEnabled(USE_REDIS)
|
||||
? new Keyv({ store: keyvRedis, ttl: Time.TEN_MINUTES })
|
||||
: new Keyv({ namespace: CacheKeys.AUDIO_RUNS, ttl: Time.TEN_MINUTES });
|
||||
|
||||
const messages = isEnabled(USE_REDIS)
|
||||
? new Keyv({ store: keyvRedis, ttl: Time.FIVE_MINUTES })
|
||||
: new Keyv({ namespace: CacheKeys.MESSAGES, ttl: Time.FIVE_MINUTES });
|
||||
|
||||
const tokenConfig = isEnabled(USE_REDIS) // ttl: 30 minutes
|
||||
? new Keyv({ store: keyvRedis, ttl: THIRTY_MINUTES })
|
||||
: new Keyv({ namespace: CacheKeys.TOKEN_CONFIG, ttl: THIRTY_MINUTES });
|
||||
? new Keyv({ store: keyvRedis, ttl: Time.THIRTY_MINUTES })
|
||||
: new Keyv({ namespace: CacheKeys.TOKEN_CONFIG, ttl: Time.THIRTY_MINUTES });
|
||||
|
||||
const genTitle = isEnabled(USE_REDIS) // ttl: 2 minutes
|
||||
? new Keyv({ store: keyvRedis, ttl: 120000 })
|
||||
: new Keyv({ namespace: CacheKeys.GEN_TITLE, ttl: 120000 });
|
||||
? new Keyv({ store: keyvRedis, ttl: Time.TWO_MINUTES })
|
||||
: new Keyv({ namespace: CacheKeys.GEN_TITLE, ttl: Time.TWO_MINUTES });
|
||||
|
||||
const modelQueries = isEnabled(process.env.USE_REDIS)
|
||||
? new Keyv({ store: keyvRedis })
|
||||
@@ -47,7 +49,7 @@ const modelQueries = isEnabled(process.env.USE_REDIS)
|
||||
|
||||
const abortKeys = isEnabled(USE_REDIS)
|
||||
? new Keyv({ store: keyvRedis })
|
||||
: new Keyv({ namespace: CacheKeys.ABORT_KEYS, ttl: 600000 });
|
||||
: new Keyv({ namespace: CacheKeys.ABORT_KEYS, ttl: Time.TEN_MINUTES });
|
||||
|
||||
const namespaces = {
|
||||
[CacheKeys.ROLES]: roles,
|
||||
@@ -81,6 +83,7 @@ const namespaces = {
|
||||
[CacheKeys.GEN_TITLE]: genTitle,
|
||||
[CacheKeys.MODEL_QUERIES]: modelQueries,
|
||||
[CacheKeys.AUDIO_RUNS]: audioRuns,
|
||||
[CacheKeys.MESSAGES]: messages,
|
||||
};
|
||||
|
||||
/**
|
||||
|
||||
@@ -109,6 +109,14 @@ const condenseArray = (item) => {
|
||||
* @returns {string} - The formatted log message.
|
||||
*/
|
||||
const debugTraverse = winston.format.printf(({ level, message, timestamp, ...metadata }) => {
|
||||
if (!message) {
|
||||
return `${timestamp} ${level}`;
|
||||
}
|
||||
|
||||
if (!message?.trim || typeof message !== 'string') {
|
||||
return `${timestamp} ${level}: ${JSON.stringify(message)}`;
|
||||
}
|
||||
|
||||
let msg = `${timestamp} ${level}: ${truncateLongStrings(message?.trim(), 150)}`;
|
||||
try {
|
||||
if (level !== 'debug') {
|
||||
|
||||
@@ -19,22 +19,39 @@ const getConvo = async (user, conversationId) => {
|
||||
|
||||
module.exports = {
|
||||
Conversation,
|
||||
saveConvo: async (user, { conversationId, newConversationId, ...convo }) => {
|
||||
/**
|
||||
* Saves a conversation to the database.
|
||||
* @param {Object} req - The request object.
|
||||
* @param {string} conversationId - The conversation's ID.
|
||||
* @param {Object} metadata - Additional metadata to log for operation.
|
||||
* @returns {Promise<TConversation>} The conversation object.
|
||||
*/
|
||||
saveConvo: async (req, { conversationId, newConversationId, ...convo }, metadata) => {
|
||||
try {
|
||||
if (metadata && metadata?.context) {
|
||||
logger.debug(`[saveConvo] ${metadata.context}`);
|
||||
}
|
||||
const messages = await getMessages({ conversationId }, '_id');
|
||||
const update = { ...convo, messages, user };
|
||||
const update = { ...convo, messages, user: req.user.id };
|
||||
if (newConversationId) {
|
||||
update.conversationId = newConversationId;
|
||||
}
|
||||
|
||||
const conversation = await Conversation.findOneAndUpdate({ conversationId, user }, update, {
|
||||
new: true,
|
||||
upsert: true,
|
||||
});
|
||||
const conversation = await Conversation.findOneAndUpdate(
|
||||
{ conversationId, user: req.user.id },
|
||||
update,
|
||||
{
|
||||
new: true,
|
||||
upsert: true,
|
||||
},
|
||||
);
|
||||
|
||||
return conversation.toObject();
|
||||
} catch (error) {
|
||||
logger.error('[saveConvo] Error saving conversation', error);
|
||||
if (metadata && metadata?.context) {
|
||||
logger.info(`[saveConvo] ${metadata.context}`);
|
||||
}
|
||||
return { message: 'Error saving conversation' };
|
||||
}
|
||||
},
|
||||
@@ -56,13 +73,16 @@ module.exports = {
|
||||
throw new Error('Failed to save conversations in bulk.');
|
||||
}
|
||||
},
|
||||
getConvosByPage: async (user, pageNumber = 1, pageSize = 25, isArchived = false) => {
|
||||
getConvosByPage: async (user, pageNumber = 1, pageSize = 25, isArchived = false, tags) => {
|
||||
const query = { user };
|
||||
if (isArchived) {
|
||||
query.isArchived = true;
|
||||
} else {
|
||||
query.$or = [{ isArchived: false }, { isArchived: { $exists: false } }];
|
||||
}
|
||||
if (Array.isArray(tags) && tags.length > 0) {
|
||||
query.tags = { $in: tags };
|
||||
}
|
||||
try {
|
||||
const totalConvos = (await Conversation.countDocuments(query)) || 1;
|
||||
const totalPages = Math.ceil(totalConvos / pageSize);
|
||||
|
||||
268
api/models/ConversationTag.js
Normal file
268
api/models/ConversationTag.js
Normal file
@@ -0,0 +1,268 @@
|
||||
//const crypto = require('crypto');
|
||||
|
||||
const logger = require('~/config/winston');
|
||||
const Conversation = require('./schema/convoSchema');
|
||||
const ConversationTag = require('./schema/conversationTagSchema');
|
||||
|
||||
const SAVED_TAG = 'Saved';
|
||||
|
||||
const updateTagsForConversation = async (user, conversationId, tags) => {
|
||||
try {
|
||||
const conversation = await Conversation.findOne({ user, conversationId });
|
||||
if (!conversation) {
|
||||
return { message: 'Conversation not found' };
|
||||
}
|
||||
|
||||
const addedTags = tags.tags.filter((tag) => !conversation.tags.includes(tag));
|
||||
const removedTags = conversation.tags.filter((tag) => !tags.tags.includes(tag));
|
||||
for (const tag of addedTags) {
|
||||
await ConversationTag.updateOne({ tag, user }, { $inc: { count: 1 } }, { upsert: true });
|
||||
}
|
||||
for (const tag of removedTags) {
|
||||
await ConversationTag.updateOne({ tag, user }, { $inc: { count: -1 } });
|
||||
}
|
||||
conversation.tags = tags.tags;
|
||||
await conversation.save({ timestamps: { updatedAt: false } });
|
||||
return conversation.tags;
|
||||
} catch (error) {
|
||||
logger.error('[updateTagsToConversation] Error updating tags', error);
|
||||
return { message: 'Error updating tags' };
|
||||
}
|
||||
};
|
||||
|
||||
const createConversationTag = async (user, data) => {
|
||||
try {
|
||||
const cTag = await ConversationTag.findOne({ user, tag: data.tag });
|
||||
if (cTag) {
|
||||
return cTag;
|
||||
}
|
||||
|
||||
const addToConversation = data.addToConversation && data.conversationId;
|
||||
const newTag = await ConversationTag.create({
|
||||
user,
|
||||
tag: data.tag,
|
||||
count: 0,
|
||||
description: data.description,
|
||||
position: 1,
|
||||
});
|
||||
|
||||
await ConversationTag.updateMany(
|
||||
{ user, position: { $gte: 1 }, _id: { $ne: newTag._id } },
|
||||
{ $inc: { position: 1 } },
|
||||
);
|
||||
|
||||
if (addToConversation) {
|
||||
const conversation = await Conversation.findOne({
|
||||
user,
|
||||
conversationId: data.conversationId,
|
||||
});
|
||||
if (conversation) {
|
||||
const tags = [...(conversation.tags || []), data.tag];
|
||||
await updateTagsForConversation(user, data.conversationId, { tags });
|
||||
} else {
|
||||
logger.warn('[updateTagsForConversation] Conversation not found', data.conversationId);
|
||||
}
|
||||
}
|
||||
|
||||
return await ConversationTag.findOne({ user, tag: data.tag });
|
||||
} catch (error) {
|
||||
logger.error('[createConversationTag] Error updating conversation tag', error);
|
||||
return { message: 'Error updating conversation tag' };
|
||||
}
|
||||
};
|
||||
|
||||
const replaceOrRemoveTagInConversations = async (user, oldtag, newtag) => {
|
||||
try {
|
||||
const conversations = await Conversation.find({ user, tags: { $in: [oldtag] } });
|
||||
for (const conversation of conversations) {
|
||||
if (newtag && newtag !== '') {
|
||||
conversation.tags = conversation.tags.map((tag) => (tag === oldtag ? newtag : tag));
|
||||
} else {
|
||||
conversation.tags = conversation.tags.filter((tag) => tag !== oldtag);
|
||||
}
|
||||
await conversation.save({ timestamps: { updatedAt: false } });
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('[replaceOrRemoveTagInConversations] Error updating conversation tags', error);
|
||||
return { message: 'Error updating conversation tags' };
|
||||
}
|
||||
};
|
||||
|
||||
const updateTagPosition = async (user, tag, newPosition) => {
|
||||
try {
|
||||
const cTag = await ConversationTag.findOne({ user, tag });
|
||||
if (!cTag) {
|
||||
return { message: 'Tag not found' };
|
||||
}
|
||||
|
||||
const oldPosition = cTag.position;
|
||||
|
||||
if (newPosition === oldPosition) {
|
||||
return cTag;
|
||||
}
|
||||
|
||||
const updateOperations = [];
|
||||
|
||||
if (newPosition > oldPosition) {
|
||||
// Move other tags up
|
||||
updateOperations.push({
|
||||
updateMany: {
|
||||
filter: {
|
||||
user,
|
||||
position: { $gt: oldPosition, $lte: newPosition },
|
||||
tag: { $ne: SAVED_TAG },
|
||||
},
|
||||
update: { $inc: { position: -1 } },
|
||||
},
|
||||
});
|
||||
} else {
|
||||
// Move other tags down
|
||||
updateOperations.push({
|
||||
updateMany: {
|
||||
filter: {
|
||||
user,
|
||||
position: { $gte: newPosition, $lt: oldPosition },
|
||||
tag: { $ne: SAVED_TAG },
|
||||
},
|
||||
update: { $inc: { position: 1 } },
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Update the target tag's position
|
||||
updateOperations.push({
|
||||
updateOne: {
|
||||
filter: { _id: cTag._id },
|
||||
update: { $set: { position: newPosition } },
|
||||
},
|
||||
});
|
||||
|
||||
await ConversationTag.bulkWrite(updateOperations);
|
||||
|
||||
return await ConversationTag.findById(cTag._id);
|
||||
} catch (error) {
|
||||
logger.error('[updateTagPosition] Error updating tag position', error);
|
||||
return { message: 'Error updating tag position' };
|
||||
}
|
||||
};
|
||||
module.exports = {
|
||||
SAVED_TAG,
|
||||
ConversationTag,
|
||||
getConversationTags: async (user) => {
|
||||
try {
|
||||
const cTags = await ConversationTag.find({ user }).sort({ position: 1 }).lean();
|
||||
cTags.sort((a, b) => (a.tag === SAVED_TAG ? -1 : b.tag === SAVED_TAG ? 1 : 0));
|
||||
|
||||
return cTags;
|
||||
} catch (error) {
|
||||
logger.error('[getShare] Error getting share link', error);
|
||||
return { message: 'Error getting share link' };
|
||||
}
|
||||
},
|
||||
|
||||
createConversationTag,
|
||||
updateConversationTag: async (user, tag, data) => {
|
||||
try {
|
||||
const cTag = await ConversationTag.findOne({ user, tag });
|
||||
if (!cTag) {
|
||||
return createConversationTag(user, data);
|
||||
}
|
||||
|
||||
if (cTag.tag !== data.tag || cTag.description !== data.description) {
|
||||
cTag.tag = data.tag;
|
||||
cTag.description = data.description === undefined ? cTag.description : data.description;
|
||||
await cTag.save();
|
||||
}
|
||||
|
||||
if (data.position !== undefined && cTag.position !== data.position) {
|
||||
await updateTagPosition(user, tag, data.position);
|
||||
}
|
||||
|
||||
// update conversation tags properties
|
||||
replaceOrRemoveTagInConversations(user, tag, data.tag);
|
||||
return await ConversationTag.findOne({ user, tag: data.tag });
|
||||
} catch (error) {
|
||||
logger.error('[updateConversationTag] Error updating conversation tag', error);
|
||||
return { message: 'Error updating conversation tag' };
|
||||
}
|
||||
},
|
||||
|
||||
deleteConversationTag: async (user, tag) => {
|
||||
try {
|
||||
const currentTag = await ConversationTag.findOne({ user, tag });
|
||||
if (!currentTag) {
|
||||
return;
|
||||
}
|
||||
|
||||
await currentTag.deleteOne({ user, tag });
|
||||
|
||||
await replaceOrRemoveTagInConversations(user, tag, null);
|
||||
return currentTag;
|
||||
} catch (error) {
|
||||
logger.error('[deleteConversationTag] Error deleting conversation tag', error);
|
||||
return { message: 'Error deleting conversation tag' };
|
||||
}
|
||||
},
|
||||
|
||||
updateTagsForConversation,
|
||||
rebuildConversationTags: async (user) => {
|
||||
try {
|
||||
const conversations = await Conversation.find({ user }).select('tags');
|
||||
const tagCountMap = {};
|
||||
|
||||
// Count the occurrences of each tag
|
||||
conversations.forEach((conversation) => {
|
||||
conversation.tags.forEach((tag) => {
|
||||
if (tagCountMap[tag]) {
|
||||
tagCountMap[tag]++;
|
||||
} else {
|
||||
tagCountMap[tag] = 1;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
const tags = await ConversationTag.find({ user }).sort({ position: -1 });
|
||||
|
||||
// Update existing tags and add new tags
|
||||
for (const [tag, count] of Object.entries(tagCountMap)) {
|
||||
const existingTag = tags.find((t) => t.tag === tag);
|
||||
if (existingTag) {
|
||||
existingTag.count = count;
|
||||
await existingTag.save();
|
||||
} else {
|
||||
const newTag = new ConversationTag({ user, tag, count });
|
||||
tags.push(newTag);
|
||||
await newTag.save();
|
||||
}
|
||||
}
|
||||
|
||||
// Set count to 0 for tags that are not in the grouped tags
|
||||
for (const tag of tags) {
|
||||
if (!tagCountMap[tag.tag]) {
|
||||
tag.count = 0;
|
||||
await tag.save();
|
||||
}
|
||||
}
|
||||
|
||||
// Sort tags by position in descending order
|
||||
tags.sort((a, b) => a.position - b.position);
|
||||
|
||||
// Move the tag with name "saved" to the first position
|
||||
const savedTagIndex = tags.findIndex((tag) => tag.tag === SAVED_TAG);
|
||||
if (savedTagIndex !== -1) {
|
||||
const [savedTag] = tags.splice(savedTagIndex, 1);
|
||||
tags.unshift(savedTag);
|
||||
}
|
||||
|
||||
// Reassign positions starting from 0
|
||||
tags.forEach((tag, index) => {
|
||||
tag.position = index;
|
||||
tag.save();
|
||||
});
|
||||
return tags;
|
||||
} catch (error) {
|
||||
logger.error('[rearrangeTags] Error rearranging tags', error);
|
||||
return { message: 'Error rearranging tags' };
|
||||
}
|
||||
},
|
||||
};
|
||||
@@ -1,204 +1,342 @@
|
||||
const { z } = require('zod');
|
||||
const Message = require('./schema/messageSchema');
|
||||
const logger = require('~/config/winston');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const idSchema = z.string().uuid();
|
||||
|
||||
/**
|
||||
* Saves a message in the database.
|
||||
*
|
||||
* @async
|
||||
* @function saveMessage
|
||||
* @param {Express.Request} req - The request object containing user information.
|
||||
* @param {Object} params - The message data object.
|
||||
* @param {string} params.endpoint - The endpoint where the message originated.
|
||||
* @param {string} params.iconURL - The URL of the sender's icon.
|
||||
* @param {string} params.messageId - The unique identifier for the message.
|
||||
* @param {string} params.newMessageId - The new unique identifier for the message (if applicable).
|
||||
* @param {string} params.conversationId - The identifier of the conversation.
|
||||
* @param {string} [params.parentMessageId] - The identifier of the parent message, if any.
|
||||
* @param {string} params.sender - The identifier of the sender.
|
||||
* @param {string} params.text - The text content of the message.
|
||||
* @param {boolean} params.isCreatedByUser - Indicates if the message was created by the user.
|
||||
* @param {string} [params.error] - Any error associated with the message.
|
||||
* @param {boolean} [params.unfinished] - Indicates if the message is unfinished.
|
||||
* @param {Object[]} [params.files] - An array of files associated with the message.
|
||||
* @param {boolean} [params.isEdited] - Indicates if the message was edited.
|
||||
* @param {string} [params.finish_reason] - Reason for finishing the message.
|
||||
* @param {number} [params.tokenCount] - The number of tokens in the message.
|
||||
* @param {string} [params.plugin] - Plugin associated with the message.
|
||||
* @param {string[]} [params.plugins] - An array of plugins associated with the message.
|
||||
* @param {string} [params.model] - The model used to generate the message.
|
||||
* @param {Object} [metadata] - Additional metadata for this operation
|
||||
* @param {string} [metadata.context] - The context of the operation
|
||||
* @returns {Promise<TMessage>} The updated or newly inserted message document.
|
||||
* @throws {Error} If there is an error in saving the message.
|
||||
*/
|
||||
async function saveMessage(req, params, metadata) {
|
||||
try {
|
||||
if (!req || !req.user || !req.user.id) {
|
||||
throw new Error('User not authenticated');
|
||||
}
|
||||
|
||||
const {
|
||||
text,
|
||||
error,
|
||||
model,
|
||||
files,
|
||||
plugin,
|
||||
sender,
|
||||
plugins,
|
||||
iconURL,
|
||||
endpoint,
|
||||
isEdited,
|
||||
messageId,
|
||||
unfinished,
|
||||
tokenCount,
|
||||
newMessageId,
|
||||
finish_reason,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
isCreatedByUser,
|
||||
} = params;
|
||||
|
||||
const validConvoId = idSchema.safeParse(conversationId);
|
||||
if (!validConvoId.success) {
|
||||
logger.warn(`Invalid conversation ID: ${conversationId}`);
|
||||
if (metadata && metadata?.context) {
|
||||
logger.info(`---\`saveMessage\` context: ${metadata.context}`);
|
||||
}
|
||||
|
||||
logger.info(`---Invalid conversation ID Params:
|
||||
|
||||
${JSON.stringify(params, null, 2)}
|
||||
|
||||
`);
|
||||
return;
|
||||
}
|
||||
|
||||
const update = {
|
||||
user: req.user.id,
|
||||
iconURL,
|
||||
endpoint,
|
||||
messageId: newMessageId || messageId,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
sender,
|
||||
text,
|
||||
isCreatedByUser,
|
||||
isEdited,
|
||||
finish_reason,
|
||||
error,
|
||||
unfinished,
|
||||
tokenCount,
|
||||
plugin,
|
||||
plugins,
|
||||
model,
|
||||
};
|
||||
|
||||
if (files) {
|
||||
update.files = files;
|
||||
}
|
||||
|
||||
const message = await Message.findOneAndUpdate({ messageId, user: req.user.id }, update, {
|
||||
upsert: true,
|
||||
new: true,
|
||||
});
|
||||
|
||||
return message.toObject();
|
||||
} catch (err) {
|
||||
logger.error('Error saving message:', err);
|
||||
if (metadata && metadata?.context) {
|
||||
logger.info(`---\`saveMessage\` context: ${metadata.context}`);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves multiple messages in the database in bulk.
|
||||
*
|
||||
* @async
|
||||
* @function bulkSaveMessages
|
||||
* @param {Object[]} messages - An array of message objects to save.
|
||||
* @returns {Promise<Object>} The result of the bulk write operation.
|
||||
* @throws {Error} If there is an error in saving messages in bulk.
|
||||
*/
|
||||
async function bulkSaveMessages(messages) {
|
||||
try {
|
||||
const bulkOps = messages.map((message) => ({
|
||||
updateOne: {
|
||||
filter: { messageId: message.messageId },
|
||||
update: message,
|
||||
upsert: true,
|
||||
},
|
||||
}));
|
||||
|
||||
const result = await Message.bulkWrite(bulkOps);
|
||||
return result;
|
||||
} catch (err) {
|
||||
logger.error('Error saving messages in bulk:', err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Records a message in the database.
|
||||
*
|
||||
* @async
|
||||
* @function recordMessage
|
||||
* @param {Object} params - The message data object.
|
||||
* @param {string} params.user - The identifier of the user.
|
||||
* @param {string} params.endpoint - The endpoint where the message originated.
|
||||
* @param {string} params.messageId - The unique identifier for the message.
|
||||
* @param {string} params.conversationId - The identifier of the conversation.
|
||||
* @param {string} [params.parentMessageId] - The identifier of the parent message, if any.
|
||||
* @param {Partial<TMessage>} rest - Any additional properties from the TMessage typedef not explicitly listed.
|
||||
* @returns {Promise<Object>} The updated or newly inserted message document.
|
||||
* @throws {Error} If there is an error in saving the message.
|
||||
*/
|
||||
async function recordMessage({
|
||||
user,
|
||||
endpoint,
|
||||
messageId,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
...rest
|
||||
}) {
|
||||
try {
|
||||
// No parsing of convoId as may use threadId
|
||||
const message = {
|
||||
user,
|
||||
endpoint,
|
||||
messageId,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
...rest,
|
||||
};
|
||||
|
||||
return await Message.findOneAndUpdate({ user, messageId }, message, {
|
||||
upsert: true,
|
||||
new: true,
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error('Error recording message:', err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the text of a message.
|
||||
*
|
||||
* @async
|
||||
* @function updateMessageText
|
||||
* @param {Object} params - The update data object.
|
||||
* @param {Object} req - The request object.
|
||||
* @param {string} params.messageId - The unique identifier for the message.
|
||||
* @param {string} params.text - The new text content of the message.
|
||||
* @returns {Promise<void>}
|
||||
* @throws {Error} If there is an error in updating the message text.
|
||||
*/
|
||||
async function updateMessageText(req, { messageId, text }) {
|
||||
try {
|
||||
await Message.updateOne({ messageId, user: req.user.id }, { text });
|
||||
} catch (err) {
|
||||
logger.error('Error updating message text:', err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a message.
|
||||
*
|
||||
* @async
|
||||
* @function updateMessage
|
||||
* @param {Object} message - The message object containing update data.
|
||||
* @param {Object} req - The request object.
|
||||
* @param {string} message.messageId - The unique identifier for the message.
|
||||
* @param {string} [message.text] - The new text content of the message.
|
||||
* @param {Object[]} [message.files] - The files associated with the message.
|
||||
* @param {boolean} [message.isCreatedByUser] - Indicates if the message was created by the user.
|
||||
* @param {string} [message.sender] - The identifier of the sender.
|
||||
* @param {number} [message.tokenCount] - The number of tokens in the message.
|
||||
* @param {Object} [metadata] - The operation metadata
|
||||
* @param {string} [metadata.context] - The operation metadata
|
||||
* @returns {Promise<TMessage>} The updated message document.
|
||||
* @throws {Error} If there is an error in updating the message or if the message is not found.
|
||||
*/
|
||||
async function updateMessage(req, message, metadata) {
|
||||
try {
|
||||
const { messageId, ...update } = message;
|
||||
update.isEdited = true;
|
||||
const updatedMessage = await Message.findOneAndUpdate(
|
||||
{ messageId, user: req.user.id },
|
||||
update,
|
||||
{
|
||||
new: true,
|
||||
},
|
||||
);
|
||||
|
||||
if (!updatedMessage) {
|
||||
throw new Error('Message not found or user not authorized.');
|
||||
}
|
||||
|
||||
return {
|
||||
messageId: updatedMessage.messageId,
|
||||
conversationId: updatedMessage.conversationId,
|
||||
parentMessageId: updatedMessage.parentMessageId,
|
||||
sender: updatedMessage.sender,
|
||||
text: updatedMessage.text,
|
||||
isCreatedByUser: updatedMessage.isCreatedByUser,
|
||||
tokenCount: updatedMessage.tokenCount,
|
||||
isEdited: true,
|
||||
};
|
||||
} catch (err) {
|
||||
logger.error('Error updating message:', err);
|
||||
if (metadata && metadata?.context) {
|
||||
logger.info(`---\`updateMessage\` context: ${metadata.context}`);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes messages in a conversation since a specific message.
|
||||
*
|
||||
* @async
|
||||
* @function deleteMessagesSince
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {Object} req - The request object.
|
||||
* @param {string} params.messageId - The unique identifier for the message.
|
||||
* @param {string} params.conversationId - The identifier of the conversation.
|
||||
* @returns {Promise<Number>} The number of deleted messages.
|
||||
* @throws {Error} If there is an error in deleting messages.
|
||||
*/
|
||||
async function deleteMessagesSince(req, { messageId, conversationId }) {
|
||||
try {
|
||||
const message = await Message.findOne({ messageId, user: req.user.id }).lean();
|
||||
|
||||
if (message) {
|
||||
const query = Message.find({ conversationId, user: req.user.id });
|
||||
return await query.deleteMany({
|
||||
createdAt: { $gt: message.createdAt },
|
||||
});
|
||||
}
|
||||
return undefined;
|
||||
} catch (err) {
|
||||
logger.error('Error deleting messages:', err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves messages from the database.
|
||||
* @async
|
||||
* @function getMessages
|
||||
* @param {Record<string, unknown>} filter - The filter criteria.
|
||||
* @param {string | undefined} [select] - The fields to select.
|
||||
* @returns {Promise<TMessage[]>} The messages that match the filter criteria.
|
||||
* @throws {Error} If there is an error in retrieving messages.
|
||||
*/
|
||||
async function getMessages(filter, select) {
|
||||
try {
|
||||
if (select) {
|
||||
return await Message.find(filter).select(select).sort({ createdAt: 1 }).lean();
|
||||
}
|
||||
|
||||
return await Message.find(filter).sort({ createdAt: 1 }).lean();
|
||||
} catch (err) {
|
||||
logger.error('Error getting messages:', err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes messages from the database.
|
||||
*
|
||||
* @async
|
||||
* @function deleteMessages
|
||||
* @param {Object} filter - The filter criteria to find messages to delete.
|
||||
* @returns {Promise<Number>} The number of deleted messages.
|
||||
* @throws {Error} If there is an error in deleting messages.
|
||||
*/
|
||||
async function deleteMessages(filter) {
|
||||
try {
|
||||
return await Message.deleteMany(filter);
|
||||
} catch (err) {
|
||||
logger.error('Error deleting messages:', err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
Message,
|
||||
|
||||
async saveMessage({
|
||||
user,
|
||||
endpoint,
|
||||
iconURL,
|
||||
messageId,
|
||||
newMessageId,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
sender,
|
||||
text,
|
||||
isCreatedByUser,
|
||||
error,
|
||||
unfinished,
|
||||
files,
|
||||
isEdited,
|
||||
finish_reason,
|
||||
tokenCount,
|
||||
plugin,
|
||||
plugins,
|
||||
model,
|
||||
}) {
|
||||
try {
|
||||
const validConvoId = idSchema.safeParse(conversationId);
|
||||
if (!validConvoId.success) {
|
||||
return;
|
||||
}
|
||||
|
||||
const update = {
|
||||
user,
|
||||
iconURL,
|
||||
endpoint,
|
||||
messageId: newMessageId || messageId,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
sender,
|
||||
text,
|
||||
isCreatedByUser,
|
||||
isEdited,
|
||||
finish_reason,
|
||||
error,
|
||||
unfinished,
|
||||
tokenCount,
|
||||
plugin,
|
||||
plugins,
|
||||
model,
|
||||
};
|
||||
|
||||
if (files) {
|
||||
update.files = files;
|
||||
}
|
||||
|
||||
const message = await Message.findOneAndUpdate({ messageId }, update, {
|
||||
upsert: true,
|
||||
new: true,
|
||||
});
|
||||
|
||||
return message.toObject();
|
||||
} catch (err) {
|
||||
logger.error('Error saving message:', err);
|
||||
throw new Error('Failed to save message.');
|
||||
}
|
||||
},
|
||||
|
||||
async bulkSaveMessages(messages) {
|
||||
try {
|
||||
const bulkOps = messages.map((message) => ({
|
||||
updateOne: {
|
||||
filter: { messageId: message.messageId },
|
||||
update: message,
|
||||
upsert: true,
|
||||
},
|
||||
}));
|
||||
|
||||
const result = await Message.bulkWrite(bulkOps);
|
||||
return result;
|
||||
} catch (err) {
|
||||
logger.error('Error saving messages in bulk:', err);
|
||||
throw new Error('Failed to save messages in bulk.');
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Records a message in the database.
|
||||
*
|
||||
* @async
|
||||
* @function recordMessage
|
||||
* @param {Object} params - The message data object.
|
||||
* @param {string} params.user - The identifier of the user.
|
||||
* @param {string} params.endpoint - The endpoint where the message originated.
|
||||
* @param {string} params.messageId - The unique identifier for the message.
|
||||
* @param {string} params.conversationId - The identifier of the conversation.
|
||||
* @param {string} [params.parentMessageId] - The identifier of the parent message, if any.
|
||||
* @param {Partial<TMessage>} rest - Any additional properties from the TMessage typedef not explicitly listed.
|
||||
* @returns {Promise<Object>} The updated or newly inserted message document.
|
||||
* @throws {Error} If there is an error in saving the message.
|
||||
*/
|
||||
async recordMessage({ user, endpoint, messageId, conversationId, parentMessageId, ...rest }) {
|
||||
try {
|
||||
// No parsing of convoId as may use threadId
|
||||
const message = {
|
||||
user,
|
||||
endpoint,
|
||||
messageId,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
...rest,
|
||||
};
|
||||
|
||||
return await Message.findOneAndUpdate({ user, messageId }, message, {
|
||||
upsert: true,
|
||||
new: true,
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error('Error saving message:', err);
|
||||
throw new Error('Failed to save message.');
|
||||
}
|
||||
},
|
||||
async updateMessageText({ messageId, text }) {
|
||||
try {
|
||||
await Message.updateOne({ messageId }, { text });
|
||||
} catch (err) {
|
||||
logger.error('Error updating message text:', err);
|
||||
throw new Error('Failed to update message text.');
|
||||
}
|
||||
},
|
||||
async updateMessage(message) {
|
||||
try {
|
||||
const { messageId, ...update } = message;
|
||||
update.isEdited = true;
|
||||
const updatedMessage = await Message.findOneAndUpdate({ messageId }, update, {
|
||||
new: true,
|
||||
});
|
||||
|
||||
if (!updatedMessage) {
|
||||
throw new Error('Message not found.');
|
||||
}
|
||||
|
||||
return {
|
||||
messageId: updatedMessage.messageId,
|
||||
conversationId: updatedMessage.conversationId,
|
||||
parentMessageId: updatedMessage.parentMessageId,
|
||||
sender: updatedMessage.sender,
|
||||
text: updatedMessage.text,
|
||||
isCreatedByUser: updatedMessage.isCreatedByUser,
|
||||
tokenCount: updatedMessage.tokenCount,
|
||||
isEdited: true,
|
||||
};
|
||||
} catch (err) {
|
||||
logger.error('Error updating message:', err);
|
||||
throw new Error('Failed to update message.');
|
||||
}
|
||||
},
|
||||
async deleteMessagesSince({ messageId, conversationId }) {
|
||||
try {
|
||||
const message = await Message.findOne({ messageId }).lean();
|
||||
|
||||
if (message) {
|
||||
return await Message.find({ conversationId }).deleteMany({
|
||||
createdAt: { $gt: message.createdAt },
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Error deleting messages:', err);
|
||||
throw new Error('Failed to delete messages.');
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Retrieves messages from the database.
|
||||
* @param {Record<string, unknown>} filter
|
||||
* @param {string | undefined} [select]
|
||||
* @returns
|
||||
*/
|
||||
async getMessages(filter, select) {
|
||||
try {
|
||||
if (select) {
|
||||
return await Message.find(filter).select(select).sort({ createdAt: 1 }).lean();
|
||||
}
|
||||
|
||||
return await Message.find(filter).sort({ createdAt: 1 }).lean();
|
||||
} catch (err) {
|
||||
logger.error('Error getting messages:', err);
|
||||
throw new Error('Failed to get messages.');
|
||||
}
|
||||
},
|
||||
|
||||
async deleteMessages(filter) {
|
||||
try {
|
||||
return await Message.deleteMany(filter);
|
||||
} catch (err) {
|
||||
logger.error('Error deleting messages:', err);
|
||||
throw new Error('Failed to delete messages.');
|
||||
}
|
||||
},
|
||||
saveMessage,
|
||||
bulkSaveMessages,
|
||||
recordMessage,
|
||||
updateMessageText,
|
||||
updateMessage,
|
||||
deleteMessagesSince,
|
||||
getMessages,
|
||||
deleteMessages,
|
||||
};
|
||||
|
||||
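For reference, a minimal usage sketch of the exported Message model helpers above (a hypothetical caller, not part of this changeset); the filter and select shapes mirror the signatures shown in this diff:

const { getMessages, deleteMessages } = require('~/models/Message');

// Fetch a conversation's messages (sorted by createdAt, internal fields stripped), then remove them.
async function pruneConversation(conversationId) {
  const messages = await getMessages({ conversationId }, '-_id -__v -user');
  const { deletedCount } = await deleteMessages({ conversationId });
  return { fetched: messages.length, deletedCount };
}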
239 api/models/Message.spec.js Normal file
@@ -0,0 +1,239 @@
|
||||
const mongoose = require('mongoose');
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
|
||||
jest.mock('mongoose');
|
||||
|
||||
const mockFindQuery = {
|
||||
select: jest.fn().mockReturnThis(),
|
||||
sort: jest.fn().mockReturnThis(),
|
||||
lean: jest.fn().mockReturnThis(),
|
||||
deleteMany: jest.fn().mockResolvedValue({ deletedCount: 1 }),
|
||||
};
|
||||
|
||||
const mockSchema = {
|
||||
findOneAndUpdate: jest.fn(),
|
||||
updateOne: jest.fn(),
|
||||
findOne: jest.fn(() => ({
|
||||
lean: jest.fn(),
|
||||
})),
|
||||
find: jest.fn(() => mockFindQuery),
|
||||
deleteMany: jest.fn(),
|
||||
};
|
||||
|
||||
mongoose.model.mockReturnValue(mockSchema);
|
||||
|
||||
jest.mock('~/models/schema/messageSchema', () => mockSchema);
|
||||
|
||||
jest.mock('~/config/winston', () => ({
|
||||
error: jest.fn(),
|
||||
}));
|
||||
|
||||
const {
|
||||
saveMessage,
|
||||
getMessages,
|
||||
updateMessage,
|
||||
deleteMessages,
|
||||
updateMessageText,
|
||||
deleteMessagesSince,
|
||||
} = require('~/models/Message');
|
||||
|
||||
describe('Message Operations', () => {
|
||||
let mockReq;
|
||||
let mockMessage;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
mockReq = {
|
||||
user: { id: 'user123' },
|
||||
};
|
||||
|
||||
mockMessage = {
|
||||
messageId: 'msg123',
|
||||
conversationId: uuidv4(),
|
||||
text: 'Hello, world!',
|
||||
user: 'user123',
|
||||
};
|
||||
|
||||
mockSchema.findOneAndUpdate.mockResolvedValue({
|
||||
toObject: () => mockMessage,
|
||||
});
|
||||
});
|
||||
|
||||
describe('saveMessage', () => {
|
||||
it('should save a message for an authenticated user', async () => {
|
||||
const result = await saveMessage(mockReq, mockMessage);
|
||||
expect(result).toEqual(mockMessage);
|
||||
expect(mockSchema.findOneAndUpdate).toHaveBeenCalledWith(
|
||||
{ messageId: 'msg123', user: 'user123' },
|
||||
expect.objectContaining({ user: 'user123' }),
|
||||
expect.any(Object),
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw an error for unauthenticated user', async () => {
|
||||
mockReq.user = null;
|
||||
await expect(saveMessage(mockReq, mockMessage)).rejects.toThrow('User not authenticated');
|
||||
});
|
||||
|
||||
it('should throw an error for invalid conversation ID', async () => {
|
||||
mockMessage.conversationId = 'invalid-id';
|
||||
await expect(saveMessage(mockReq, mockMessage)).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateMessageText', () => {
|
||||
it('should update message text for the authenticated user', async () => {
|
||||
await updateMessageText(mockReq, { messageId: 'msg123', text: 'Updated text' });
|
||||
expect(mockSchema.updateOne).toHaveBeenCalledWith(
|
||||
{ messageId: 'msg123', user: 'user123' },
|
||||
{ text: 'Updated text' },
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateMessage', () => {
|
||||
it('should update a message for the authenticated user', async () => {
|
||||
mockSchema.findOneAndUpdate.mockResolvedValue(mockMessage);
|
||||
const result = await updateMessage(mockReq, { messageId: 'msg123', text: 'Updated text' });
|
||||
expect(result).toEqual(
|
||||
expect.objectContaining({
|
||||
messageId: 'msg123',
|
||||
text: 'Hello, world!',
|
||||
isEdited: true,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw an error if message is not found', async () => {
|
||||
mockSchema.findOneAndUpdate.mockResolvedValue(null);
|
||||
await expect(
|
||||
updateMessage(mockReq, { messageId: 'nonexistent', text: 'Test' }),
|
||||
).rejects.toThrow('Message not found or user not authorized.');
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteMessagesSince', () => {
|
||||
it('should delete messages only for the authenticated user', async () => {
|
||||
mockSchema.findOne().lean.mockResolvedValueOnce({ createdAt: new Date() });
|
||||
mockFindQuery.deleteMany.mockResolvedValueOnce({ deletedCount: 1 });
|
||||
const result = await deleteMessagesSince(mockReq, {
|
||||
messageId: 'msg123',
|
||||
conversationId: 'convo123',
|
||||
});
|
||||
expect(mockSchema.findOne).toHaveBeenCalledWith({ messageId: 'msg123', user: 'user123' });
|
||||
expect(mockSchema.find).not.toHaveBeenCalled();
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should return undefined if no message is found', async () => {
|
||||
mockSchema.findOne().lean.mockResolvedValueOnce(null);
|
||||
const result = await deleteMessagesSince(mockReq, {
|
||||
messageId: 'nonexistent',
|
||||
conversationId: 'convo123',
|
||||
});
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMessages', () => {
|
||||
it('should retrieve messages with the correct filter', async () => {
|
||||
const filter = { conversationId: 'convo123' };
|
||||
await getMessages(filter);
|
||||
expect(mockSchema.find).toHaveBeenCalledWith(filter);
|
||||
expect(mockFindQuery.sort).toHaveBeenCalledWith({ createdAt: 1 });
|
||||
expect(mockFindQuery.lean).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteMessages', () => {
|
||||
it('should delete messages with the correct filter', async () => {
|
||||
await deleteMessages({ user: 'user123' });
|
||||
expect(mockSchema.deleteMany).toHaveBeenCalledWith({ user: 'user123' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('Conversation Hijacking Prevention', () => {
|
||||
it('should not allow editing a message in another user\'s conversation', async () => {
|
||||
const attackerReq = { user: { id: 'attacker123' } };
|
||||
const victimConversationId = 'victim-convo-123';
|
||||
const victimMessageId = 'victim-msg-123';
|
||||
|
||||
mockSchema.findOneAndUpdate.mockResolvedValue(null);
|
||||
|
||||
await expect(
|
||||
updateMessage(attackerReq, {
|
||||
messageId: victimMessageId,
|
||||
conversationId: victimConversationId,
|
||||
text: 'Hacked message',
|
||||
}),
|
||||
).rejects.toThrow('Message not found or user not authorized.');
|
||||
|
||||
expect(mockSchema.findOneAndUpdate).toHaveBeenCalledWith(
|
||||
{ messageId: victimMessageId, user: 'attacker123' },
|
||||
expect.anything(),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should not allow deleting messages from another user\'s conversation', async () => {
|
||||
const attackerReq = { user: { id: 'attacker123' } };
|
||||
const victimConversationId = 'victim-convo-123';
|
||||
const victimMessageId = 'victim-msg-123';
|
||||
|
||||
mockSchema.findOne().lean.mockResolvedValueOnce(null); // Simulating message not found for this user
|
||||
const result = await deleteMessagesSince(attackerReq, {
|
||||
messageId: victimMessageId,
|
||||
conversationId: victimConversationId,
|
||||
});
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
expect(mockSchema.findOne).toHaveBeenCalledWith({
|
||||
messageId: victimMessageId,
|
||||
user: 'attacker123',
|
||||
});
|
||||
});
|
||||
|
||||
it('should not allow inserting a new message into another user\'s conversation', async () => {
|
||||
const attackerReq = { user: { id: 'attacker123' } };
|
||||
const victimConversationId = uuidv4(); // Use a valid UUID
|
||||
|
||||
await expect(
|
||||
saveMessage(attackerReq, {
|
||||
conversationId: victimConversationId,
|
||||
text: 'Inserted malicious message',
|
||||
messageId: 'new-msg-123',
|
||||
}),
|
||||
).resolves.not.toThrow(); // It should not throw an error
|
||||
|
||||
// Check that the message was saved with the attacker's user ID
|
||||
expect(mockSchema.findOneAndUpdate).toHaveBeenCalledWith(
|
||||
{ messageId: 'new-msg-123', user: 'attacker123' },
|
||||
expect.objectContaining({
|
||||
user: 'attacker123',
|
||||
conversationId: victimConversationId,
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should allow retrieving messages from any conversation', async () => {
|
||||
const victimConversationId = 'victim-convo-123';
|
||||
|
||||
await getMessages({ conversationId: victimConversationId });
|
||||
|
||||
expect(mockSchema.find).toHaveBeenCalledWith({
|
||||
conversationId: victimConversationId,
|
||||
});
|
||||
|
||||
mockSchema.find.mockReturnValueOnce({
|
||||
select: jest.fn().mockReturnThis(),
|
||||
sort: jest.fn().mockReturnThis(),
|
||||
lean: jest.fn().mockResolvedValue([{ text: 'Test message' }]),
|
||||
});
|
||||
|
||||
const result = await getMessages({ conversationId: victimConversationId });
|
||||
expect(result).toEqual([{ text: 'Test message' }]);
|
||||
});
|
||||
});
|
||||
});
|
||||
31 api/models/schema/conversationTagSchema.js Normal file
@@ -0,0 +1,31 @@
const mongoose = require('mongoose');

const conversationTagSchema = mongoose.Schema(
  {
    tag: {
      type: String,
      index: true,
    },
    user: {
      type: String,
      index: true,
    },
    description: {
      type: String,
      index: true,
    },
    count: {
      type: Number,
      default: 0,
    },
    position: {
      type: Number,
      default: 0,
    },
  },
  { timestamps: true },
);

conversationTagSchema.index({ tag: 1, user: 1 }, { unique: true });

module.exports = mongoose.model('ConversationTag', conversationTagSchema);
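A short usage sketch for the new ConversationTag model (hypothetical caller, not part of this changeset). Because the compound unique index on { tag: 1, user: 1 } allows each tag name to exist once per user, an upsert keyed on both fields is the natural write pattern:

const ConversationTag = require('~/models/schema/conversationTagSchema');

// Create the tag for this user if it does not exist yet, otherwise bump its usage count.
async function addTagForUser(user, tag) {
  return ConversationTag.findOneAndUpdate(
    { tag, user },
    { $inc: { count: 1 } },
    { upsert: true, new: true },
  );
}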
@@ -42,6 +42,11 @@ const convoSchema = mongoose.Schema(
|
||||
invocationId: {
|
||||
type: Number,
|
||||
},
|
||||
tags: {
|
||||
type: [String],
|
||||
default: [],
|
||||
meiliIndex: true,
|
||||
},
|
||||
},
|
||||
{ timestamps: true },
|
||||
);
|
||||
|
||||
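With the new tags array on the conversation schema (and meiliIndex: true so the field is also pushed to the search index), filtering a user's conversations by tag becomes a plain array-membership query. An illustrative sketch only; the model and the user field are assumptions drawn from context:

// Matching a scalar against an array field in MongoDB matches any element of the array.
async function findTaggedConvos(Conversation, userId, tag) {
  return Conversation.find({ user: userId, tags: tag })
    .sort({ updatedAt: -1 })
    .lean();
}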
@@ -103,6 +103,10 @@ const conversationPreset = {
|
||||
spec: {
|
||||
type: String,
|
||||
},
|
||||
tags: {
|
||||
type: [String],
|
||||
default: [],
|
||||
},
|
||||
tools: { type: [{ type: String }], default: undefined },
|
||||
maxContextTokens: {
|
||||
type: Number,
|
||||
|
||||
@@ -129,6 +129,7 @@ if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
|
||||
}
|
||||
|
||||
messageSchema.index({ createdAt: 1 });
|
||||
messageSchema.index({ messageId: 1, user: 1 }, { unique: true });
|
||||
|
||||
const Message = mongoose.models.Message || mongoose.model('Message', messageSchema);
|
||||
|
||||
|
||||
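The new compound unique index on { messageId: 1, user: 1 } is what makes the user-scoped upsert in recordMessage above safe: two users can hold messages with the same messageId without colliding, while repeated writes from the same user collapse into a single document. A hedged sketch of that interaction, with the model passed in to avoid asserting a require path:

async function upsertForUsers(Message) {
  // Both writes succeed: uniqueness is enforced per (messageId, user) pair, not per messageId alone.
  await Message.findOneAndUpdate(
    { user: 'userA', messageId: 'msg-1' },
    { user: 'userA', messageId: 'msg-1', text: 'hi' },
    { upsert: true, new: true },
  );
  await Message.findOneAndUpdate(
    { user: 'userB', messageId: 'msg-1' },
    { user: 'userB', messageId: 'msg-1', text: 'hello' },
    { upsert: true, new: true },
  );
}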
@@ -1,5 +1,4 @@
|
||||
const mongoose = require('mongoose');
|
||||
const { SystemRoles } = require('librechat-data-provider');
|
||||
|
||||
/**
|
||||
* @typedef {Object} MongoSession
|
||||
@@ -79,7 +78,7 @@ const userSchema = mongoose.Schema(
|
||||
},
|
||||
role: {
|
||||
type: String,
|
||||
default: SystemRoles.USER,
|
||||
default: 'USER',
|
||||
},
|
||||
googleId: {
|
||||
type: String,
|
||||
|
||||
@@ -12,6 +12,7 @@ const tokenValues = {
|
||||
'4k': { prompt: 1.5, completion: 2 },
|
||||
'16k': { prompt: 3, completion: 4 },
|
||||
'gpt-3.5-turbo-1106': { prompt: 1, completion: 2 },
|
||||
'gpt-4o-mini': { prompt: 0.15, completion: 0.6 },
|
||||
'gpt-4o': { prompt: 5, completion: 15 },
|
||||
'gpt-4-1106': { prompt: 10, completion: 30 },
|
||||
'gpt-3.5-turbo-0125': { prompt: 0.5, completion: 1.5 },
|
||||
@@ -54,6 +55,8 @@ const getValueKey = (model, endpoint) => {
|
||||
return 'gpt-3.5-turbo-1106';
|
||||
} else if (modelName.includes('gpt-3.5')) {
|
||||
return '4k';
|
||||
} else if (modelName.includes('gpt-4o-mini')) {
|
||||
return 'gpt-4o-mini';
|
||||
} else if (modelName.includes('gpt-4o')) {
|
||||
return 'gpt-4o';
|
||||
} else if (modelName.includes('gpt-4-vision')) {
|
||||
|
||||
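Two details worth noting in the pricing change above: the gpt-4o-mini branch must be checked before the gpt-4o branch, because 'gpt-4o-mini' also contains the substring 'gpt-4o' and includes() would otherwise misclassify it; and, assuming (as the magnitudes of the other entries suggest) these multipliers are USD per million tokens, a request can be costed roughly as follows:

// Rough cost sketch under the stated per-1M-token assumption.
const tokenValues = { 'gpt-4o-mini': { prompt: 0.15, completion: 0.6 } };

function estimateCostUSD(valueKey, promptTokens, completionTokens) {
  const { prompt, completion } = tokenValues[valueKey];
  return (promptTokens * prompt + completionTokens * completion) / 1e6;
}

// e.g. 10,000 prompt + 2,000 completion tokens on gpt-4o-mini ≈ $0.0027
estimateCostUSD('gpt-4o-mini', 10000, 2000);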
@@ -49,6 +49,12 @@ describe('getValueKey', () => {
|
||||
expect(getValueKey('gpt-4o-0125')).toBe('gpt-4o');
|
||||
});
|
||||
|
||||
it('should return "gpt-4o-mini" for model type of "gpt-4o-mini"', () => {
|
||||
expect(getValueKey('gpt-4o-mini-2024-07-18')).toBe('gpt-4o-mini');
|
||||
expect(getValueKey('openai/gpt-4o-mini')).toBe('gpt-4o-mini');
|
||||
expect(getValueKey('gpt-4o-mini-0718')).toBe('gpt-4o-mini');
|
||||
});
|
||||
|
||||
it('should return "claude-3-5-sonnet" for model type of "claude-3-5-sonnet-"', () => {
|
||||
expect(getValueKey('claude-3-5-sonnet-20240620')).toBe('claude-3-5-sonnet');
|
||||
expect(getValueKey('anthropic/claude-3-5-sonnet')).toBe('claude-3-5-sonnet');
|
||||
@@ -109,6 +115,19 @@ describe('getMultiplier', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should return the correct multiplier for gpt-4o-mini', () => {
|
||||
const valueKey = getValueKey('gpt-4o-mini-2024-07-18');
|
||||
expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(
|
||||
tokenValues['gpt-4o-mini'].prompt,
|
||||
);
|
||||
expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
|
||||
tokenValues['gpt-4o-mini'].completion,
|
||||
);
|
||||
expect(getMultiplier({ valueKey, tokenType: 'completion' })).not.toBe(
|
||||
tokenValues['gpt-4-1106'].completion,
|
||||
);
|
||||
});
|
||||
|
||||
it('should derive the valueKey from the model if not provided for new models', () => {
|
||||
expect(
|
||||
getMultiplier({ tokenType: 'prompt', model: 'gpt-3.5-turbo-1106-some-other-info' }),
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
const throttle = require('lodash/throttle');
|
||||
const { getResponseSender, Constants, EModelEndpoint } = require('librechat-data-provider');
|
||||
const { getResponseSender, Constants, CacheKeys, Time } = require('librechat-data-provider');
|
||||
const { createAbortController, handleAbortError } = require('~/server/middleware');
|
||||
const { sendMessage, createOnProgress } = require('~/server/utils');
|
||||
const { getLogStores } = require('~/cache');
|
||||
const { saveMessage } = require('~/models');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
@@ -51,11 +52,13 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
|
||||
|
||||
try {
|
||||
const { client } = await initializeClient({ req, res, endpointOption });
|
||||
const unfinished = endpointOption.endpoint === EModelEndpoint.google ? false : true;
|
||||
const messageCache = getLogStores(CacheKeys.MESSAGES);
|
||||
const { onProgress: progressCallback, getPartialText } = createOnProgress({
|
||||
onProgress: throttle(
|
||||
({ text: partialText }) => {
|
||||
saveMessage({
|
||||
/*
|
||||
const unfinished = endpointOption.endpoint === EModelEndpoint.google ? false : true;
|
||||
messageCache.set(responseMessageId, {
|
||||
messageId: responseMessageId,
|
||||
sender,
|
||||
conversationId,
|
||||
@@ -65,7 +68,10 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
|
||||
unfinished,
|
||||
error: false,
|
||||
user,
|
||||
});
|
||||
}, Time.FIVE_MINUTES);
|
||||
*/
|
||||
|
||||
messageCache.set(responseMessageId, partialText, Time.FIVE_MINUTES);
|
||||
},
|
||||
3000,
|
||||
{ trailing: false },
|
||||
@@ -144,11 +150,17 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
|
||||
});
|
||||
res.end();
|
||||
|
||||
await saveMessage({ ...response, user });
|
||||
await saveMessage(
|
||||
req,
|
||||
{ ...response, user },
|
||||
{ context: 'api/server/controllers/AskController.js - response end' },
|
||||
);
|
||||
}
|
||||
|
||||
if (!client.skipSaveUserMessage) {
|
||||
await saveMessage(userMessage);
|
||||
await saveMessage(req, userMessage, {
|
||||
context: 'api/server/controllers/AskController.js - don\'t skip saving user message',
|
||||
});
|
||||
}
|
||||
|
||||
if (addTitle && parentMessageId === Constants.NO_PARENT && newConvo) {
|
||||
|
||||
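The net effect of the AskController change above is that partial response text is no longer persisted to MongoDB on every progress tick; it is throttled into the CacheKeys.MESSAGES store with a five-minute TTL, and only the final message is saved via saveMessage(req, ...). A condensed, self-contained sketch of the pattern, with names taken from the diff:

const throttle = require('lodash/throttle');
const { CacheKeys, Time } = require('librechat-data-provider');
const { getLogStores } = require('~/cache');

function makeThrottledPartialSaver(responseMessageId) {
  const messageCache = getLogStores(CacheKeys.MESSAGES);
  // Cache the partial text instead of writing a DB row per streamed chunk.
  return throttle(
    ({ text: partialText }) => {
      messageCache.set(responseMessageId, partialText, Time.FIVE_MINUTES);
    },
    3000,
    { trailing: false },
  );
}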
@@ -1,7 +1,8 @@
|
||||
const throttle = require('lodash/throttle');
|
||||
const { getResponseSender, EModelEndpoint } = require('librechat-data-provider');
|
||||
const { getResponseSender, CacheKeys, Time } = require('librechat-data-provider');
|
||||
const { createAbortController, handleAbortError } = require('~/server/middleware');
|
||||
const { sendMessage, createOnProgress } = require('~/server/utils');
|
||||
const { getLogStores } = require('~/cache');
|
||||
const { saveMessage } = require('~/models');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
@@ -51,12 +52,14 @@ const EditController = async (req, res, next, initializeClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const unfinished = endpointOption.endpoint === EModelEndpoint.google ? false : true;
|
||||
const messageCache = getLogStores(CacheKeys.MESSAGES);
|
||||
const { onProgress: progressCallback, getPartialText } = createOnProgress({
|
||||
generation,
|
||||
onProgress: throttle(
|
||||
({ text: partialText }) => {
|
||||
saveMessage({
|
||||
/*
|
||||
const unfinished = endpointOption.endpoint === EModelEndpoint.google ? false : true;
|
||||
{
|
||||
messageId: responseMessageId,
|
||||
sender,
|
||||
conversationId,
|
||||
@@ -67,7 +70,8 @@ const EditController = async (req, res, next, initializeClient) => {
|
||||
isEdited: true,
|
||||
error: false,
|
||||
user,
|
||||
});
|
||||
} */
|
||||
messageCache.set(responseMessageId, partialText, Time.FIVE_MINUTES);
|
||||
},
|
||||
3000,
|
||||
{ trailing: false },
|
||||
@@ -141,7 +145,11 @@ const EditController = async (req, res, next, initializeClient) => {
|
||||
});
|
||||
res.end();
|
||||
|
||||
await saveMessage({ ...response, user });
|
||||
await saveMessage(
|
||||
req,
|
||||
{ ...response, user },
|
||||
{ context: 'api/server/controllers/EditController.js - response end' },
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
const partialText = getPartialText();
|
||||
|
||||
@@ -120,21 +120,22 @@ const chatV1 = async (req, res) => {
|
||||
? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
|
||||
: ''
|
||||
}`;
|
||||
return sendResponse(res, messageData, errorMessage);
|
||||
return sendResponse(req, res, messageData, errorMessage);
|
||||
} else if (error?.message?.includes('string too long')) {
|
||||
return sendResponse(
|
||||
req,
|
||||
res,
|
||||
messageData,
|
||||
'Message too long. The Assistants API has a limit of 32,768 characters per message. Please shorten it and try again.',
|
||||
);
|
||||
} else if (error?.message?.includes(ViolationTypes.TOKEN_BALANCE)) {
|
||||
return sendResponse(res, messageData, error.message);
|
||||
return sendResponse(req, res, messageData, error.message);
|
||||
} else {
|
||||
logger.error('[/assistants/chat/]', error);
|
||||
}
|
||||
|
||||
if (!openai || !thread_id || !run_id) {
|
||||
return sendResponse(res, messageData, defaultErrorMessage);
|
||||
return sendResponse(req, res, messageData, defaultErrorMessage);
|
||||
}
|
||||
|
||||
await sleep(2000);
|
||||
@@ -221,10 +222,10 @@ const chatV1 = async (req, res) => {
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('[/assistants/chat/] Error finalizing error process', error);
|
||||
return sendResponse(res, messageData, 'The Assistant run failed');
|
||||
return sendResponse(req, res, messageData, 'The Assistant run failed');
|
||||
}
|
||||
|
||||
return sendResponse(res, finalEvent);
|
||||
return sendResponse(req, res, finalEvent);
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -382,6 +383,9 @@ const chatV1 = async (req, res) => {
|
||||
return files;
|
||||
};
|
||||
|
||||
/** @type {Promise<Run>|undefined} */
|
||||
let userMessagePromise;
|
||||
|
||||
const initializeThread = async () => {
|
||||
/** @type {[ undefined | MongoFile[]]}*/
|
||||
const [processedFiles] = await Promise.all([addVisionPrompt(), getRequestFileIds()]);
|
||||
@@ -438,7 +442,7 @@ const chatV1 = async (req, res) => {
|
||||
previousMessages.push(requestMessage);
|
||||
|
||||
/* asynchronous */
|
||||
saveUserMessage({ ...requestMessage, model });
|
||||
userMessagePromise = saveUserMessage(req, { ...requestMessage, model });
|
||||
|
||||
conversation = {
|
||||
conversationId,
|
||||
@@ -582,7 +586,10 @@ const chatV1 = async (req, res) => {
|
||||
});
|
||||
res.end();
|
||||
|
||||
await saveAssistantMessage({ ...responseMessage, model });
|
||||
if (userMessagePromise) {
|
||||
await userMessagePromise;
|
||||
}
|
||||
await saveAssistantMessage(req, { ...responseMessage, model });
|
||||
|
||||
if (parentMessageId === Constants.NO_PARENT && !_thread_id) {
|
||||
addTitle(req, {
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
const { v4 } = require('uuid');
|
||||
const {
|
||||
Time,
|
||||
Constants,
|
||||
RunStatus,
|
||||
CacheKeys,
|
||||
ContentTypes,
|
||||
ToolCallTypes,
|
||||
EModelEndpoint,
|
||||
ViolationTypes,
|
||||
retrievalMimeTypes,
|
||||
AssistantStreamEvents,
|
||||
} = require('librechat-data-provider');
|
||||
@@ -14,12 +14,12 @@ const {
|
||||
initThread,
|
||||
recordUsage,
|
||||
saveUserMessage,
|
||||
checkMessageGaps,
|
||||
addThreadMetadata,
|
||||
saveAssistantMessage,
|
||||
} = require('~/server/services/Threads');
|
||||
const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
|
||||
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
|
||||
const { sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
|
||||
const { createErrorHandler } = require('~/server/controllers/assistants/errors');
|
||||
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
|
||||
const { createRun, StreamRunManager } = require('~/server/services/Runs');
|
||||
const { addTitle } = require('~/server/services/Endpoints/assistants');
|
||||
@@ -44,7 +44,7 @@ const ten_minutes = 1000 * 60 * 10;
|
||||
const chatV2 = async (req, res) => {
|
||||
logger.debug('[/assistants/chat/] req.body', req.body);
|
||||
|
||||
/** @type {{ files: MongoFile[]}} */
|
||||
/** @type {{files: MongoFile[]}} */
|
||||
const {
|
||||
text,
|
||||
model,
|
||||
@@ -90,139 +90,20 @@ const chatV2 = async (req, res) => {
|
||||
/** @type {Run | undefined} - The completed run, undefined if incomplete */
|
||||
let completedRun;
|
||||
|
||||
const handleError = async (error) => {
|
||||
const defaultErrorMessage =
|
||||
'The Assistant run failed to initialize. Try sending a message in a new conversation.';
|
||||
const messageData = {
|
||||
thread_id,
|
||||
assistant_id,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
sender: 'System',
|
||||
user: req.user.id,
|
||||
shouldSaveMessage: false,
|
||||
messageId: responseMessageId,
|
||||
endpoint,
|
||||
};
|
||||
const getContext = () => ({
|
||||
openai,
|
||||
run_id,
|
||||
endpoint,
|
||||
cacheKey,
|
||||
thread_id,
|
||||
completedRun,
|
||||
assistant_id,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
responseMessageId,
|
||||
});
|
||||
|
||||
if (error.message === 'Run cancelled') {
|
||||
return res.end();
|
||||
} else if (error.message === 'Request closed' && completedRun) {
|
||||
return;
|
||||
} else if (error.message === 'Request closed') {
|
||||
logger.debug('[/assistants/chat/] Request aborted on close');
|
||||
} else if (/Files.*are invalid/.test(error.message)) {
|
||||
const errorMessage = `Files are invalid, or may not have uploaded yet.${
|
||||
endpoint === EModelEndpoint.azureAssistants
|
||||
? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
|
||||
: ''
|
||||
}`;
|
||||
return sendResponse(res, messageData, errorMessage);
|
||||
} else if (error?.message?.includes('string too long')) {
|
||||
return sendResponse(
|
||||
res,
|
||||
messageData,
|
||||
'Message too long. The Assistants API has a limit of 32,768 characters per message. Please shorten it and try again.',
|
||||
);
|
||||
} else if (error?.message?.includes(ViolationTypes.TOKEN_BALANCE)) {
|
||||
return sendResponse(res, messageData, error.message);
|
||||
} else {
|
||||
logger.error('[/assistants/chat/]', error);
|
||||
}
|
||||
|
||||
if (!openai || !thread_id || !run_id) {
|
||||
return sendResponse(res, messageData, defaultErrorMessage);
|
||||
}
|
||||
|
||||
await sleep(2000);
|
||||
|
||||
try {
|
||||
const status = await cache.get(cacheKey);
|
||||
if (status === 'cancelled') {
|
||||
logger.debug('[/assistants/chat/] Run already cancelled');
|
||||
return res.end();
|
||||
}
|
||||
await cache.delete(cacheKey);
|
||||
const cancelledRun = await openai.beta.threads.runs.cancel(thread_id, run_id);
|
||||
logger.debug('[/assistants/chat/] Cancelled run:', cancelledRun);
|
||||
} catch (error) {
|
||||
logger.error('[/assistants/chat/] Error cancelling run', error);
|
||||
}
|
||||
|
||||
await sleep(2000);
|
||||
|
||||
let run;
|
||||
try {
|
||||
run = await openai.beta.threads.runs.retrieve(thread_id, run_id);
|
||||
await recordUsage({
|
||||
...run.usage,
|
||||
model: run.model,
|
||||
user: req.user.id,
|
||||
conversationId,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('[/assistants/chat/] Error fetching or processing run', error);
|
||||
}
|
||||
|
||||
let finalEvent;
|
||||
try {
|
||||
const runMessages = await checkMessageGaps({
|
||||
openai,
|
||||
run_id,
|
||||
endpoint,
|
||||
thread_id,
|
||||
conversationId,
|
||||
latestMessageId: responseMessageId,
|
||||
});
|
||||
|
||||
const errorContentPart = {
|
||||
text: {
|
||||
value:
|
||||
error?.message ?? 'There was an error processing your request. Please try again later.',
|
||||
},
|
||||
type: ContentTypes.ERROR,
|
||||
};
|
||||
|
||||
if (!Array.isArray(runMessages[runMessages.length - 1]?.content)) {
|
||||
runMessages[runMessages.length - 1].content = [errorContentPart];
|
||||
} else {
|
||||
const contentParts = runMessages[runMessages.length - 1].content;
|
||||
for (let i = 0; i < contentParts.length; i++) {
|
||||
const currentPart = contentParts[i];
|
||||
/** @type {CodeToolCall | RetrievalToolCall | FunctionToolCall | undefined} */
|
||||
const toolCall = currentPart?.[ContentTypes.TOOL_CALL];
|
||||
if (
|
||||
toolCall &&
|
||||
toolCall?.function &&
|
||||
!(toolCall?.function?.output || toolCall?.function?.output?.length)
|
||||
) {
|
||||
contentParts[i] = {
|
||||
...currentPart,
|
||||
[ContentTypes.TOOL_CALL]: {
|
||||
...toolCall,
|
||||
function: {
|
||||
...toolCall.function,
|
||||
output: 'error processing tool',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
runMessages[runMessages.length - 1].content.push(errorContentPart);
|
||||
}
|
||||
|
||||
finalEvent = {
|
||||
final: true,
|
||||
conversation: await getConvo(req.user.id, conversationId),
|
||||
runMessages,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('[/assistants/chat/] Error finalizing error process', error);
|
||||
return sendResponse(res, messageData, 'The Assistant run failed');
|
||||
}
|
||||
|
||||
return sendResponse(res, finalEvent);
|
||||
};
|
||||
const handleError = createErrorHandler({ req, res, getContext });
|
||||
|
||||
try {
|
||||
res.on('close', async () => {
|
||||
@@ -365,6 +246,9 @@ const chatV2 = async (req, res) => {
|
||||
}
|
||||
};
|
||||
|
||||
/** @type {Promise<Run>|undefined} */
|
||||
let userMessagePromise;
|
||||
|
||||
const initializeThread = async () => {
|
||||
await getRequestFileIds();
|
||||
|
||||
@@ -407,7 +291,7 @@ const chatV2 = async (req, res) => {
|
||||
previousMessages.push(requestMessage);
|
||||
|
||||
/* asynchronous */
|
||||
saveUserMessage({ ...requestMessage, model });
|
||||
userMessagePromise = saveUserMessage(req, { ...requestMessage, model });
|
||||
|
||||
conversation = {
|
||||
conversationId,
|
||||
@@ -489,6 +373,11 @@ const chatV2 = async (req, res) => {
|
||||
},
|
||||
};
|
||||
|
||||
/** @type {undefined | TAssistantEndpoint} */
|
||||
const config = req.app.locals[endpoint] ?? {};
|
||||
/** @type {undefined | TBaseEndpoint} */
|
||||
const allConfig = req.app.locals.all;
|
||||
|
||||
const streamRunManager = new StreamRunManager({
|
||||
req,
|
||||
res,
|
||||
@@ -498,6 +387,7 @@ const chatV2 = async (req, res) => {
|
||||
attachedFileIds,
|
||||
parentMessageId: userMessageId,
|
||||
responseMessage: openai.responseMessage,
|
||||
streamRate: allConfig?.streamRate ?? config.streamRate,
|
||||
// streamOptions: {
|
||||
|
||||
// },
|
||||
@@ -510,6 +400,16 @@ const chatV2 = async (req, res) => {
|
||||
|
||||
response = streamRunManager;
|
||||
response.text = streamRunManager.intermediateText;
|
||||
|
||||
const messageCache = getLogStores(CacheKeys.MESSAGES);
|
||||
messageCache.set(
|
||||
responseMessageId,
|
||||
{
|
||||
complete: true,
|
||||
text: response.text,
|
||||
},
|
||||
Time.FIVE_MINUTES,
|
||||
);
|
||||
};
|
||||
|
||||
await processRun();
|
||||
@@ -552,7 +452,10 @@ const chatV2 = async (req, res) => {
|
||||
});
|
||||
res.end();
|
||||
|
||||
await saveAssistantMessage({ ...responseMessage, model });
|
||||
if (userMessagePromise) {
|
||||
await userMessagePromise;
|
||||
}
|
||||
await saveAssistantMessage(req, { ...responseMessage, model });
|
||||
|
||||
if (parentMessageId === Constants.NO_PARENT && !_thread_id) {
|
||||
addTitle(req, {
|
||||
|
||||
193 api/server/controllers/assistants/errors.js Normal file
@@ -0,0 +1,193 @@
|
||||
// errorHandler.js
|
||||
const { sendResponse } = require('~/server/utils');
|
||||
const { logger } = require('~/config');
|
||||
const getLogStores = require('~/cache/getLogStores');
|
||||
const { CacheKeys, ViolationTypes, ContentTypes } = require('librechat-data-provider');
|
||||
const { getConvo } = require('~/models/Conversation');
|
||||
const { recordUsage, checkMessageGaps } = require('~/server/services/Threads');
|
||||
|
||||
/**
|
||||
* @typedef {Object} ErrorHandlerContext
|
||||
* @property {OpenAIClient} openai - The OpenAI client
|
||||
* @property {string} thread_id - The thread ID
|
||||
* @property {string} run_id - The run ID
|
||||
* @property {boolean} completedRun - Whether the run has completed
|
||||
* @property {string} assistant_id - The assistant ID
|
||||
* @property {string} conversationId - The conversation ID
|
||||
* @property {string} parentMessageId - The parent message ID
|
||||
* @property {string} responseMessageId - The response message ID
|
||||
* @property {string} endpoint - The endpoint being used
|
||||
* @property {string} cacheKey - The cache key for the current request
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ErrorHandlerDependencies
|
||||
* @property {Express.Request} req - The Express request object
|
||||
* @property {Express.Response} res - The Express response object
|
||||
* @property {() => ErrorHandlerContext} getContext - Function to get the current context
|
||||
* @property {string} [originPath] - The origin path for the error handler
|
||||
*/
|
||||
|
||||
/**
|
||||
* Creates an error handler function with the given dependencies
|
||||
* @param {ErrorHandlerDependencies} dependencies - The dependencies for the error handler
|
||||
* @returns {(error: Error) => Promise<void>} The error handler function
|
||||
*/
|
||||
const createErrorHandler = ({ req, res, getContext, originPath = '/assistants/chat/' }) => {
|
||||
const cache = getLogStores(CacheKeys.ABORT_KEYS);
|
||||
|
||||
/**
|
||||
* Handles errors that occur during the chat process
|
||||
* @param {Error} error - The error that occurred
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
return async (error) => {
|
||||
const {
|
||||
openai,
|
||||
run_id,
|
||||
endpoint,
|
||||
cacheKey,
|
||||
thread_id,
|
||||
completedRun,
|
||||
assistant_id,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
responseMessageId,
|
||||
} = getContext();
|
||||
|
||||
const defaultErrorMessage =
|
||||
'The Assistant run failed to initialize. Try sending a message in a new conversation.';
|
||||
const messageData = {
|
||||
thread_id,
|
||||
assistant_id,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
sender: 'System',
|
||||
user: req.user.id,
|
||||
shouldSaveMessage: false,
|
||||
messageId: responseMessageId,
|
||||
endpoint,
|
||||
};
|
||||
|
||||
if (error.message === 'Run cancelled') {
|
||||
return res.end();
|
||||
} else if (error.message === 'Request closed' && completedRun) {
|
||||
return;
|
||||
} else if (error.message === 'Request closed') {
|
||||
logger.debug(`[${originPath}] Request aborted on close`);
|
||||
} else if (/Files.*are invalid/.test(error.message)) {
|
||||
const errorMessage = `Files are invalid, or may not have uploaded yet.${
|
||||
endpoint === 'azureAssistants'
|
||||
? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
|
||||
: ''
|
||||
}`;
|
||||
return sendResponse(req, res, messageData, errorMessage);
|
||||
} else if (error?.message?.includes('string too long')) {
|
||||
return sendResponse(
|
||||
req,
|
||||
res,
|
||||
messageData,
|
||||
'Message too long. The Assistants API has a limit of 32,768 characters per message. Please shorten it and try again.',
|
||||
);
|
||||
} else if (error?.message?.includes(ViolationTypes.TOKEN_BALANCE)) {
|
||||
return sendResponse(req, res, messageData, error.message);
|
||||
} else {
|
||||
logger.error(`[${originPath}]`, error);
|
||||
}
|
||||
|
||||
if (!openai || !thread_id || !run_id) {
|
||||
return sendResponse(req, res, messageData, defaultErrorMessage);
|
||||
}
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||
|
||||
try {
|
||||
const status = await cache.get(cacheKey);
|
||||
if (status === 'cancelled') {
|
||||
logger.debug(`[${originPath}] Run already cancelled`);
|
||||
return res.end();
|
||||
}
|
||||
await cache.delete(cacheKey);
|
||||
const cancelledRun = await openai.beta.threads.runs.cancel(thread_id, run_id);
|
||||
logger.debug(`[${originPath}] Cancelled run:`, cancelledRun);
|
||||
} catch (error) {
|
||||
logger.error(`[${originPath}] Error cancelling run`, error);
|
||||
}
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||
|
||||
let run;
|
||||
try {
|
||||
run = await openai.beta.threads.runs.retrieve(thread_id, run_id);
|
||||
await recordUsage({
|
||||
...run.usage,
|
||||
model: run.model,
|
||||
user: req.user.id,
|
||||
conversationId,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error(`[${originPath}] Error fetching or processing run`, error);
|
||||
}
|
||||
|
||||
let finalEvent;
|
||||
try {
|
||||
const runMessages = await checkMessageGaps({
|
||||
openai,
|
||||
run_id,
|
||||
endpoint,
|
||||
thread_id,
|
||||
conversationId,
|
||||
latestMessageId: responseMessageId,
|
||||
});
|
||||
|
||||
const errorContentPart = {
|
||||
text: {
|
||||
value:
|
||||
error?.message ?? 'There was an error processing your request. Please try again later.',
|
||||
},
|
||||
type: ContentTypes.ERROR,
|
||||
};
|
||||
|
||||
if (!Array.isArray(runMessages[runMessages.length - 1]?.content)) {
|
||||
runMessages[runMessages.length - 1].content = [errorContentPart];
|
||||
} else {
|
||||
const contentParts = runMessages[runMessages.length - 1].content;
|
||||
for (let i = 0; i < contentParts.length; i++) {
|
||||
const currentPart = contentParts[i];
|
||||
/** @type {CodeToolCall | RetrievalToolCall | FunctionToolCall | undefined} */
|
||||
const toolCall = currentPart?.[ContentTypes.TOOL_CALL];
|
||||
if (
|
||||
toolCall &&
|
||||
toolCall?.function &&
|
||||
!(toolCall?.function?.output || toolCall?.function?.output?.length)
|
||||
) {
|
||||
contentParts[i] = {
|
||||
...currentPart,
|
||||
[ContentTypes.TOOL_CALL]: {
|
||||
...toolCall,
|
||||
function: {
|
||||
...toolCall.function,
|
||||
output: 'error processing tool',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
runMessages[runMessages.length - 1].content.push(errorContentPart);
|
||||
}
|
||||
|
||||
finalEvent = {
|
||||
final: true,
|
||||
conversation: await getConvo(req.user.id, conversationId),
|
||||
runMessages,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`[${originPath}] Error finalizing error process`, error);
|
||||
return sendResponse(req, res, messageData, 'The Assistant run failed');
|
||||
}
|
||||
|
||||
return sendResponse(req, res, finalEvent);
|
||||
};
|
||||
};
|
||||
|
||||
module.exports = { createErrorHandler };
|
||||
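As used in chatV2 earlier in this diff, the factory is wired with a getContext closure so the handler always reads the latest run and thread state at error time. A condensed sketch of that wiring; the state values live in the controller scope:

const { createErrorHandler } = require('~/server/controllers/assistants/errors');

function wireErrorHandler(req, res, state) {
  const getContext = () => ({
    openai: state.openai,
    run_id: state.run_id,
    endpoint: state.endpoint,
    cacheKey: state.cacheKey,
    thread_id: state.thread_id,
    completedRun: state.completedRun,
    assistant_id: state.assistant_id,
    conversationId: state.conversationId,
    parentMessageId: state.parentMessageId,
    responseMessageId: state.responseMessageId,
  });
  // Returned handler is awaited in the controller's catch block.
  return createErrorHandler({ req, res, getContext });
}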
@@ -94,6 +94,7 @@ const startServer = async () => {
|
||||
app.use('/api/share', routes.share);
|
||||
app.use('/api/roles', routes.roles);
|
||||
|
||||
app.use('/api/tags', routes.tags);
|
||||
app.use((req, res) => {
|
||||
res.sendFile(path.join(app.locals.paths.dist, 'index.html'));
|
||||
});
|
||||
|
||||
@@ -30,7 +30,10 @@ async function abortMessage(req, res) {
|
||||
return res.status(204).send({ message: 'Request not found' });
|
||||
}
|
||||
const finalEvent = await abortController.abortCompletion();
|
||||
logger.info('[abortMessage] Aborted request', { abortKey });
|
||||
logger.debug(
|
||||
`[abortMessage] ID: ${req.user.id} | ${req.user.email} | Aborted request: ` +
|
||||
JSON.stringify({ abortKey }),
|
||||
);
|
||||
abortControllers.delete(abortKey);
|
||||
|
||||
if (res.headersSent && finalEvent) {
|
||||
@@ -116,7 +119,11 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
|
||||
{ promptTokens, completionTokens },
|
||||
);
|
||||
|
||||
saveMessage({ ...responseMessage, user });
|
||||
saveMessage(
|
||||
req,
|
||||
{ ...responseMessage, user },
|
||||
{ context: 'api/server/middleware/abortMiddleware.js' },
|
||||
);
|
||||
|
||||
let conversation;
|
||||
if (userMessagePromise) {
|
||||
@@ -190,7 +197,7 @@ const handleAbortError = async (res, req, error, data) => {
|
||||
}
|
||||
};
|
||||
|
||||
await sendError(res, options, callback);
|
||||
await sendError(req, res, options, callback);
|
||||
};
|
||||
|
||||
if (partialText && partialText.length > 5) {
|
||||
|
||||
@@ -19,7 +19,8 @@ const validateAssistant = async (req, res, next) => {
|
||||
}
|
||||
|
||||
const { supportedIds, excludedIds } = assistantsConfig;
|
||||
const error = { message: 'Assistant not supported' };
|
||||
const error = { message: 'validateAssistant: Assistant not supported' };
|
||||
|
||||
if (supportedIds?.length && !supportedIds.includes(assistant_id)) {
|
||||
return await handleAbortError(res, req, error, {
|
||||
sender: 'System',
|
||||
|
||||
@@ -41,10 +41,14 @@ const denyRequest = async (req, res, errorMessage) => {
|
||||
const shouldSaveMessage = _convoId && parentMessageId && parentMessageId !== Constants.NO_PARENT;
|
||||
|
||||
if (shouldSaveMessage) {
|
||||
await saveMessage({ ...userMessage, user: req.user.id });
|
||||
await saveMessage(
|
||||
req,
|
||||
{ ...userMessage, user: req.user.id },
|
||||
{ context: `api/server/middleware/denyRequest.js - ${responseText}` },
|
||||
);
|
||||
}
|
||||
|
||||
return await sendError(res, {
|
||||
return await sendError(req, res, {
|
||||
sender: getResponseSender(req.body),
|
||||
messageId: crypto.randomUUID(),
|
||||
conversationId,
|
||||
|
||||
@@ -31,10 +31,14 @@ function validateImageRequest(req, res, next) {
|
||||
return res.status(403).send('Access Denied');
|
||||
}
|
||||
|
||||
if (req.path.includes(payload.id)) {
|
||||
const fullPath = decodeURIComponent(req.originalUrl);
|
||||
const pathPattern = new RegExp(`^/images/${payload.id}/[^/]+$`);
|
||||
|
||||
if (pathPattern.test(fullPath)) {
|
||||
logger.debug('[validateImageRequest] Image request validated');
|
||||
next();
|
||||
} else {
|
||||
logger.warn('[validateImageRequest] Invalid image path');
|
||||
res.status(403).send('Access Denied');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -76,7 +76,9 @@ describe.skip('GET /', () => {
|
||||
openidLoginEnabled: true,
|
||||
openidLabel: 'Test OpenID',
|
||||
openidImageUrl: 'http://test-server.com',
|
||||
ldapLoginEnabled: true,
|
||||
ldap: {
|
||||
enabled: true,
|
||||
},
|
||||
serverDomain: 'http://test-server.com',
|
||||
emailLoginEnabled: 'true',
|
||||
registrationEnabled: 'true',
|
||||
|
||||
55 api/server/routes/__tests__/ldap.spec.js Normal file
@@ -0,0 +1,55 @@
const request = require('supertest');
const express = require('express');
const { getLdapConfig } = require('~/server/services/Config/ldap');
const { isEnabled } = require('~/server/utils');

jest.mock('~/server/services/Config/ldap');
jest.mock('~/server/utils');

const app = express();

// Mock the route handler
app.get('/api/config', (req, res) => {
  const ldapConfig = getLdapConfig();
  res.json({ ldap: ldapConfig });
});

describe('LDAP Config Tests', () => {
  afterEach(() => {
    jest.resetAllMocks();
  });

  it('should return LDAP config with username property when LDAP_LOGIN_USES_USERNAME is enabled', async () => {
    getLdapConfig.mockReturnValue({ enabled: true, username: true });
    isEnabled.mockReturnValue(true);

    const response = await request(app).get('/api/config');

    expect(response.statusCode).toBe(200);
    expect(response.body.ldap).toEqual({
      enabled: true,
      username: true,
    });
  });

  it('should return LDAP config without username property when LDAP_LOGIN_USES_USERNAME is not enabled', async () => {
    getLdapConfig.mockReturnValue({ enabled: true });
    isEnabled.mockReturnValue(false);

    const response = await request(app).get('/api/config');

    expect(response.statusCode).toBe(200);
    expect(response.body.ldap).toEqual({
      enabled: true,
    });
  });

  it('should not return LDAP config when LDAP is not enabled', async () => {
    getLdapConfig.mockReturnValue(undefined);

    const response = await request(app).get('/api/config');

    expect(response.statusCode).toBe(200);
    expect(response.body.ldap).toBeUndefined();
  });
});
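The tests above pin down the expected shape of getLdapConfig without showing its implementation. A plausible sketch consistent with those expectations; the env-variable names are taken from elsewhere in this changeset and the structure is an assumption, not the confirmed service code:

// Hypothetical ~/server/services/Config/ldap.js — shape inferred from the spec above.
const { isEnabled } = require('~/server/utils');

function getLdapConfig() {
  const ldapLoginEnabled = !!process.env.LDAP_URL && !!process.env.LDAP_USER_SEARCH_BASE;
  if (!ldapLoginEnabled) {
    return undefined;
  }
  const config = { enabled: true };
  if (isEnabled(process.env.LDAP_LOGIN_USES_USERNAME)) {
    config.username = true;
  }
  return config;
}

module.exports = { getLdapConfig };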
@@ -51,8 +51,8 @@ router.post('/', setHeaders, async (req, res) => {
|
||||
});
|
||||
|
||||
if (!overrideParentMessageId) {
|
||||
await saveMessage({ ...userMessage, user: req.user.id });
|
||||
await saveConvo(req.user.id, {
|
||||
await saveMessage(req, { ...userMessage, user: req.user.id });
|
||||
await saveConvo(req, {
|
||||
...userMessage,
|
||||
...endpointOption,
|
||||
conversationId,
|
||||
@@ -93,7 +93,7 @@ const ask = async ({
|
||||
const currentTimestamp = Date.now();
|
||||
if (currentTimestamp - lastSavedTimestamp > 500) {
|
||||
lastSavedTimestamp = currentTimestamp;
|
||||
saveMessage({
|
||||
saveMessage(req, {
|
||||
messageId: responseMessageId,
|
||||
sender: endpointOption?.jailbreak ? 'Sydney' : 'BingAI',
|
||||
conversationId,
|
||||
@@ -159,7 +159,7 @@ const ask = async ({
|
||||
isCreatedByUser: false,
|
||||
};
|
||||
|
||||
await saveMessage({ ...responseMessage, user });
|
||||
await saveMessage(req, { ...responseMessage, user });
|
||||
responseMessage.messageId = newResponseMessageId;
|
||||
|
||||
// STEP2 update the conversation
|
||||
@@ -183,7 +183,7 @@ const ask = async ({
|
||||
}
|
||||
}
|
||||
|
||||
await saveConvo(user, conversationUpdate);
|
||||
await saveConvo(req, conversationUpdate);
|
||||
conversationId = newConversationId;
|
||||
|
||||
// STEP3 update the user message
|
||||
@@ -192,7 +192,7 @@ const ask = async ({
|
||||
|
||||
// If response has parentMessageId, the fake userMessage.messageId should be updated to the real one.
|
||||
if (!overrideParentMessageId) {
|
||||
await saveMessage({
|
||||
await saveMessage(req, {
|
||||
...userMessage,
|
||||
user,
|
||||
messageId: userMessageId,
|
||||
@@ -213,7 +213,7 @@ const ask = async ({
|
||||
if (userParentMessageId == Constants.NO_PARENT) {
|
||||
// const title = await titleConvo({ endpoint: endpointOption?.endpoint, text, response: responseMessage });
|
||||
const title = await response.details.title;
|
||||
await saveConvo(user, {
|
||||
await saveConvo(req, {
|
||||
conversationId: conversationId,
|
||||
title,
|
||||
});
|
||||
@@ -229,7 +229,7 @@ const ask = async ({
|
||||
isCreatedByUser: false,
|
||||
text: `${getPartialMessage() ?? ''}\n\nError message: "${error.message}"`,
|
||||
};
|
||||
await saveMessage({ ...errorMessage, user });
|
||||
await saveMessage(req, { ...errorMessage, user });
|
||||
handleError(res, errorMessage);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -70,8 +70,8 @@ router.post('/', setHeaders, async (req, res) => {
|
||||
});
|
||||
|
||||
if (!overrideParentMessageId) {
|
||||
await saveMessage({ ...userMessage, user: req.user.id });
|
||||
await saveConvo(req.user.id, {
|
||||
await saveMessage(req, { ...userMessage, user: req.user.id });
|
||||
await saveConvo(req, {
|
||||
...userMessage,
|
||||
...endpointOption,
|
||||
conversationId,
|
||||
@@ -118,7 +118,7 @@ const ask = async ({
|
||||
const currentTimestamp = Date.now();
|
||||
if (currentTimestamp - lastSavedTimestamp > 500) {
|
||||
lastSavedTimestamp = currentTimestamp;
|
||||
saveMessage({
|
||||
saveMessage(req, {
|
||||
messageId: responseMessageId,
|
||||
sender: model,
|
||||
conversationId,
|
||||
@@ -197,7 +197,7 @@ const ask = async ({
|
||||
isCreatedByUser: false,
|
||||
};
|
||||
|
||||
await saveMessage({ ...responseMessage, user });
|
||||
await saveMessage(req, { ...responseMessage, user });
|
||||
responseMessage.messageId = newResponseMessageId;
|
||||
|
||||
let conversationUpdate = {
|
||||
@@ -216,12 +216,12 @@ const ask = async ({
|
||||
conversationUpdate.invocationId = response.invocationId;
|
||||
}
|
||||
|
||||
await saveConvo(user, conversationUpdate);
|
||||
await saveConvo(req, conversationUpdate);
|
||||
userMessage.messageId = newUserMessageId;
|
||||
|
||||
// If response has parentMessageId, the fake userMessage.messageId should be updated to the real one.
|
||||
if (!overrideParentMessageId) {
|
||||
await saveMessage({
|
||||
await saveMessage(req, {
|
||||
...userMessage,
|
||||
user,
|
||||
messageId: userMessageId,
|
||||
@@ -245,7 +245,7 @@ const ask = async ({
|
||||
response: responseMessage,
|
||||
});
|
||||
|
||||
await saveConvo(user, {
|
||||
await saveConvo(req, {
|
||||
conversationId: conversationId,
|
||||
title,
|
||||
});
|
||||
@@ -266,7 +266,7 @@ const ask = async ({
|
||||
isCreatedByUser: false,
|
||||
};
|
||||
|
||||
saveMessage({ ...responseMessage, user });
|
||||
saveMessage(req, { ...responseMessage, user });
|
||||
|
||||
return {
|
||||
title: await getConvoTitle(user, conversationId),
|
||||
@@ -288,7 +288,7 @@ const ask = async ({
|
||||
model,
|
||||
isCreatedByUser: false,
|
||||
};
|
||||
await saveMessage({ ...errorMessage, user });
|
||||
await saveMessage(req, { ...errorMessage, user });
|
||||
handleError(res, errorMessage);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
const express = require('express');
|
||||
const throttle = require('lodash/throttle');
|
||||
const { getResponseSender, Constants } = require('librechat-data-provider');
|
||||
const { getResponseSender, Constants, CacheKeys, Time } = require('librechat-data-provider');
|
||||
const { initializeClient } = require('~/server/services/Endpoints/gptPlugins');
|
||||
const { sendMessage, createOnProgress } = require('~/server/utils');
|
||||
const { addTitle } = require('~/server/services/Endpoints/openAI');
|
||||
const { saveMessage } = require('~/models');
|
||||
const { saveMessage, updateMessage } = require('~/models');
|
||||
const { getLogStores } = require('~/cache');
|
||||
const {
|
||||
handleAbort,
|
||||
createAbortController,
|
||||
@@ -71,7 +72,15 @@ router.post(
|
||||
}
|
||||
};
|
||||
|
||||
const throttledSaveMessage = throttle(saveMessage, 3000, { trailing: false });
|
||||
const messageCache = getLogStores(CacheKeys.MESSAGES);
|
||||
const throttledCacheSet = throttle(
|
||||
(text) => {
|
||||
messageCache.set(responseMessageId, text, Time.FIVE_MINUTES);
|
||||
},
|
||||
3000,
|
||||
{ trailing: false },
|
||||
);
|
||||
|
||||
let streaming = null;
|
||||
let timer = null;
|
||||
|
||||
@@ -85,18 +94,7 @@ router.post(
|
||||
clearTimeout(timer);
|
||||
}
|
||||
|
||||
throttledSaveMessage({
|
||||
messageId: responseMessageId,
|
||||
sender,
|
||||
conversationId,
|
||||
parentMessageId: overrideParentMessageId || userMessageId,
|
||||
text: partialText,
|
||||
model: endpointOption.modelOptions.model,
|
||||
unfinished: true,
|
||||
error: false,
|
||||
plugins,
|
||||
user,
|
||||
});
|
||||
throttledCacheSet(partialText);
|
||||
|
||||
streaming = new Promise((resolve) => {
|
||||
timer = setTimeout(() => {
|
||||
@@ -170,7 +168,11 @@ router.post(
|
||||
|
||||
const onChainEnd = () => {
|
||||
if (!client.skipSaveUserMessage) {
|
||||
saveMessage({ ...userMessage, user });
|
||||
saveMessage(
|
||||
req,
|
||||
{ ...userMessage, user },
|
||||
{ context: 'api/server/routes/ask/gptPlugins.js - onChainEnd' },
|
||||
);
|
||||
}
|
||||
sendIntermediateMessage(res, {
|
||||
plugins,
|
||||
@@ -207,9 +209,6 @@ router.post(
|
||||
|
||||
logger.debug('[/ask/gptPlugins]', response);
|
||||
|
||||
response.plugins = plugins.map((p) => ({ ...p, loading: false }));
|
||||
await saveMessage({ ...response, user });
|
||||
|
||||
const { conversation = {} } = await client.responsePromise;
|
||||
conversation.title =
|
||||
conversation && !conversation.title ? null : conversation?.title || 'New Chat';
|
||||
@@ -230,6 +229,15 @@ router.post(
|
||||
client,
|
||||
});
|
||||
}
|
||||
|
||||
response.plugins = plugins.map((p) => ({ ...p, loading: false }));
|
||||
if (response.plugins?.length > 0) {
|
||||
await updateMessage(
|
||||
req,
|
||||
{ ...response, user },
|
||||
{ context: 'api/server/routes/ask/gptPlugins.js - save plugins used' },
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
const partialText = getPartialText();
|
||||
handleAbortError(res, req, error, {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const express = require('express');
|
||||
const { CacheKeys, defaultSocialLogins } = require('librechat-data-provider');
|
||||
const { getLdapConfig } = require('~/server/services/Config/ldap');
|
||||
const { getProjectByName } = require('~/models/Project');
|
||||
const { isEnabled } = require('~/server/utils');
|
||||
const { getLogStores } = require('~/cache');
|
||||
@@ -33,7 +34,8 @@ router.get('/', async function (req, res) {
|
||||
|
||||
const instanceProject = await getProjectByName('instance', '_id');
|
||||
|
||||
const ldapLoginEnabled = !!process.env.LDAP_URL && !!process.env.LDAP_USER_SEARCH_BASE;
|
||||
const ldap = getLdapConfig();
|
||||
|
||||
try {
|
||||
/** @type {TStartupConfig} */
|
||||
const payload = {
|
||||
@@ -51,10 +53,9 @@ router.get('/', async function (req, res) {
|
||||
!!process.env.OPENID_SESSION_SECRET,
|
||||
openidLabel: process.env.OPENID_BUTTON_LABEL || 'Continue with OpenID',
|
||||
openidImageUrl: process.env.OPENID_IMAGE_URL,
|
||||
ldapLoginEnabled,
|
||||
serverDomain: process.env.DOMAIN_SERVER || 'http://localhost:3080',
|
||||
emailLoginEnabled,
|
||||
registrationEnabled: !ldapLoginEnabled && isEnabled(process.env.ALLOW_REGISTRATION),
|
||||
registrationEnabled: !ldap?.enabled && isEnabled(process.env.ALLOW_REGISTRATION),
|
||||
socialLoginEnabled: isEnabled(process.env.ALLOW_SOCIAL_LOGIN),
|
||||
emailEnabled:
|
||||
(!!process.env.EMAIL_SERVICE || !!process.env.EMAIL_HOST) &&
|
||||
@@ -76,6 +77,10 @@ router.get('/', async function (req, res) {
|
||||
instanceProjectId: instanceProject._id.toString(),
|
||||
};
|
||||
|
||||
if (ldap) {
|
||||
payload.ldap = ldap;
|
||||
}
|
||||
|
||||
if (typeof process.env.CUSTOM_FOOTER === 'string') {
|
||||
payload.customFooter = process.env.CUSTOM_FOOTER;
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
|
||||
const { forkConversation } = require('~/server/utils/import/fork');
|
||||
const { importConversations } = require('~/server/utils/import');
|
||||
const { createImportLimiters } = require('~/server/middleware');
|
||||
const { updateTagsForConversation } = require('~/models/ConversationTag');
|
||||
const getLogStores = require('~/cache/getLogStores');
|
||||
const { sleep } = require('~/server/utils');
|
||||
const { logger } = require('~/config');
|
||||
@@ -30,8 +31,13 @@ router.get('/', async (req, res) => {
|
||||
return res.status(400).json({ error: 'Invalid page size' });
|
||||
}
|
||||
const isArchived = req.query.isArchived === 'true';
|
||||
const tags = req.query.tags
|
||||
? Array.isArray(req.query.tags)
|
||||
? req.query.tags
|
||||
: [req.query.tags]
|
||||
: undefined;
|
||||
|
||||
res.status(200).send(await getConvosByPage(req.user.id, pageNumber, pageSize, isArchived));
|
||||
res.status(200).send(await getConvosByPage(req.user.id, pageNumber, pageSize, isArchived, tags));
|
||||
});
|
||||
|
||||
router.get('/:conversationId', async (req, res) => {
|
||||
@@ -104,7 +110,7 @@ router.post('/update', async (req, res) => {
|
||||
const update = req.body.arg;
|
||||
|
||||
try {
|
||||
const dbResponse = await saveConvo(req.user.id, update);
|
||||
const dbResponse = await saveConvo(req, update, { context: 'POST /api/convos/update' });
|
||||
res.status(201).json(dbResponse);
|
||||
} catch (error) {
|
||||
logger.error('Error updating conversation', error);
|
||||
@@ -167,4 +173,9 @@ router.post('/fork', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
router.put('/tags/:conversationId', async (req, res) => {
|
||||
const tag = await updateTagsForConversation(req.user.id, req.params.conversationId, req.body);
|
||||
res.status(200).json(tag);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
|
||||
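Taken together, the convos route changes add tag filtering on the list endpoint and a per-conversation tag update endpoint. Illustrative client calls only, to be run inside an async context; the /api/convos mount path and the request body shape are assumptions, not confirmed by this diff:

// List conversations filtered by one or more tags (repeated query params become an array server-side).
await fetch('/api/convos?tags=work&tags=ideas', { credentials: 'include' });

// Update the tags on a single conversation.
await fetch(`/api/convos/tags/${conversationId}`, {
  method: 'PUT',
  headers: { 'Content-Type': 'application/json' },
  credentials: 'include',
  body: JSON.stringify(['work', 'ideas']),
});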
@@ -1,19 +1,20 @@
|
||||
const express = require('express');
|
||||
const throttle = require('lodash/throttle');
|
||||
const { getResponseSender } = require('librechat-data-provider');
|
||||
const { getResponseSender, CacheKeys, Time } = require('librechat-data-provider');
|
||||
const {
|
||||
handleAbort,
|
||||
createAbortController,
|
||||
handleAbortError,
|
||||
setHeaders,
|
||||
handleAbort,
|
||||
moderateText,
|
||||
validateModel,
|
||||
handleAbortError,
|
||||
validateEndpoint,
|
||||
buildEndpointOption,
|
||||
moderateText,
|
||||
createAbortController,
|
||||
} = require('~/server/middleware');
|
||||
const { sendMessage, createOnProgress, formatSteps, formatAction } = require('~/server/utils');
|
||||
const { initializeClient } = require('~/server/services/Endpoints/gptPlugins');
|
||||
const { saveMessage } = require('~/models');
|
||||
const { saveMessage, updateMessage } = require('~/models');
|
||||
const { getLogStores } = require('~/cache');
|
||||
const { validateTools } = require('~/app');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
@@ -79,7 +80,15 @@ router.post(
|
||||
}
|
||||
};
|
||||
|
||||
const throttledSaveMessage = throttle(saveMessage, 3000, { trailing: false });
|
||||
const messageCache = getLogStores(CacheKeys.MESSAGES);
|
||||
const throttledCacheSet = throttle(
|
||||
(text) => {
|
||||
messageCache.set(responseMessageId, text, Time.FIVE_MINUTES);
|
||||
},
|
||||
3000,
|
||||
{ trailing: false },
|
||||
);
|
||||
|
||||
const {
|
||||
onProgress: progressCallback,
|
||||
sendIntermediateMessage,
|
||||
@@ -90,19 +99,7 @@ router.post(
|
||||
if (plugin.loading === true) {
|
||||
plugin.loading = false;
|
||||
}
|
||||
|
||||
throttledSaveMessage({
|
||||
messageId: responseMessageId,
|
||||
sender,
|
||||
conversationId,
|
||||
parentMessageId: overrideParentMessageId || userMessageId,
|
||||
text: partialText,
|
||||
model: endpointOption.modelOptions.model,
|
||||
unfinished: true,
|
||||
isEdited: true,
|
||||
error: false,
|
||||
user,
|
||||
});
|
||||
throttledCacheSet(partialText);
|
||||
},
|
||||
});
|
||||
|
||||
@@ -110,7 +107,11 @@ router.post(
|
||||
let { intermediateSteps: steps } = data;
|
||||
plugin.outputs = steps && steps[0].action ? formatSteps(steps) : 'An error occurred.';
|
||||
plugin.loading = false;
|
||||
saveMessage({ ...userMessage, user });
|
||||
saveMessage(
|
||||
req,
|
||||
{ ...userMessage, user },
|
||||
{ context: 'api/server/routes/ask/gptPlugins.js - onChainEnd' },
|
||||
);
|
||||
sendIntermediateMessage(res, {
|
||||
plugin,
|
||||
parentMessageId: userMessage.messageId,
|
||||
@@ -141,7 +142,11 @@ router.post(
|
||||
plugin.inputs.push(formattedAction);
|
||||
plugin.latest = formattedAction.plugin;
|
||||
if (!start && !client.skipSaveUserMessage) {
|
||||
saveMessage({ ...userMessage, user });
|
||||
saveMessage(
|
||||
req,
|
||||
{ ...userMessage, user },
|
||||
{ context: 'api/server/routes/ask/gptPlugins.js - onAgentAction' },
|
||||
);
|
||||
}
|
||||
sendIntermediateMessage(res, {
|
||||
plugin,
|
||||
@@ -179,8 +184,6 @@ router.post(
|
||||
}
|
||||
|
||||
logger.debug('[/edit/gptPlugins] CLIENT RESPONSE', response);
|
||||
response.plugin = { ...plugin, loading: false };
|
||||
await saveMessage({ ...response, user });
|
||||
|
||||
const { conversation = {} } = await client.responsePromise;
|
||||
conversation.title =
|
||||
@@ -194,6 +197,13 @@ router.post(
|
||||
responseMessage: response,
|
||||
});
|
||||
res.end();
|
||||
|
||||
response.plugin = { ...plugin, loading: false };
|
||||
await updateMessage(
|
||||
req,
|
||||
{ ...response, user },
|
||||
{ context: 'api/server/routes/edit/gptPlugins.js' },
|
||||
);
|
||||
} catch (error) {
|
||||
const partialText = getPartialText();
|
||||
handleAbortError(res, req, error, {
|
||||
|
||||
@@ -21,6 +21,7 @@ const staticRoute = require('./static');
const share = require('./share');
const categories = require('./categories');
const roles = require('./roles');
const tags = require('./tags');

module.exports = {
  search,
@@ -46,4 +47,5 @@ module.exports = {
  share,
  categories,
  roles,
  tags,
};
@@ -1,49 +1,79 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const {
|
||||
getMessages,
|
||||
updateMessage,
|
||||
saveConvo,
|
||||
saveMessage,
|
||||
deleteMessages,
|
||||
} = require('../../models');
|
||||
const { countTokens } = require('../utils');
|
||||
const { requireJwtAuth, validateMessageReq } = require('../middleware/');
|
||||
const { saveConvo, saveMessage, getMessages, updateMessage, deleteMessages } = require('~/models');
|
||||
const { requireJwtAuth, validateMessageReq } = require('~/server/middleware');
|
||||
const { countTokens } = require('~/server/utils');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const router = express.Router();
|
||||
router.use(requireJwtAuth);
|
||||
|
||||
/* Note: It's necessary to add `validateMessageReq` within route definition for correct params */
|
||||
router.get('/:conversationId', validateMessageReq, async (req, res) => {
|
||||
const { conversationId } = req.params;
|
||||
res.status(200).send(await getMessages({ conversationId }, '-_id -__v -user'));
|
||||
try {
|
||||
const { conversationId } = req.params;
|
||||
const messages = await getMessages({ conversationId }, '-_id -__v -user');
|
||||
res.status(200).json(messages);
|
||||
} catch (error) {
|
||||
logger.error('Error fetching messages:', error);
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
// CREATE
|
||||
router.post('/:conversationId', validateMessageReq, async (req, res) => {
|
||||
const message = req.body;
|
||||
const savedMessage = await saveMessage({ ...message, user: req.user.id });
|
||||
await saveConvo(req.user.id, savedMessage);
|
||||
res.status(201).send(savedMessage);
|
||||
try {
|
||||
const message = req.body;
|
||||
const savedMessage = await saveMessage(
|
||||
req,
|
||||
{ ...message, user: req.user.id },
|
||||
{ context: 'POST /api/messages/:conversationId' },
|
||||
);
|
||||
if (!savedMessage) {
|
||||
return res.status(400).json({ error: 'Message not saved' });
|
||||
}
|
||||
await saveConvo(req, savedMessage, { context: 'POST /api/messages/:conversationId' });
|
||||
res.status(201).json(savedMessage);
|
||||
} catch (error) {
|
||||
logger.error('Error saving message:', error);
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
// READ
|
||||
router.get('/:conversationId/:messageId', validateMessageReq, async (req, res) => {
|
||||
const { conversationId, messageId } = req.params;
|
||||
res.status(200).send(await getMessages({ conversationId, messageId }, '-_id -__v -user'));
|
||||
try {
|
||||
const { conversationId, messageId } = req.params;
|
||||
const message = await getMessages({ conversationId, messageId }, '-_id -__v -user');
|
||||
if (!message) {
|
||||
return res.status(404).json({ error: 'Message not found' });
|
||||
}
|
||||
res.status(200).json(message);
|
||||
} catch (error) {
|
||||
logger.error('Error fetching message:', error);
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
// UPDATE
|
||||
router.put('/:conversationId/:messageId', validateMessageReq, async (req, res) => {
|
||||
const { messageId, model } = req.params;
|
||||
const { text } = req.body;
|
||||
const tokenCount = await countTokens(text, model);
|
||||
res.status(201).json(await updateMessage({ messageId, text, tokenCount }));
|
||||
try {
|
||||
const { messageId, model } = req.params;
|
||||
const { text } = req.body;
|
||||
const tokenCount = await countTokens(text, model);
|
||||
const result = await updateMessage(req, { messageId, text, tokenCount });
|
||||
res.status(200).json(result);
|
||||
} catch (error) {
|
||||
logger.error('Error updating message:', error);
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
// DELETE
|
||||
router.delete('/:conversationId/:messageId', validateMessageReq, async (req, res) => {
|
||||
const { messageId } = req.params;
|
||||
await deleteMessages({ messageId });
|
||||
res.status(204).send();
|
||||
try {
|
||||
const { messageId } = req.params;
|
||||
await deleteMessages({ messageId });
|
||||
res.status(204).send();
|
||||
} catch (error) {
|
||||
logger.error('Error deleting message:', error);
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
|
||||
44 api/server/routes/tags.js Normal file
@@ -0,0 +1,44 @@
const express = require('express');

const {
  getConversationTags,
  updateConversationTag,
  createConversationTag,
  deleteConversationTag,
  rebuildConversationTags,
} = require('~/models/ConversationTag');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
const router = express.Router();
router.use(requireJwtAuth);

router.get('/', async (req, res) => {
  const tags = await getConversationTags(req.user.id);

  if (tags) {
    res.status(200).json(tags);
  } else {
    res.status(404).end();
  }
});

router.post('/', async (req, res) => {
  const tag = await createConversationTag(req.user.id, req.body);
  res.status(200).json(tag);
});

router.post('/rebuild', async (req, res) => {
  const tag = await rebuildConversationTags(req.user.id);
  res.status(200).json(tag);
});

router.put('/:tag', async (req, res) => {
  const tag = await updateConversationTag(req.user.id, req.params.tag, req.body);
  res.status(200).json(tag);
});

router.delete('/:tag', async (req, res) => {
  const tag = await deleteConversationTag(req.user.id, req.params.tag);
  res.status(200).json(tag);
});

module.exports = router;
@@ -67,17 +67,18 @@ const AppService = async (app) => {
  handleRateLimits(config?.rateLimits);

  const endpointLocals = {};
  const endpoints = config?.endpoints;

  if (config?.endpoints?.[EModelEndpoint.azureOpenAI]) {
  if (endpoints?.[EModelEndpoint.azureOpenAI]) {
    endpointLocals[EModelEndpoint.azureOpenAI] = azureConfigSetup(config);
    checkAzureVariables();
  }

  if (config?.endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) {
  if (endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) {
    endpointLocals[EModelEndpoint.azureAssistants] = azureAssistantsDefaults();
  }

  if (config?.endpoints?.[EModelEndpoint.azureAssistants]) {
  if (endpoints?.[EModelEndpoint.azureAssistants]) {
    endpointLocals[EModelEndpoint.azureAssistants] = assistantsConfigSetup(
      config,
      EModelEndpoint.azureAssistants,
@@ -85,7 +86,7 @@ const AppService = async (app) => {
    );
  }

  if (config?.endpoints?.[EModelEndpoint.assistants]) {
  if (endpoints?.[EModelEndpoint.assistants]) {
    endpointLocals[EModelEndpoint.assistants] = assistantsConfigSetup(
      config,
      EModelEndpoint.assistants,
@@ -93,6 +94,19 @@ const AppService = async (app) => {
    );
  }

  if (endpoints?.[EModelEndpoint.openAI]) {
    endpointLocals[EModelEndpoint.openAI] = endpoints[EModelEndpoint.openAI];
  }
  if (endpoints?.[EModelEndpoint.google]) {
    endpointLocals[EModelEndpoint.google] = endpoints[EModelEndpoint.google];
  }
  if (endpoints?.[EModelEndpoint.anthropic]) {
    endpointLocals[EModelEndpoint.anthropic] = endpoints[EModelEndpoint.anthropic];
  }
  if (endpoints?.[EModelEndpoint.gptPlugins]) {
    endpointLocals[EModelEndpoint.gptPlugins] = endpoints[EModelEndpoint.gptPlugins];
  }

  app.locals = {
    ...defaultLocals,
    modelSpecs: config.modelSpecs,
24 api/server/services/Config/ldap.js Normal file
@@ -0,0 +1,24 @@
const { isEnabled } = require('~/server/utils');

/** @returns {TStartupConfig['ldap'] | undefined} */
const getLdapConfig = () => {
  const ldapLoginEnabled = !!process.env.LDAP_URL && !!process.env.LDAP_USER_SEARCH_BASE;

  const ldap = {
    enabled: ldapLoginEnabled,
  };
  const ldapLoginUsesUsername = isEnabled(process.env.LDAP_LOGIN_USES_USERNAME);
  if (!ldapLoginEnabled) {
    return ldap;
  }

  if (ldapLoginUsesUsername) {
    ldap.username = true;
  }

  return ldap;
};

module.exports = {
  getLdapConfig,
};
@@ -23,10 +23,14 @@ const addTitle = async (req, { text, response, client }) => {
|
||||
|
||||
const title = await client.titleConvo({ text, responseText: response?.text });
|
||||
await titleCache.set(key, title, 120000);
|
||||
await saveConvo(req.user.id, {
|
||||
conversationId: response.conversationId,
|
||||
title,
|
||||
});
|
||||
await saveConvo(
|
||||
req,
|
||||
{
|
||||
conversationId: response.conversationId,
|
||||
title,
|
||||
},
|
||||
{ context: 'api/server/services/Endpoints/anthropic/addTitle.js' },
|
||||
);
|
||||
};
|
||||
|
||||
module.exports = addTitle;
|
||||
|
||||
@@ -19,11 +19,27 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
checkUserKeyExpiry(expiresAt, EModelEndpoint.anthropic);
|
||||
}
|
||||
|
||||
const clientOptions = {};
|
||||
|
||||
/** @type {undefined | TBaseEndpoint} */
|
||||
const anthropicConfig = req.app.locals[EModelEndpoint.anthropic];
|
||||
|
||||
if (anthropicConfig) {
|
||||
clientOptions.streamRate = anthropicConfig.streamRate;
|
||||
}
|
||||
|
||||
/** @type {undefined | TBaseEndpoint} */
|
||||
const allConfig = req.app.locals.all;
|
||||
if (allConfig) {
|
||||
clientOptions.streamRate = allConfig.streamRate;
|
||||
}
|
||||
|
||||
const client = new AnthropicClient(anthropicApiKey, {
|
||||
req,
|
||||
res,
|
||||
reverseProxyUrl: ANTHROPIC_REVERSE_PROXY ?? null,
|
||||
proxy: PROXY ?? null,
|
||||
...clientOptions,
|
||||
...endpointOption,
|
||||
});
|
||||
|
||||
|
||||
@@ -19,10 +19,14 @@ const addTitle = async (req, { text, responseText, conversationId, client }) =>
|
||||
const title = await client.titleConvo({ text, conversationId, responseText });
|
||||
await titleCache.set(key, title, 120000);
|
||||
|
||||
await saveConvo(req.user.id, {
|
||||
conversationId,
|
||||
title,
|
||||
});
|
||||
await saveConvo(
|
||||
req,
|
||||
{
|
||||
conversationId,
|
||||
title,
|
||||
},
|
||||
{ context: 'api/server/services/Endpoints/assistants/addTitle.js' },
|
||||
);
|
||||
};
|
||||
|
||||
module.exports = addTitle;
|
||||
|
||||
@@ -114,9 +114,16 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
contextStrategy: endpointConfig.summarize ? 'summarize' : null,
|
||||
directEndpoint: endpointConfig.directEndpoint,
|
||||
titleMessageRole: endpointConfig.titleMessageRole,
|
||||
streamRate: endpointConfig.streamRate,
|
||||
endpointTokenConfig,
|
||||
};
|
||||
|
||||
/** @type {undefined | TBaseEndpoint} */
|
||||
const allConfig = req.app.locals.all;
|
||||
if (allConfig) {
|
||||
customOptions.streamRate = allConfig.streamRate;
|
||||
}
|
||||
|
||||
const clientOptions = {
|
||||
reverseProxyUrl: baseURL ?? null,
|
||||
proxy: PROXY ?? null,
|
||||
|
||||
@@ -49,10 +49,14 @@ const addTitle = async (req, { text, response, client }) => {
|
||||
|
||||
const title = await titleClient.titleConvo({ text, responseText: response?.text });
|
||||
await titleCache.set(key, title, 120000);
|
||||
await saveConvo(req.user.id, {
|
||||
conversationId: response.conversationId,
|
||||
title,
|
||||
});
|
||||
await saveConvo(
|
||||
req,
|
||||
{
|
||||
conversationId: response.conversationId,
|
||||
title,
|
||||
},
|
||||
{ context: 'api/server/services/Endpoints/google/addTitle.js' },
|
||||
);
|
||||
};
|
||||
|
||||
module.exports = addTitle;
|
||||
|
||||
@@ -27,11 +27,27 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
[AuthKeys.GOOGLE_API_KEY]: GOOGLE_KEY,
|
||||
};
|
||||
|
||||
const clientOptions = {};
|
||||
|
||||
/** @type {undefined | TBaseEndpoint} */
|
||||
const allConfig = req.app.locals.all;
|
||||
/** @type {undefined | TBaseEndpoint} */
|
||||
const googleConfig = req.app.locals[EModelEndpoint.google];
|
||||
|
||||
if (googleConfig) {
|
||||
clientOptions.streamRate = googleConfig.streamRate;
|
||||
}
|
||||
|
||||
if (allConfig) {
|
||||
clientOptions.streamRate = allConfig.streamRate;
|
||||
}
|
||||
|
||||
const client = new GoogleClient(credentials, {
|
||||
req,
|
||||
res,
|
||||
reverseProxyUrl: GOOGLE_REVERSE_PROXY ?? null,
|
||||
proxy: PROXY ?? null,
|
||||
...clientOptions,
|
||||
...endpointOption,
|
||||
});
|
||||
|
||||
|
||||
@@ -8,6 +8,8 @@ jest.mock('~/server/services/UserService', () => ({
|
||||
getUserKey: jest.fn().mockImplementation(() => ({})),
|
||||
}));
|
||||
|
||||
const app = { locals: {} };
|
||||
|
||||
describe('google/initializeClient', () => {
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
@@ -23,6 +25,7 @@ describe('google/initializeClient', () => {
|
||||
const req = {
|
||||
body: { key: expiresAt },
|
||||
user: { id: '123' },
|
||||
app,
|
||||
};
|
||||
const res = {};
|
||||
const endpointOption = { modelOptions: { model: 'default-model' } };
|
||||
@@ -44,6 +47,7 @@ describe('google/initializeClient', () => {
|
||||
const req = {
|
||||
body: { key: null },
|
||||
user: { id: '123' },
|
||||
app,
|
||||
};
|
||||
const res = {};
|
||||
const endpointOption = { modelOptions: { model: 'default-model' } };
|
||||
@@ -66,6 +70,7 @@ describe('google/initializeClient', () => {
|
||||
const req = {
|
||||
body: { key: expiresAt },
|
||||
user: { id: '123' },
|
||||
app,
|
||||
};
|
||||
const res = {};
|
||||
const endpointOption = { modelOptions: { model: 'default-model' } };
|
||||
|
||||
@@ -86,6 +86,9 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
clientOptions.titleModel = azureConfig.titleModel;
|
||||
clientOptions.titleMethod = azureConfig.titleMethod ?? 'completion';
|
||||
|
||||
const azureRate = modelName.includes('gpt-4') ? 30 : 17;
|
||||
clientOptions.streamRate = azureConfig.streamRate ?? azureRate;
|
||||
|
||||
const groupName = modelGroupMap[modelName].group;
|
||||
clientOptions.addParams = azureConfig.groupMap[groupName].addParams;
|
||||
clientOptions.dropParams = azureConfig.groupMap[groupName].dropParams;
|
||||
@@ -98,6 +101,19 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
apiKey = clientOptions.azure.azureOpenAIApiKey;
|
||||
}
|
||||
|
||||
/** @type {undefined | TBaseEndpoint} */
|
||||
const pluginsConfig = req.app.locals[EModelEndpoint.gptPlugins];
|
||||
|
||||
if (!useAzure && pluginsConfig) {
|
||||
clientOptions.streamRate = pluginsConfig.streamRate;
|
||||
}
|
||||
|
||||
/** @type {undefined | TBaseEndpoint} */
|
||||
const allConfig = req.app.locals.all;
|
||||
if (allConfig) {
|
||||
clientOptions.streamRate = allConfig.streamRate;
|
||||
}
|
||||
|
||||
if (!apiKey) {
|
||||
throw new Error(`${endpoint} API key not provided. Please provide it again.`);
|
||||
}
|
||||
|
||||
@@ -23,10 +23,14 @@ const addTitle = async (req, { text, response, client }) => {
|
||||
|
||||
const title = await client.titleConvo({ text, responseText: response?.text });
|
||||
await titleCache.set(key, title, 120000);
|
||||
await saveConvo(req.user.id, {
|
||||
conversationId: response.conversationId,
|
||||
title,
|
||||
});
|
||||
await saveConvo(
|
||||
req,
|
||||
{
|
||||
conversationId: response.conversationId,
|
||||
title,
|
||||
},
|
||||
{ context: 'api/server/services/Endpoints/openAI/addTitle.js' },
|
||||
);
|
||||
};
|
||||
|
||||
module.exports = addTitle;
|
||||
|
||||
@@ -76,6 +76,10 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
|
||||
clientOptions.titleConvo = azureConfig.titleConvo;
|
||||
clientOptions.titleModel = azureConfig.titleModel;
|
||||
|
||||
const azureRate = modelName.includes('gpt-4') ? 30 : 17;
|
||||
clientOptions.streamRate = azureConfig.streamRate ?? azureRate;
|
||||
|
||||
clientOptions.titleMethod = azureConfig.titleMethod ?? 'completion';
|
||||
|
||||
const groupName = modelGroupMap[modelName].group;
|
||||
@@ -90,6 +94,19 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
apiKey = clientOptions.azure.azureOpenAIApiKey;
|
||||
}
|
||||
|
||||
/** @type {undefined | TBaseEndpoint} */
|
||||
const openAIConfig = req.app.locals[EModelEndpoint.openAI];
|
||||
|
||||
if (!isAzureOpenAI && openAIConfig) {
|
||||
clientOptions.streamRate = openAIConfig.streamRate;
|
||||
}
|
||||
|
||||
/** @type {undefined | TBaseEndpoint} */
|
||||
const allConfig = req.app.locals.all;
|
||||
if (allConfig) {
|
||||
clientOptions.streamRate = allConfig.streamRate;
|
||||
}
|
||||
|
||||
if (userProvidesKey && !apiKey) {
|
||||
throw new Error(
|
||||
JSON.stringify({
|
||||
|
||||
@@ -15,37 +15,43 @@ const getCustomConfig = require('~/server/services/Config/getCustomConfig');
|
||||
async function getCustomConfigSpeech(req, res) {
|
||||
try {
|
||||
const customConfig = await getCustomConfig();
|
||||
const sttExternal = !!customConfig.speech?.stt;
|
||||
const ttsExternal = !!customConfig.speech?.tts;
|
||||
let settings = {
|
||||
sttExternal,
|
||||
ttsExternal,
|
||||
};
|
||||
|
||||
if (!customConfig || !customConfig.speech?.speechTab) {
|
||||
throw new Error('Configuration or speechTab schema is missing');
|
||||
return res.status(200).send(settings);
|
||||
}
|
||||
|
||||
const ttsSchema = customConfig.speech?.speechTab;
|
||||
let settings = {};
|
||||
const speechTab = customConfig.speech.speechTab;
|
||||
|
||||
if (ttsSchema.advancedMode !== undefined) {
|
||||
settings.advancedMode = ttsSchema.advancedMode;
|
||||
if (speechTab.advancedMode !== undefined) {
|
||||
settings.advancedMode = speechTab.advancedMode;
|
||||
}
|
||||
|
||||
if (ttsSchema.speechToText) {
|
||||
for (const key in ttsSchema.speechToText) {
|
||||
if (ttsSchema.speechToText[key] !== undefined) {
|
||||
settings[key] = ttsSchema.speechToText[key];
|
||||
if (speechTab.speechToText) {
|
||||
for (const key in speechTab.speechToText) {
|
||||
if (speechTab.speechToText[key] !== undefined) {
|
||||
settings[key] = speechTab.speechToText[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (ttsSchema.textToSpeech) {
|
||||
for (const key in ttsSchema.textToSpeech) {
|
||||
if (ttsSchema.textToSpeech[key] !== undefined) {
|
||||
settings[key] = ttsSchema.textToSpeech[key];
|
||||
if (speechTab.textToSpeech) {
|
||||
for (const key in speechTab.textToSpeech) {
|
||||
if (speechTab.textToSpeech[key] !== undefined) {
|
||||
settings[key] = speechTab.textToSpeech[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return res.status(200).send(settings);
|
||||
} catch (error) {
|
||||
res.status(200).send();
|
||||
console.error('Failed to get custom config speech settings:', error);
|
||||
res.status(500).send('Internal Server Error');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const WebSocket = require('ws');
|
||||
const { Message } = require('~/models/Message');
|
||||
const { CacheKeys } = require('librechat-data-provider');
|
||||
const { getLogStores } = require('~/cache');
|
||||
|
||||
/**
|
||||
* @param {string[]} voiceIds - Array of voice IDs
|
||||
@@ -104,6 +105,8 @@ function createChunkProcessor(messageId) {
|
||||
throw new Error('Message ID is required');
|
||||
}
|
||||
|
||||
const messageCache = getLogStores(CacheKeys.MESSAGES);
|
||||
|
||||
/**
|
||||
* @returns {Promise<{ text: string, isFinished: boolean }[] | string>}
|
||||
*/
|
||||
@@ -116,14 +119,17 @@ function createChunkProcessor(messageId) {
|
||||
return `No change in message after ${MAX_NO_CHANGE_COUNT} attempts`;
|
||||
}
|
||||
|
||||
const message = await Message.findOne({ messageId }, 'text unfinished').lean();
|
||||
/** @type { string | { text: string; complete: boolean } } */
|
||||
const message = await messageCache.get(messageId);
|
||||
|
||||
if (!message || !message.text) {
|
||||
if (!message) {
|
||||
notFoundCount++;
|
||||
return [];
|
||||
}
|
||||
|
||||
const { text, unfinished } = message;
|
||||
const text = typeof message === 'string' ? message : message.text;
|
||||
const complete = typeof message === 'string' ? false : message.complete;
|
||||
|
||||
if (text === processedText) {
|
||||
noChangeCount++;
|
||||
}
|
||||
@@ -131,7 +137,7 @@ function createChunkProcessor(messageId) {
|
||||
const remainingText = text.slice(processedText.length);
|
||||
const chunks = [];
|
||||
|
||||
if (unfinished && remainingText.length >= 20) {
|
||||
if (!complete && remainingText.length >= 20) {
|
||||
const separatorIndex = findLastSeparatorIndex(remainingText);
|
||||
if (separatorIndex !== -1) {
|
||||
const chunkText = remainingText.slice(0, separatorIndex + 1);
|
||||
@@ -141,7 +147,7 @@ function createChunkProcessor(messageId) {
|
||||
chunks.push({ text: remainingText, isFinished: false });
|
||||
processedText = text;
|
||||
}
|
||||
} else if (!unfinished && remainingText.trim().length > 0) {
|
||||
} else if (complete && remainingText.trim().length > 0) {
|
||||
chunks.push({ text: remainingText.trim(), isFinished: true });
|
||||
processedText = text;
|
||||
}
|
||||
|
||||
@@ -1,89 +1,145 @@
|
||||
const { createChunkProcessor, splitTextIntoChunks } = require('./streamAudio');
|
||||
const { Message } = require('~/models/Message');
|
||||
|
||||
jest.mock('~/models/Message', () => ({
|
||||
Message: {
|
||||
findOne: jest.fn().mockReturnValue({
|
||||
lean: jest.fn(),
|
||||
}),
|
||||
},
|
||||
}));
|
||||
jest.mock('keyv');
|
||||
|
||||
const globalCache = {};
|
||||
jest.mock('~/cache/getLogStores', () => {
|
||||
return jest.fn().mockImplementation(() => {
|
||||
const EventEmitter = require('events');
|
||||
const { CacheKeys } = require('librechat-data-provider');
|
||||
|
||||
class KeyvMongo extends EventEmitter {
|
||||
constructor(url = 'mongodb://127.0.0.1:27017', options) {
|
||||
super();
|
||||
this.ttlSupport = false;
|
||||
url = url ?? {};
|
||||
if (typeof url === 'string') {
|
||||
url = { url };
|
||||
}
|
||||
if (url.uri) {
|
||||
url = { url: url.uri, ...url };
|
||||
}
|
||||
this.opts = {
|
||||
url,
|
||||
collection: 'keyv',
|
||||
...url,
|
||||
...options,
|
||||
};
|
||||
}
|
||||
|
||||
get = async (key) => {
|
||||
return new Promise((resolve) => {
|
||||
resolve(globalCache[key] || null);
|
||||
});
|
||||
};
|
||||
|
||||
set = async (key, value) => {
|
||||
return new Promise((resolve) => {
|
||||
globalCache[key] = value;
|
||||
resolve(true);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
return new KeyvMongo('', {
|
||||
namespace: CacheKeys.MESSAGES,
|
||||
ttl: 0,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('processChunks', () => {
|
||||
let processChunks;
|
||||
let mockMessageCache;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks();
|
||||
mockMessageCache = {
|
||||
get: jest.fn(),
|
||||
};
|
||||
require('~/cache/getLogStores').mockReturnValue(mockMessageCache);
|
||||
processChunks = createChunkProcessor('message-id');
|
||||
Message.findOne.mockClear();
|
||||
Message.findOne().lean.mockClear();
|
||||
});
|
||||
|
||||
it('should return an empty array when the message is not found', async () => {
|
||||
Message.findOne().lean.mockResolvedValueOnce(null);
|
||||
mockMessageCache.get.mockResolvedValueOnce(null);
|
||||
|
||||
const result = await processChunks();
|
||||
|
||||
expect(result).toEqual([]);
|
||||
expect(Message.findOne).toHaveBeenCalledWith({ messageId: 'message-id' }, 'text unfinished');
|
||||
expect(Message.findOne().lean).toHaveBeenCalled();
|
||||
expect(mockMessageCache.get).toHaveBeenCalledWith('message-id');
|
||||
});
|
||||
|
||||
it('should return an empty array when the message does not have a text property', async () => {
|
||||
Message.findOne().lean.mockResolvedValueOnce({ unfinished: true });
|
||||
it('should return an error message after MAX_NOT_FOUND_COUNT attempts', async () => {
|
||||
mockMessageCache.get.mockResolvedValue(null);
|
||||
|
||||
for (let i = 0; i < 6; i++) {
|
||||
await processChunks();
|
||||
}
|
||||
const result = await processChunks();
|
||||
|
||||
expect(result).toEqual([]);
|
||||
expect(Message.findOne).toHaveBeenCalledWith({ messageId: 'message-id' }, 'text unfinished');
|
||||
expect(Message.findOne().lean).toHaveBeenCalled();
|
||||
expect(result).toBe('Message not found after 6 attempts');
|
||||
});
|
||||
|
||||
it('should return chunks for an unfinished message with separators', async () => {
|
||||
it('should return chunks for an incomplete message with separators', async () => {
|
||||
const messageText = 'This is a long message. It should be split into chunks. Lol hi mom';
|
||||
Message.findOne().lean.mockResolvedValueOnce({ text: messageText, unfinished: true });
|
||||
mockMessageCache.get.mockResolvedValueOnce({ text: messageText, complete: false });
|
||||
|
||||
const result = await processChunks();
|
||||
|
||||
expect(result).toEqual([
|
||||
{ text: 'This is a long message. It should be split into chunks.', isFinished: false },
|
||||
]);
|
||||
expect(Message.findOne).toHaveBeenCalledWith({ messageId: 'message-id' }, 'text unfinished');
|
||||
expect(Message.findOne().lean).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return chunks for an unfinished message without separators', async () => {
|
||||
it('should return chunks for an incomplete message without separators', async () => {
|
||||
const messageText = 'This is a long message without separators hello there my friend';
|
||||
Message.findOne().lean.mockResolvedValueOnce({ text: messageText, unfinished: true });
|
||||
mockMessageCache.get.mockResolvedValueOnce({ text: messageText, complete: false });
|
||||
|
||||
const result = await processChunks();
|
||||
|
||||
expect(result).toEqual([{ text: messageText, isFinished: false }]);
|
||||
expect(Message.findOne).toHaveBeenCalledWith({ messageId: 'message-id' }, 'text unfinished');
|
||||
expect(Message.findOne().lean).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return the remaining text as a chunk for a finished message', async () => {
|
||||
it('should return the remaining text as a chunk for a complete message', async () => {
|
||||
const messageText = 'This is a finished message.';
|
||||
Message.findOne().lean.mockResolvedValueOnce({ text: messageText, unfinished: false });
|
||||
mockMessageCache.get.mockResolvedValueOnce({ text: messageText, complete: true });
|
||||
|
||||
const result = await processChunks();
|
||||
|
||||
expect(result).toEqual([{ text: messageText, isFinished: true }]);
|
||||
expect(Message.findOne).toHaveBeenCalledWith({ messageId: 'message-id' }, 'text unfinished');
|
||||
expect(Message.findOne().lean).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return an empty array for a finished message with no remaining text', async () => {
|
||||
it('should return an empty array for a complete message with no remaining text', async () => {
|
||||
const messageText = 'This is a finished message.';
|
||||
Message.findOne().lean.mockResolvedValueOnce({ text: messageText, unfinished: false });
|
||||
mockMessageCache.get.mockResolvedValueOnce({ text: messageText, complete: true });
|
||||
|
||||
await processChunks();
|
||||
Message.findOne().lean.mockResolvedValueOnce({ text: messageText, unfinished: false });
|
||||
mockMessageCache.get.mockResolvedValueOnce({ text: messageText, complete: true });
|
||||
const result = await processChunks();
|
||||
|
||||
expect(result).toEqual([]);
|
||||
expect(Message.findOne).toHaveBeenCalledWith({ messageId: 'message-id' }, 'text unfinished');
|
||||
expect(Message.findOne().lean).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should return an error message after MAX_NO_CHANGE_COUNT attempts with no change', async () => {
|
||||
const messageText = 'This is a message that does not change.';
|
||||
mockMessageCache.get.mockResolvedValue({ text: messageText, complete: false });
|
||||
|
||||
for (let i = 0; i < 11; i++) {
|
||||
await processChunks();
|
||||
}
|
||||
const result = await processChunks();
|
||||
|
||||
expect(result).toBe('No change in message after 10 attempts');
|
||||
});
|
||||
|
||||
it('should handle string messages as incomplete', async () => {
|
||||
const messageText = 'This is a message as a string.';
|
||||
mockMessageCache.get.mockResolvedValueOnce(messageText);
|
||||
|
||||
const result = await processChunks();
|
||||
|
||||
expect(result).toEqual([{ text: messageText, isFinished: false }]);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -442,7 +442,7 @@ async function streamAudio(req, res) {
      break;
    }
  } catch (innerError) {
    logger.error('Error processing update:', update, innerError);
    logger.error('Error processing audio stream update:', update, innerError);
    if (!res.headersSent) {
      res.status(500).end();
    }
@@ -1,17 +1,19 @@
|
||||
const throttle = require('lodash/throttle');
|
||||
const {
|
||||
Time,
|
||||
CacheKeys,
|
||||
StepTypes,
|
||||
ContentTypes,
|
||||
ToolCallTypes,
|
||||
// StepStatus,
|
||||
MessageContentTypes,
|
||||
AssistantStreamEvents,
|
||||
Constants,
|
||||
} = require('librechat-data-provider');
|
||||
const { retrieveAndProcessFile } = require('~/server/services/Files/process');
|
||||
const { processRequiredActions } = require('~/server/services/ToolService');
|
||||
const { saveMessage, updateMessageText } = require('~/models/Message');
|
||||
const { createOnProgress, sendMessage } = require('~/server/utils');
|
||||
const { createOnProgress, sendMessage, sleep } = require('~/server/utils');
|
||||
const { processMessages } = require('~/server/services/Threads');
|
||||
const { getLogStores } = require('~/cache');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
@@ -68,8 +70,8 @@ class StreamRunManager {
|
||||
this.attachedFileIds = fields.attachedFileIds;
|
||||
/** @type {undefined | Promise<ChatCompletion>} */
|
||||
this.visionPromise = fields.visionPromise;
|
||||
/** @type {boolean} */
|
||||
this.savedInitialMessage = false;
|
||||
/** @type {number} */
|
||||
this.streamRate = fields.streamRate ?? Constants.DEFAULT_STREAM_RATE;
|
||||
|
||||
/**
|
||||
* @type {Object.<AssistantStreamEvents, (event: AssistantStreamEvent) => Promise<void>>}
|
||||
@@ -139,11 +141,11 @@ class StreamRunManager {
|
||||
return this.intermediateText;
|
||||
}
|
||||
|
||||
/** Saves the initial intermediate message
|
||||
* @returns {Promise<void>}
|
||||
/** Returns the current, intermediate message
|
||||
* @returns {TMessage}
|
||||
*/
|
||||
async saveInitialMessage() {
|
||||
return saveMessage({
|
||||
getIntermediateMessage() {
|
||||
return {
|
||||
conversationId: this.finalMessage.conversationId,
|
||||
messageId: this.finalMessage.messageId,
|
||||
parentMessageId: this.parentMessageId,
|
||||
@@ -155,7 +157,7 @@ class StreamRunManager {
|
||||
sender: 'Assistant',
|
||||
unfinished: true,
|
||||
error: false,
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
/* <------------------ Main Event Handlers ------------------> */
|
||||
@@ -347,6 +349,8 @@ class StreamRunManager {
|
||||
type: ContentTypes.TOOL_CALL,
|
||||
index,
|
||||
});
|
||||
|
||||
await sleep(this.streamRate);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -444,6 +448,7 @@ class StreamRunManager {
|
||||
if (content && content.type === MessageContentTypes.TEXT) {
|
||||
this.intermediateText += content.text.value;
|
||||
onProgress(content.text.value);
|
||||
await sleep(this.streamRate);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -589,21 +594,14 @@ class StreamRunManager {
|
||||
const index = this.getStepIndex(stepKey);
|
||||
this.orderedRunSteps.set(index, message_creation);
|
||||
|
||||
const messageCache = getLogStores(CacheKeys.MESSAGES);
|
||||
// Create the Factory Function to stream the message
|
||||
const { onProgress: progressCallback } = createOnProgress({
|
||||
onProgress: throttle(
|
||||
() => {
|
||||
if (!this.savedInitialMessage) {
|
||||
this.saveInitialMessage();
|
||||
this.savedInitialMessage = true;
|
||||
} else {
|
||||
updateMessageText({
|
||||
messageId: this.finalMessage.messageId,
|
||||
text: this.getText(),
|
||||
});
|
||||
}
|
||||
messageCache.set(this.finalMessage.messageId, this.getText(), Time.FIVE_MINUTES);
|
||||
},
|
||||
2000,
|
||||
3000,
|
||||
{ trailing: false },
|
||||
),
|
||||
});
|
||||
|
||||
@@ -41,6 +41,7 @@ async function initThread({ openai, body, thread_id: _thread_id }) {
|
||||
/**
|
||||
* Saves a user message to the DB in the Assistants endpoint format.
|
||||
*
|
||||
* @param {Object} req - The request object.
|
||||
* @param {Object} params - The parameters of the user message
|
||||
* @param {string} params.user - The user's ID.
|
||||
* @param {string} params.text - The user's prompt.
|
||||
@@ -59,7 +60,7 @@ async function initThread({ openai, body, thread_id: _thread_id }) {
|
||||
* @param {string[]} [params.file_ids] - Optional. List of File IDs attached to the userMessage.
|
||||
* @return {Promise<Run>} A promise that resolves to the created run object.
|
||||
*/
|
||||
async function saveUserMessage(params) {
|
||||
async function saveUserMessage(req, params) {
|
||||
const tokenCount = await countTokens(params.text);
|
||||
|
||||
// todo: do this on the frontend
|
||||
@@ -110,14 +111,16 @@ async function saveUserMessage(params) {
|
||||
}
|
||||
|
||||
const message = await recordMessage(userMessage);
|
||||
await saveConvo(params.user, convo);
|
||||
|
||||
await saveConvo(req, convo, {
|
||||
context: 'api/server/services/Threads/manage.js #saveUserMessage',
|
||||
});
|
||||
return message;
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves an Assistant message to the DB in the Assistants endpoint format.
|
||||
*
|
||||
* @param {Object} req - The request object.
|
||||
* @param {Object} params - The parameters of the Assistant message
|
||||
* @param {string} params.user - The user's ID.
|
||||
* @param {string} params.messageId - The message Id.
|
||||
@@ -134,7 +137,7 @@ async function saveUserMessage(params) {
|
||||
* @param {string} [params.promptPrefix] - Optional: from preset for `additional_instructions` field.
|
||||
* @return {Promise<Run>} A promise that resolves to the created run object.
|
||||
*/
|
||||
async function saveAssistantMessage(params) {
|
||||
async function saveAssistantMessage(req, params) {
|
||||
// const tokenCount = // TODO: need to count each content part
|
||||
|
||||
const message = await recordMessage({
|
||||
@@ -154,14 +157,18 @@ async function saveAssistantMessage(params) {
|
||||
// tokenCount,
|
||||
});
|
||||
|
||||
await saveConvo(params.user, {
|
||||
endpoint: params.endpoint,
|
||||
conversationId: params.conversationId,
|
||||
promptPrefix: params.promptPrefix,
|
||||
instructions: params.instructions,
|
||||
assistant_id: params.assistant_id,
|
||||
model: params.model,
|
||||
});
|
||||
await saveConvo(
|
||||
req,
|
||||
{
|
||||
endpoint: params.endpoint,
|
||||
conversationId: params.conversationId,
|
||||
promptPrefix: params.promptPrefix,
|
||||
instructions: params.instructions,
|
||||
assistant_id: params.assistant_id,
|
||||
model: params.model,
|
||||
},
|
||||
{ context: 'api/server/services/Threads/manage.js #saveAssistantMessage' },
|
||||
);
|
||||
|
||||
return message;
|
||||
}
|
||||
@@ -338,10 +345,14 @@ async function syncMessages({
|
||||
await Promise.all(modifyPromises);
|
||||
await Promise.all(recordPromises);
|
||||
|
||||
await saveConvo(openai.req.user.id, {
|
||||
conversationId,
|
||||
file_ids: attached_file_ids,
|
||||
});
|
||||
await saveConvo(
|
||||
openai.req,
|
||||
{
|
||||
conversationId,
|
||||
file_ids: attached_file_ids,
|
||||
},
|
||||
{ context: 'api/server/services/Threads/manage.js #syncMessages' },
|
||||
);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -73,10 +73,6 @@ function loadAndFormatTools({ directory, adminFilter = [], adminIncluded = [] })
|
||||
continue;
|
||||
}
|
||||
|
||||
if (included.size > 0 && !included.has(file)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let toolInstance = null;
|
||||
try {
|
||||
toolInstance = new ToolClass({ override: true });
|
||||
@@ -92,6 +88,14 @@ function loadAndFormatTools({ directory, adminFilter = [], adminIncluded = [] })
|
||||
continue;
|
||||
}
|
||||
|
||||
if (filter.has(toolInstance.name) && included.size === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (included.size > 0 && !included.has(file) && !included.has(toolInstance.name)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const formattedTool = formatToOpenAIAssistantTool(toolInstance);
|
||||
tools.push(formattedTool);
|
||||
}
|
||||
|
||||
@@ -51,6 +51,7 @@ function assistantsConfigSetup(config, assistantsEndpoint, prevConfig = {}) {
|
||||
excludedIds: parsedConfig.excludedIds,
|
||||
privateAssistants: parsedConfig.privateAssistants,
|
||||
timeoutMs: parsedConfig.timeoutMs,
|
||||
streamRate: parsedConfig.streamRate,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -30,7 +30,8 @@ const sendMessage = (res, message, event = 'message') => {
|
||||
/**
|
||||
* Processes an error with provided options, saves the error message and sends a corresponding SSE response
|
||||
* @async
|
||||
* @param {object} res - The server response.
|
||||
* @param {object} req - The request.
|
||||
* @param {object} res - The response.
|
||||
* @param {object} options - The options for handling the error containing message properties.
|
||||
* @param {object} options.user - The user ID.
|
||||
* @param {string} options.sender - The sender of the message.
|
||||
@@ -41,7 +42,7 @@ const sendMessage = (res, message, event = 'message') => {
|
||||
* @param {boolean} options.shouldSaveMessage - [Optional] Whether the message should be saved. Default is true.
|
||||
* @param {function} callback - [Optional] The callback function to be executed.
|
||||
*/
|
||||
const sendError = async (res, options, callback) => {
|
||||
const sendError = async (req, res, options, callback) => {
|
||||
const {
|
||||
user,
|
||||
sender,
|
||||
@@ -69,7 +70,7 @@ const sendError = async (res, options, callback) => {
|
||||
}
|
||||
|
||||
if (shouldSaveMessage) {
|
||||
await saveMessage({ ...errorMessage, user });
|
||||
await saveMessage(req, { ...errorMessage, user });
|
||||
}
|
||||
|
||||
if (!errorMessage.error) {
|
||||
@@ -97,11 +98,12 @@ const sendError = async (res, options, callback) => {
|
||||
|
||||
/**
|
||||
* Sends the response based on whether headers have been sent or not.
|
||||
* @param {Express.Request} req - The server request.
|
||||
* @param {Express.Response} res - The server response.
|
||||
* @param {Object} data - The data to be sent.
|
||||
* @param {string} [errorMessage] - The error message, if any.
|
||||
*/
|
||||
const sendResponse = (res, data, errorMessage) => {
|
||||
const sendResponse = (req, res, data, errorMessage) => {
|
||||
if (!res.headersSent) {
|
||||
if (errorMessage) {
|
||||
return res.status(500).json({ error: errorMessage });
|
||||
@@ -110,7 +112,7 @@ const sendResponse = (res, data, errorMessage) => {
|
||||
}
|
||||
|
||||
if (errorMessage) {
|
||||
return sendError(res, { ...data, text: errorMessage });
|
||||
return sendError(req, res, { ...data, text: errorMessage });
|
||||
}
|
||||
return sendMessage(res, data);
|
||||
};
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const fs = require('fs');
|
||||
const LdapStrategy = require('passport-ldapauth');
|
||||
const { findUser, createUser, updateUser } = require('~/models/userMethods');
|
||||
const { isEnabled } = require('~/server/utils');
|
||||
const logger = require('~/utils/logger');
|
||||
|
||||
const {
|
||||
@@ -13,6 +14,7 @@ const {
|
||||
LDAP_FULL_NAME,
|
||||
LDAP_ID,
|
||||
LDAP_USERNAME,
|
||||
LDAP_TLS_REJECT_UNAUTHORIZED,
|
||||
} = process.env;
|
||||
|
||||
// Check required environment variables
|
||||
@@ -41,6 +43,7 @@ if (LDAP_ID) {
|
||||
if (LDAP_USERNAME) {
|
||||
searchAttributes.push(LDAP_USERNAME);
|
||||
}
|
||||
const rejectUnauthorized = isEnabled(LDAP_TLS_REJECT_UNAUTHORIZED);
|
||||
|
||||
const ldapOptions = {
|
||||
server: {
|
||||
@@ -52,6 +55,7 @@ const ldapOptions = {
|
||||
searchAttributes: [...new Set(searchAttributes)],
|
||||
...(LDAP_CA_CERT_PATH && {
|
||||
tlsOptions: {
|
||||
rejectUnauthorized,
|
||||
ca: (() => {
|
||||
try {
|
||||
return [fs.readFileSync(LDAP_CA_CERT_PATH)];
|
||||
|
||||
@@ -465,6 +465,12 @@
 * @memberof typedefs
 */

/**
 * @exports TBaseEndpoint
 * @typedef {import('librechat-data-provider').TBaseEndpoint} TBaseEndpoint
 * @memberof typedefs
 */

/**
 * @exports TEndpoint
 * @typedef {import('librechat-data-provider').TEndpoint} TEndpoint
@@ -1,45 +1,6 @@
|
||||
const z = require('zod');
|
||||
const { EModelEndpoint } = require('librechat-data-provider');
|
||||
|
||||
const models = [
|
||||
'text-davinci-003',
|
||||
'text-davinci-002',
|
||||
'text-davinci-001',
|
||||
'text-curie-001',
|
||||
'text-babbage-001',
|
||||
'text-ada-001',
|
||||
'davinci',
|
||||
'curie',
|
||||
'babbage',
|
||||
'ada',
|
||||
'code-davinci-002',
|
||||
'code-davinci-001',
|
||||
'code-cushman-002',
|
||||
'code-cushman-001',
|
||||
'davinci-codex',
|
||||
'cushman-codex',
|
||||
'text-davinci-edit-001',
|
||||
'code-davinci-edit-001',
|
||||
'text-embedding-ada-002',
|
||||
'text-similarity-davinci-001',
|
||||
'text-similarity-curie-001',
|
||||
'text-similarity-babbage-001',
|
||||
'text-similarity-ada-001',
|
||||
'text-search-davinci-doc-001',
|
||||
'text-search-curie-doc-001',
|
||||
'text-search-babbage-doc-001',
|
||||
'text-search-ada-doc-001',
|
||||
'code-search-babbage-code-001',
|
||||
'code-search-ada-code-001',
|
||||
'gpt2',
|
||||
'gpt-4',
|
||||
'gpt-4-0314',
|
||||
'gpt-4-32k',
|
||||
'gpt-4-32k-0314',
|
||||
'gpt-3.5-turbo',
|
||||
'gpt-3.5-turbo-0301',
|
||||
];
|
||||
|
||||
const openAIModels = {
|
||||
'gpt-4': 8187, // -5 from max
|
||||
'gpt-4-0613': 8187, // -5 from max
|
||||
@@ -49,6 +10,7 @@ const openAIModels = {
|
||||
'gpt-4-1106': 127990, // -10 from max
|
||||
'gpt-4-0125': 127990, // -10 from max
|
||||
'gpt-4o': 127990, // -10 from max
|
||||
'gpt-4o-mini': 127990, // -10 from max
|
||||
'gpt-4-turbo': 127990, // -10 from max
|
||||
'gpt-4-vision': 127990, // -10 from max
|
||||
'gpt-3.5-turbo': 16375, // -10 from max
|
||||
@@ -101,7 +63,6 @@ const anthropicModels = {
|
||||
|
||||
const aggregateModels = { ...openAIModels, ...googleModels, ...anthropicModels, ...cohereModels };
|
||||
|
||||
// Order is important here: by model series and context size (gpt-4 then gpt-3, ascending)
|
||||
const maxTokensMap = {
|
||||
[EModelEndpoint.azureOpenAI]: openAIModels,
|
||||
[EModelEndpoint.openAI]: aggregateModels,
|
||||
@@ -110,6 +71,24 @@ const maxTokensMap = {
|
||||
[EModelEndpoint.anthropic]: anthropicModels,
|
||||
};
|
||||
|
||||
/**
|
||||
* Finds the first matching pattern in the tokens map.
|
||||
* @param {string} modelName
|
||||
* @param {Record<string, number>} tokensMap
|
||||
* @returns {string|null}
|
||||
*/
|
||||
function findMatchingPattern(modelName, tokensMap) {
|
||||
const keys = Object.keys(tokensMap);
|
||||
for (let i = keys.length - 1; i >= 0; i--) {
|
||||
const modelKey = keys[i];
|
||||
if (modelName.includes(modelKey)) {
|
||||
return modelKey;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the maximum tokens for a given model name. If the exact model name isn't found,
|
||||
* it searches for partial matches within the model name, checking keys in reverse order.
|
||||
@@ -143,12 +122,11 @@ function getModelMaxTokens(modelName, endpoint = EModelEndpoint.openAI, endpoint
|
||||
return tokensMap[modelName];
|
||||
}
|
||||
|
||||
const keys = Object.keys(tokensMap);
|
||||
for (let i = keys.length - 1; i >= 0; i--) {
|
||||
if (modelName.includes(keys[i])) {
|
||||
const result = tokensMap[keys[i]];
|
||||
return result?.context ?? result;
|
||||
}
|
||||
const matchedPattern = findMatchingPattern(modelName, tokensMap);
|
||||
|
||||
if (matchedPattern) {
|
||||
const result = tokensMap[matchedPattern];
|
||||
return result?.context ?? result;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
@@ -181,15 +159,8 @@ function matchModelName(modelName, endpoint = EModelEndpoint.openAI) {
|
||||
return modelName;
|
||||
}
|
||||
|
||||
const keys = Object.keys(tokensMap);
|
||||
for (let i = keys.length - 1; i >= 0; i--) {
|
||||
const modelKey = keys[i];
|
||||
if (modelName.includes(modelKey)) {
|
||||
return modelKey;
|
||||
}
|
||||
}
|
||||
|
||||
return modelName;
|
||||
const matchedPattern = findMatchingPattern(modelName, tokensMap);
|
||||
return matchedPattern || modelName;
|
||||
}
|
||||
|
||||
const modelSchema = z.object({
|
||||
@@ -241,8 +212,47 @@ function processModelData(input) {
|
||||
return tokenConfig;
|
||||
}
|
||||
|
||||
const tiktokenModels = new Set([
|
||||
'text-davinci-003',
|
||||
'text-davinci-002',
|
||||
'text-davinci-001',
|
||||
'text-curie-001',
|
||||
'text-babbage-001',
|
||||
'text-ada-001',
|
||||
'davinci',
|
||||
'curie',
|
||||
'babbage',
|
||||
'ada',
|
||||
'code-davinci-002',
|
||||
'code-davinci-001',
|
||||
'code-cushman-002',
|
||||
'code-cushman-001',
|
||||
'davinci-codex',
|
||||
'cushman-codex',
|
||||
'text-davinci-edit-001',
|
||||
'code-davinci-edit-001',
|
||||
'text-embedding-ada-002',
|
||||
'text-similarity-davinci-001',
|
||||
'text-similarity-curie-001',
|
||||
'text-similarity-babbage-001',
|
||||
'text-similarity-ada-001',
|
||||
'text-search-davinci-doc-001',
|
||||
'text-search-curie-doc-001',
|
||||
'text-search-babbage-doc-001',
|
||||
'text-search-ada-doc-001',
|
||||
'code-search-babbage-code-001',
|
||||
'code-search-ada-code-001',
|
||||
'gpt2',
|
||||
'gpt-4',
|
||||
'gpt-4-0314',
|
||||
'gpt-4-32k',
|
||||
'gpt-4-32k-0314',
|
||||
'gpt-3.5-turbo',
|
||||
'gpt-3.5-turbo-0301',
|
||||
]);
|
||||
|
||||
module.exports = {
|
||||
tiktokenModels: new Set(models),
|
||||
tiktokenModels,
|
||||
maxTokensMap,
|
||||
inputSchema,
|
||||
modelSchema,
|
||||
|
||||
9 client/src/Providers/BookmarkContext.tsx Normal file
@@ -0,0 +1,9 @@
import { createContext, useContext } from 'react';
import type { TConversationTag } from 'librechat-data-provider';

type TBookmarkContext = { bookmarks: TConversationTag[] };

export const BookmarkContext = createContext<TBookmarkContext>({
  bookmarks: [],
} as TBookmarkContext);
export const useBookmarkContext = () => useContext(BookmarkContext);
@@ -7,6 +7,7 @@ export * from './SearchContext';
export * from './FileMapContext';
export * from './AddedChatContext';
export * from './ChatFormContext';
export * from './BookmarkContext';
export * from './DashboardContext';
export * from './AssistantsContext';
export * from './AssistantsMapContext';
@@ -83,7 +83,7 @@ export type NavLink = {
  label?: string;
  icon: LucideIcon | React.FC;
  Component?: React.ComponentType;
  onClick?: () => void;
  onClick?: (e?: React.MouseEvent) => void;
  variant?: 'default' | 'ghost';
  id: string;
};
@@ -1,5 +1,6 @@
|
||||
import { useForm } from 'react-hook-form';
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { useGetStartupConfig } from 'librechat-data-provider/react-query';
|
||||
import type { TLoginUser, TStartupConfig } from 'librechat-data-provider';
|
||||
import type { TAuthContext } from '~/common';
|
||||
import { useResendVerificationEmail } from '~/data-provider';
|
||||
@@ -22,6 +23,9 @@ const LoginForm: React.FC<TLoginFormProps> = ({ onSubmit, startupConfig, error,
|
||||
} = useForm<TLoginUser>();
|
||||
const [showResendLink, setShowResendLink] = useState<boolean>(false);
|
||||
|
||||
const { data: config } = useGetStartupConfig();
|
||||
const useUsernameLogin = config?.ldap?.username;
|
||||
|
||||
useEffect(() => {
|
||||
if (error && error.includes('422') && !showResendLink) {
|
||||
setShowResendLink(true);
|
||||
@@ -82,12 +86,15 @@ const LoginForm: React.FC<TLoginFormProps> = ({ onSubmit, startupConfig, error,
|
||||
<input
|
||||
type="text"
|
||||
id="email"
|
||||
autoComplete="email"
|
||||
autoComplete={useUsernameLogin ? 'username' : 'email'}
|
||||
aria-label={localize('com_auth_email')}
|
||||
{...register('email', {
|
||||
required: localize('com_auth_email_required'),
|
||||
maxLength: { value: 120, message: localize('com_auth_email_max_length') },
|
||||
pattern: { value: /\S+@\S+\.\S+/, message: localize('com_auth_email_pattern') },
|
||||
pattern: {
|
||||
value: useUsernameLogin ? /\S+/ : /\S+@\S+\.\S+/,
|
||||
message: localize('com_auth_email_pattern'),
|
||||
},
|
||||
})}
|
||||
aria-invalid={!!errors.email}
|
||||
className="webkit-dark-styles peer block w-full appearance-none rounded-md border border-gray-300 bg-transparent px-3.5 pb-3.5 pt-4 text-sm text-gray-900 focus:border-green-500 focus:outline-none focus:ring-0 dark:border-gray-600 dark:text-white dark:focus:border-green-500"
|
||||
@@ -97,7 +104,9 @@ const LoginForm: React.FC<TLoginFormProps> = ({ onSubmit, startupConfig, error,
|
||||
htmlFor="email"
|
||||
className="absolute start-1 top-2 z-10 origin-[0] -translate-y-4 scale-75 transform bg-white px-3 text-sm text-gray-500 duration-100 peer-placeholder-shown:top-1/2 peer-placeholder-shown:-translate-y-1/2 peer-placeholder-shown:scale-100 peer-focus:top-2 peer-focus:-translate-y-4 peer-focus:scale-75 peer-focus:px-3 peer-focus:text-green-600 dark:bg-gray-900 dark:text-gray-400 dark:peer-focus:text-green-500 rtl:peer-focus:left-auto rtl:peer-focus:translate-x-1/4"
|
||||
>
|
||||
{localize('com_auth_email_address')}
|
||||
{useUsernameLogin
|
||||
? localize('com_auth_username').replace(/ \(.*$/, '')
|
||||
: localize('com_auth_email_address')}
|
||||
</label>
|
||||
</div>
|
||||
{renderError('email')}
|
||||
|
||||
@@ -21,7 +21,9 @@ const mockStartupConfig = {
|
||||
openidLoginEnabled: true,
|
||||
openidLabel: 'Test OpenID',
|
||||
openidImageUrl: 'http://test-server.com',
|
||||
ldapLoginEnabled: false,
|
||||
ldap: {
|
||||
enabled: false,
|
||||
},
|
||||
registrationEnabled: true,
|
||||
emailLoginEnabled: true,
|
||||
socialLoginEnabled: true,
|
||||
|
||||
@@ -23,7 +23,9 @@ const mockStartupConfig: TStartupConfig = {
|
||||
passwordResetEnabled: true,
|
||||
serverDomain: 'mock-server',
|
||||
appTitle: '',
|
||||
ldapLoginEnabled: false,
|
||||
ldap: {
|
||||
enabled: false,
|
||||
},
|
||||
emailEnabled: false,
|
||||
checkBalance: false,
|
||||
showBirthdayIcon: false,
|
||||
|
||||
69 client/src/components/Bookmarks/BookmarkEditDialog.tsx Normal file
@@ -0,0 +1,69 @@
|
||||
import React, { useRef, useState } from 'react';
|
||||
import { TConversationTag, TConversation } from 'librechat-data-provider';
|
||||
import OGDialogTemplate from '~/components/ui/OGDialogTemplate';
|
||||
import { OGDialog, OGDialogTrigger, OGDialogClose } from '~/components/ui/';
|
||||
import BookmarkForm from './BookmarkForm';
|
||||
import { useLocalize } from '~/hooks';
|
||||
import { Spinner } from '../svg';
|
||||
|
||||
type BookmarkEditDialogProps = {
|
||||
bookmark?: TConversationTag;
|
||||
conversation?: TConversation;
|
||||
tags?: string[];
|
||||
setTags?: (tags: string[]) => void;
|
||||
trigger: React.ReactNode;
|
||||
};
|
||||
const BookmarkEditDialog = ({
|
||||
bookmark,
|
||||
conversation,
|
||||
tags,
|
||||
setTags,
|
||||
trigger,
|
||||
}: BookmarkEditDialogProps) => {
|
||||
const localize = useLocalize();
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [open, setOpen] = useState(false);
|
||||
const formRef = useRef<HTMLFormElement>(null);
|
||||
|
||||
const handleSubmitForm = () => {
|
||||
if (formRef.current) {
|
||||
formRef.current.dispatchEvent(new Event('submit', { cancelable: true, bubbles: true }));
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<OGDialog open={open} onOpenChange={setOpen}>
|
||||
<OGDialogTrigger asChild>{trigger}</OGDialogTrigger>
|
||||
<OGDialogTemplate
|
||||
title="Bookmark"
|
||||
className="w-11/12 sm:w-1/4"
|
||||
showCloseButton={false}
|
||||
main={
|
||||
<BookmarkForm
|
||||
conversation={conversation}
|
||||
onOpenChange={setOpen}
|
||||
setIsLoading={setIsLoading}
|
||||
bookmark={bookmark}
|
||||
formRef={formRef}
|
||||
setTags={setTags}
|
||||
tags={tags}
|
||||
/>
|
||||
}
|
||||
buttons={
|
||||
<OGDialogClose asChild>
|
||||
<button
|
||||
type="submit"
|
||||
disabled={isLoading}
|
||||
onClick={handleSubmitForm}
|
||||
className="btn rounded bg-green-500 font-bold text-white transition-all hover:bg-green-600"
|
||||
>
|
||||
{isLoading ? <Spinner /> : localize('com_ui_save')}
|
||||
</button>
|
||||
</OGDialogClose>
|
||||
}
|
||||
/>
|
||||
</OGDialog>
|
||||
);
|
||||
};
|
||||
|
||||
export default BookmarkEditDialog;
|
||||
195 client/src/components/Bookmarks/BookmarkForm.tsx Normal file
@@ -0,0 +1,195 @@
|
||||
import React, { useEffect } from 'react';
|
||||
import { Controller, useForm } from 'react-hook-form';
|
||||
import type {
|
||||
TConversationTag,
|
||||
TConversation,
|
||||
TConversationTagRequest,
|
||||
} from 'librechat-data-provider';
|
||||
import { cn, removeFocusOutlines, defaultTextProps } from '~/utils/';
|
||||
import { useBookmarkContext } from '~/Providers/BookmarkContext';
|
||||
import { useConversationTagMutation } from '~/data-provider';
|
||||
import { Checkbox, Label, TextareaAutosize } from '~/components/ui/';
|
||||
import { NotificationSeverity } from '~/common';
|
||||
import { useToastContext } from '~/Providers';
|
||||
import { useLocalize } from '~/hooks';
|
||||
|
||||
type TBookmarkFormProps = {
|
||||
bookmark?: TConversationTag;
|
||||
conversation?: TConversation;
|
||||
onOpenChange: React.Dispatch<React.SetStateAction<boolean>>;
|
||||
formRef: React.RefObject<HTMLFormElement>;
|
||||
setIsLoading: React.Dispatch<React.SetStateAction<boolean>>;
|
||||
tags?: string[];
|
||||
setTags?: (tags: string[]) => void;
|
||||
};
|
||||
const BookmarkForm = ({
|
||||
bookmark,
|
||||
conversation,
|
||||
onOpenChange,
|
||||
formRef,
|
||||
setIsLoading,
|
||||
tags,
|
||||
setTags,
|
||||
}: TBookmarkFormProps) => {
|
||||
const { showToast } = useToastContext();
|
||||
const localize = useLocalize();
|
||||
const mutation = useConversationTagMutation(bookmark?.tag);
|
||||
const { bookmarks } = useBookmarkContext();
|
||||
|
||||
const {
|
||||
register,
|
||||
handleSubmit,
|
||||
setValue,
|
||||
getValues,
|
||||
control,
|
||||
formState: { errors },
|
||||
} = useForm<TConversationTagRequest>({
|
||||
defaultValues: {
|
||||
tag: bookmark?.tag || '',
|
||||
description: bookmark?.description || '',
|
||||
conversationId: conversation?.conversationId || '',
|
||||
addToConversation: conversation ? true : false,
|
||||
},
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (bookmark) {
|
||||
setValue('tag', bookmark.tag || '');
|
||||
setValue('description', bookmark.description || '');
|
||||
}
|
||||
}, [bookmark, setValue]);
|
||||
|
||||
const onSubmit = (data: TConversationTagRequest) => {
|
||||
if (mutation.isLoading) {
|
||||
return;
|
||||
}
|
||||
if (data.tag === bookmark?.tag && data.description === bookmark?.description) {
|
||||
return;
|
||||
}
|
||||
|
||||
setIsLoading(true);
|
||||
mutation.mutate(data, {
|
||||
onSuccess: () => {
|
||||
showToast({
|
||||
message: bookmark
|
||||
? localize('com_ui_bookmarks_update_success')
|
||||
: localize('com_ui_bookmarks_create_success'),
|
||||
});
|
||||
setIsLoading(false);
|
||||
onOpenChange(false);
|
||||
if (setTags && data.addToConversation) {
|
||||
const newTags = [...(tags || []), data.tag].filter(
|
||||
(tag) => tag !== undefined,
|
||||
) as string[];
|
||||
setTags(newTags);
|
||||
}
|
||||
},
|
||||
onError: () => {
|
||||
showToast({
|
||||
message: bookmark
|
||||
? localize('com_ui_bookmarks_update_error')
|
||||
: localize('com_ui_bookmarks_create_error'),
|
||||
severity: NotificationSeverity.ERROR,
|
||||
});
|
||||
setIsLoading(false);
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<form
|
||||
ref={formRef}
|
||||
className="mt-6"
|
||||
aria-label="Bookmark form"
|
||||
method="POST"
|
||||
onSubmit={handleSubmit(onSubmit)}
|
||||
>
|
||||
<div className="flex w-full flex-col items-center gap-2">
|
||||
<div className="grid w-full items-center gap-2">
|
||||
<Label htmlFor="bookmark-tag" className="text-left text-sm font-medium">
|
||||
{localize('com_ui_bookmarks_title')}
|
||||
</Label>
|
||||
<input
|
||||
type="text"
|
||||
id="bookmark-tag"
|
||||
aria-label="Bookmark"
|
||||
{...register('tag', {
|
||||
required: 'tag is required',
|
||||
maxLength: {
|
||||
value: 128,
|
||||
message: localize('com_auth_password_max_length'),
|
||||
},
|
||||
validate: (value) => {
|
||||
return (
|
||||
value === bookmark?.tag ||
|
||||
bookmarks.every((bookmark) => bookmark.tag !== value) ||
|
||||
'tag must be unique'
|
||||
);
|
||||
},
|
||||
})}
|
||||
aria-invalid={!!errors.tag}
|
||||
className={cn(
|
||||
defaultTextProps,
|
||||
'flex h-10 max-h-10 w-full resize-none px-3 py-2',
|
||||
removeFocusOutlines,
|
||||
)}
|
||||
placeholder=" "
|
||||
/>
|
||||
{errors.tag && <span className="text-sm text-red-500">{errors.tag.message}</span>}
|
||||
</div>
|
||||
|
||||
<div className="grid w-full items-center gap-2">
|
||||
<Label htmlFor="bookmark-description" className="text-left text-sm font-medium">
|
||||
{localize('com_ui_bookmarks_description')}
|
||||
</Label>
|
||||
<TextareaAutosize
|
||||
{...register('description', {
|
||||
maxLength: {
|
||||
value: 1048,
|
||||
message: 'Maximum 1048 characters',
|
||||
},
|
||||
})}
|
||||
id="bookmark-description"
|
||||
disabled={false}
|
||||
className={cn(
|
||||
defaultTextProps,
|
||||
'flex max-h-[138px] min-h-[100px] w-full resize-none px-3 py-2',
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
{conversation && (
|
||||
<div className="mt-2 flex w-full items-center">
|
||||
<Controller
|
||||
name="addToConversation"
|
||||
control={control}
|
||||
render={({ field }) => (
|
||||
<Checkbox
|
||||
{...field}
|
||||
checked={field.value}
|
||||
onCheckedChange={field.onChange}
|
||||
className="relative float-left mr-2 inline-flex h-4 w-4 cursor-pointer"
|
||||
value={field?.value?.toString()}
|
||||
/>
|
||||
)}
|
||||
/>
|
||||
<label
|
||||
className="form-check-label text-token-text-primary w-full cursor-pointer"
|
||||
htmlFor="addToConversation"
|
||||
onClick={() =>
|
||||
setValue('addToConversation', !getValues('addToConversation'), {
|
||||
shouldDirty: true,
|
||||
})
|
||||
}
|
||||
>
|
||||
<div className="flex select-none items-center">
|
||||
{localize('com_ui_bookmarks_add_to_conversation')}
|
||||
</div>
|
||||
</label>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</form>
|
||||
);
|
||||
};
|
||||
|
||||
export default BookmarkForm;
|
||||
74  client/src/components/Bookmarks/BookmarkItem.tsx  Normal file
@@ -0,0 +1,74 @@
import { useState } from 'react';
import { BookmarkFilledIcon, BookmarkIcon } from '@radix-ui/react-icons';
import type { FC } from 'react';
import { Spinner } from '~/components/svg';
import { cn } from '~/utils';

type MenuItemProps = {
  tag: string;
  selected: boolean;
  count?: number;
  handleSubmit: (tag: string) => Promise<void>;
  icon?: React.ReactNode;
  highlightSelected?: boolean;
};

const BookmarkItem: FC<MenuItemProps> = ({
  tag,
  selected,
  count,
  handleSubmit,
  icon,
  highlightSelected,
  ...rest
}) => {
  const [isLoading, setIsLoading] = useState(false);
  const clickHandler = async () => {
    setIsLoading(true);
    await handleSubmit(tag);
    setIsLoading(false);
  };
  return (
    <div
      role="menuitem"
      className={cn(
        'group m-1.5 flex cursor-pointer gap-2 rounded px-2 py-2.5 !pr-3 text-sm !opacity-100 focus:ring-0 radix-disabled:pointer-events-none radix-disabled:opacity-50',
        'hover:bg-black/5 dark:hover:bg-white/5',
        highlightSelected && selected && 'bg-black/5 dark:bg-white/5',
      )}
      tabIndex={-1}
      {...rest}
      onClick={clickHandler}
    >
      <div className="flex grow items-center justify-between gap-2">
        <div className="flex items-center gap-2">
          {icon ? (
            icon
          ) : isLoading ? (
            <Spinner className="size-4" />
          ) : selected ? (
            <BookmarkFilledIcon className="size-4" />
          ) : (
            <BookmarkIcon className="size-4" />
          )}
          <div className="break-all">{tag}</div>
        </div>

        {count !== undefined && (
          <div className="flex items-center justify-end">
            <span
              className={cn(
                'ml-auto w-9 min-w-max whitespace-nowrap rounded-md bg-white px-2.5 py-0.5 text-center text-xs font-medium leading-5 text-gray-600',
                'dark:bg-gray-800 dark:text-white',
              )}
              aria-hidden="true"
            >
              {count}
            </span>
          </div>
        )}
      </div>
    </div>
  );
};
export default BookmarkItem;
30  client/src/components/Bookmarks/BookmarkItems.tsx  Normal file
@@ -0,0 +1,30 @@
import type { FC } from 'react';
import { useBookmarkContext } from '~/Providers/BookmarkContext';
import BookmarkItem from './BookmarkItem';

const BookmarkItems: FC<{
  tags: string[];
  handleSubmit: (tag: string) => Promise<void>;
  header: React.ReactNode;
  highlightSelected?: boolean;
}> = ({ tags, handleSubmit, header, highlightSelected }) => {
  const { bookmarks } = useBookmarkContext();
  return (
    <>
      {header}
      <div className="my-1.5 h-px bg-black/10 dark:bg-white/10" role="none" />
      {bookmarks.length > 0 &&
        bookmarks.map((bookmark) => (
          <BookmarkItem
            key={bookmark.tag}
            tag={bookmark.tag}
            selected={tags.includes(bookmark.tag)}
            count={bookmark.count}
            handleSubmit={handleSubmit}
            highlightSelected={highlightSelected}
          />
        ))}
    </>
  );
};
export default BookmarkItems;
58  client/src/components/Bookmarks/DeleteBookmarkButton.tsx  Normal file
@@ -0,0 +1,58 @@
import { useCallback } from 'react';
import type { FC } from 'react';
import { useDeleteConversationTagMutation } from '~/data-provider';
import TooltipIcon from '~/components/ui/TooltipIcon';
import { NotificationSeverity } from '~/common';
import { useToastContext } from '~/Providers';
import { TrashIcon } from '~/components/svg';
import { Label } from '~/components/ui';
import { useLocalize } from '~/hooks';

const DeleteBookmarkButton: FC<{
  bookmark: string;
  tabIndex?: number;
  onFocus?: () => void;
  onBlur?: () => void;
}> = ({ bookmark, tabIndex = 0, onFocus, onBlur }) => {
  const localize = useLocalize();
  const { showToast } = useToastContext();

  const deleteBookmarkMutation = useDeleteConversationTagMutation({
    onSuccess: () => {
      showToast({
        message: localize('com_ui_bookmarks_delete_success'),
      });
    },
    onError: () => {
      showToast({
        message: localize('com_ui_bookmarks_delete_error'),
        severity: NotificationSeverity.ERROR,
      });
    },
  });

  const confirmDelete = useCallback(async () => {
    await deleteBookmarkMutation.mutateAsync(bookmark);
  }, [bookmark, deleteBookmarkMutation]);

  return (
    <TooltipIcon
      disabled={false}
      appendLabel={false}
      title="Delete Bookmark"
      confirmMessage={
        <Label htmlFor="bookmark" className="text-left text-sm font-medium">
          {localize('com_ui_bookmark_delete_confirm')} {bookmark}
        </Label>
      }
      confirm={confirmDelete}
      className="transition-color flex h-7 w-7 min-w-7 items-center justify-center rounded-lg duration-200 hover:bg-gray-200 dark:hover:bg-gray-700"
      icon={<TrashIcon className="size-4" />}
      tabIndex={tabIndex}
      onFocus={onFocus}
      onBlur={onBlur}
    />
  );
};

export default DeleteBookmarkButton;
42  client/src/components/Bookmarks/EditBookmarkButton.tsx  Normal file
@@ -0,0 +1,42 @@
import type { FC } from 'react';
import type { TConversationTag } from 'librechat-data-provider';
import BookmarkEditDialog from './BookmarkEditDialog';
import { EditIcon } from '~/components/svg';
import { useLocalize } from '~/hooks';
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '~/components/ui';

const EditBookmarkButton: FC<{
  bookmark: TConversationTag;
  tabIndex?: number;
  onFocus?: () => void;
  onBlur?: () => void;
}> = ({ bookmark, tabIndex = 0, onFocus, onBlur }) => {
  const localize = useLocalize();
  return (
    <BookmarkEditDialog
      bookmark={bookmark}
      trigger={
        <button
          type="button"
          className="transition-color flex h-7 w-7 min-w-7 items-center justify-center rounded-lg duration-200 hover:bg-gray-200 dark:hover:bg-gray-700"
          tabIndex={tabIndex}
          onFocus={onFocus}
          onBlur={onBlur}
        >
          <TooltipProvider delayDuration={250}>
            <Tooltip>
              <TooltipTrigger asChild>
                <EditIcon />
              </TooltipTrigger>
              <TooltipContent side="top" sideOffset={0}>
                {localize('com_ui_edit')}
              </TooltipContent>
            </Tooltip>
          </TooltipProvider>
        </button>
      }
    />
  );
};

export default EditBookmarkButton;
6  client/src/components/Bookmarks/index.ts  Normal file
@@ -0,0 +1,6 @@
export { default as DeleteBookmarkButton } from './DeleteBookmarkButton';
export { default as EditBookmarkButton } from './EditBookmarkButton';
export { default as BookmarkEditDialog } from './BookmarkEditDialog';
export { default as BookmarkItems } from './BookmarkItems';
export { default as BookmarkItem } from './BookmarkItem';
export { default as BookmarkForm } from './BookmarkForm';
@@ -6,6 +6,7 @@ import type { ContextType } from '~/common';
import { EndpointsMenu, ModelSpecsMenu, PresetsMenu, HeaderNewChat } from './Menus';
import ExportAndShareMenu from './ExportAndShareMenu';
import HeaderOptions from './Input/HeaderOptions';
import BookmarkMenu from './Menus/BookmarkMenu';
import AddMultiConvo from './AddMultiConvo';
import { useMediaQuery } from '~/hooks';

@@ -37,6 +38,7 @@ export default function Header() {
              className="pl-0"
            />
          )}
          <BookmarkMenu />
          <AddMultiConvo />
        </div>
        {!isSmallScreen && (
@@ -4,16 +4,19 @@ import { ListeningIcon, Spinner } from '~/components/svg';
import { useLocalize, useSpeechToText } from '~/hooks';
import { useChatFormContext } from '~/Providers';
import { globalAudioId } from '~/common';
import { cn } from '~/utils';

export default function AudioRecorder({
  textAreaRef,
  methods,
  ask,
  isRTL,
  disabled,
}: {
  textAreaRef: React.RefObject<HTMLTextAreaElement>;
  methods: ReturnType<typeof useChatFormContext>;
  ask: (data: { text: string }) => void;
  isRTL: boolean;
  disabled: boolean;
}) {
  const localize = useLocalize();
@@ -77,7 +80,12 @@ export default function AudioRecorder({
      <button
        onClick={isListening ? handleStopRecording : handleStartRecording}
        disabled={disabled}
        className="absolute bottom-1.5 right-12 flex h-[30px] w-[30px] items-center justify-center rounded-lg p-0.5 transition-colors hover:bg-gray-200 dark:hover:bg-gray-700 md:bottom-3 md:right-12"
        className={cn(
          'absolute flex h-[30px] w-[30px] items-center justify-center rounded-lg p-0.5 transition-colors hover:bg-gray-200 dark:hover:bg-gray-700',
          isRTL
            ? 'bottom-1.5 left-4 md:bottom-3 md:left-12'
            : 'bottom-1.5 right-12 md:bottom-3 md:right-12',
        )}
        type="button"
      >
        {renderIcon()}
@@ -38,8 +38,8 @@ const ChatForm = ({ index = 0 }) => {
  const submitButtonRef = useRef<HTMLButtonElement>(null);
  const textAreaRef = useRef<HTMLTextAreaElement | null>(null);

  const SpeechToText = useRecoilValue(store.speechToText);
  const TextToSpeech = useRecoilValue(store.textToSpeech);
  const SpeechToText = useRecoilState<boolean>(store.speechToText);
  const TextToSpeech = useRecoilState<boolean>(store.textToSpeech);
  const automaticPlayback = useRecoilValue(store.automaticPlayback);

  const [showStopButton, setShowStopButton] = useRecoilState(store.showStopButtonByIndex(index));
@@ -48,6 +48,9 @@ const ChatForm = ({ index = 0 }) => {
    store.showMentionPopoverFamily(index),
  );

  const chatDirection = useRecoilValue(store.chatDirection).toLowerCase();
  const isRTL = chatDirection === 'rtl';

  const { requiresKey } = useRequiresKey();
  const handleKeyUp = useHandleKeyUp({
    index,
@@ -149,6 +152,7 @@ const ChatForm = ({ index = 0 }) => {
          files={files}
          setFiles={setFiles}
          setFilesLoading={setFilesLoading}
          isRTL={isRTL}
          Wrapper={({ children }) => (
            <div className="mx-2 mt-2 flex flex-wrap gap-2 px-2.5 md:pl-0 md:pr-4">
              {children}
@@ -179,7 +183,7 @@ const ChatForm = ({ index = 0 }) => {
              ? ' pl-10 md:pl-[55px]'
              : 'pl-3 md:pl-4',
            'm-0 w-full resize-none border-0 bg-transparent py-[10px] placeholder-black/50 focus:ring-0 focus-visible:ring-0 dark:bg-transparent dark:placeholder-white/50 md:py-3.5 ',
            SpeechToText ? 'pr-20 md:pr-[85px]' : 'pr-10 md:pr-12',
            SpeechToText && !isRTL ? 'pr-20 md:pr-[85px]' : 'pr-10 md:pr-12',
            'max-h-[65vh] md:max-h-[75vh]',
            removeFocusRings,
          )}
@@ -188,15 +192,21 @@ const ChatForm = ({ index = 0 }) => {
          <AttachFile
            endpoint={_endpoint ?? ''}
            endpointType={endpointType}
            isRTL={isRTL}
            disabled={disableInputs}
          />
          {(isSubmitting || isSubmittingAdded) && (showStopButton || showStopAdded) ? (
            <StopButton stop={handleStopGenerating} setShowStopButton={setShowStopButton} />
            <StopButton
              stop={handleStopGenerating}
              setShowStopButton={setShowStopButton}
              isRTL={isRTL}
            />
          ) : (
            endpoint && (
              <SendButton
                ref={submitButtonRef}
                control={methods.control}
                isRTL={isRTL}
                disabled={!!(filesLoading || isSubmitting || disableInputs)}
              />
            )
@@ -206,6 +216,7 @@ const ChatForm = ({ index = 0 }) => {
            disabled={!!disableInputs}
            textAreaRef={textAreaRef}
            ask={submitMessage}
            isRTL={isRTL}
            methods={methods}
          />
        )}
@@ -9,14 +9,17 @@ import { useGetFileConfig } from '~/data-provider';
import { AttachmentIcon } from '~/components/svg';
import { FileUpload } from '~/components/ui';
import { useFileHandling } from '~/hooks';
import { cn } from '~/utils';

const AttachFile = ({
  endpoint,
  endpointType,
  isRTL,
  disabled = false,
}: {
  endpoint: EModelEndpoint | '';
  endpointType?: EModelEndpoint;
  isRTL: boolean;
  disabled?: boolean | null;
}) => {
  const { handleFileChange } = useFileHandling();
@@ -30,7 +33,14 @@ const AttachFile = ({
  }

  return (
    <div className="absolute bottom-2 left-2 md:bottom-3 md:left-4">
    <div
      className={cn(
        'absolute',
        isRTL
          ? 'bottom-2 right-14 md:bottom-3.5 md:right-3'
          : 'bottom-2 left-2 md:bottom-3.5 md:left-4',
      )}
    >
      <FileUpload handleFileChange={handleFileChange} className="flex">
        <button
          disabled={!!disabled}
@@ -13,6 +13,7 @@ export default function FileRow({
  assistant_id,
  tool_resource,
  fileFilter,
  isRTL,
  Wrapper,
}: {
  files: Map<string, ExtendedFile>;
@@ -21,6 +22,7 @@ export default function FileRow({
  fileFilter?: (file: ExtendedFile) => boolean;
  assistant_id?: string;
  tool_resource?: EToolResources;
  isRTL?: boolean;
  Wrapper?: React.FC<{ children: React.ReactNode }>;
}) {
  const files = Array.from(_files.values()).filter((file) =>
@@ -64,8 +66,11 @@ export default function FileRow({
  }

  const renderFiles = () => {
    // Inline style for RTL
    const rowStyle = isRTL ? { display: 'flex', flexDirection: 'row-reverse' } : {};

    return (
      <>
      <div style={rowStyle as React.CSSProperties}>
        {files
          .reduce(
            (acc, current) => {
@@ -90,10 +95,9 @@ export default function FileRow({
              />
            );
          }

          return <FileContainer key={index} file={file} onDelete={handleDelete} />;
        })}
      </>
      </div>
    );
  };
@@ -2,7 +2,7 @@ export default function RemoveFile({ onRemove }: { onRemove: () => void }) {
  return (
    <button
      type="button"
      className="absolute right-1 top-1 -translate-y-1/2 translate-x-1/2 rounded-full border border-white bg-gray-500 p-0.5 text-white transition-colors hover:bg-black hover:opacity-100 group-hover:opacity-100 md:opacity-0"
      className="absolute right-1 top-1 -translate-y-1/2 translate-x-1/2 rounded-full border border-gray-500 bg-gray-500 p-0.5 text-white transition-colors hover:bg-gray-700 hover:opacity-100 group-hover:opacity-100 md:opacity-0"
      onClick={onRemove}
    >
      <span>
@@ -15,6 +15,8 @@ export default function RemoveFile({ onRemove }: { onRemove: () => void }) {
          strokeLinejoin="round"
          className="icon-sm"
          xmlns="http://www.w3.org/2000/svg"
          width="32"
          height="32"
        >
          <line x1="18" y1="6" x2="6" y2="18" />
          <line x1="6" y1="6" x2="18" y2="18" />
@@ -32,7 +32,7 @@ import {
  DropdownMenuTrigger,
} from '~/components/ui';
import { useDeleteFilesFromTable } from '~/hooks/Files';
import { NewTrashIcon, Spinner } from '~/components/svg';
import { TrashIcon, Spinner } from '~/components/svg';
import useLocalize from '~/hooks/useLocalize';

interface DataTableProps<TData, TValue> {
@@ -102,7 +102,7 @@ export default function DataTable<TData, TValue>({ columns, data }: DataTablePro
        {isDeleting ? (
          <Spinner className="h-4 w-4" />
        ) : (
          <NewTrashIcon className="h-4 w-4 text-red-400" />
          <TrashIcon className="h-4 w-4 text-red-400" />
        )}
        {localize('com_ui_delete')}
      </Button>
@@ -9,42 +9,48 @@ import { cn } from '~/utils';
|
||||
type SendButtonProps = {
|
||||
disabled: boolean;
|
||||
control: Control<{ text: string }>;
|
||||
isRTL: boolean;
|
||||
};
|
||||
|
||||
const SubmitButton = React.memo(
|
||||
forwardRef((props: { disabled: boolean }, ref: React.ForwardedRef<HTMLButtonElement>) => {
|
||||
const localize = useLocalize();
|
||||
return (
|
||||
<TooltipProvider delayDuration={250}>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
ref={ref}
|
||||
disabled={props.disabled}
|
||||
className={cn(
|
||||
'absolute bottom-1.5 right-2 rounded-lg border border-black p-0.5 text-white transition-colors enabled:bg-black disabled:bg-black disabled:text-gray-400 disabled:opacity-10 dark:border-white dark:bg-white dark:disabled:bg-white md:bottom-3 md:right-3',
|
||||
)}
|
||||
data-testid="send-button"
|
||||
type="submit"
|
||||
>
|
||||
<span className="" data-state="closed">
|
||||
<SendIcon size={24} />
|
||||
</span>
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="top" sideOffset={10}>
|
||||
{localize('com_nav_send_message')}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
);
|
||||
}),
|
||||
forwardRef(
|
||||
(props: { disabled: boolean; isRTL: boolean }, ref: React.ForwardedRef<HTMLButtonElement>) => {
|
||||
const localize = useLocalize();
|
||||
return (
|
||||
<TooltipProvider delayDuration={250}>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
ref={ref}
|
||||
disabled={props.disabled}
|
||||
className={cn(
|
||||
'absolute rounded-lg border border-black p-0.5 text-white transition-colors enabled:bg-black disabled:bg-black disabled:text-gray-400 disabled:opacity-10 dark:border-white dark:bg-white dark:disabled:bg-white',
|
||||
props.isRTL
|
||||
? 'bottom-1.5 left-2 md:bottom-3 md:left-3'
|
||||
: 'bottom-1.5 right-2 md:bottom-3 md:right-3',
|
||||
)}
|
||||
data-testid="send-button"
|
||||
type="submit"
|
||||
>
|
||||
<span className="" data-state="closed">
|
||||
<SendIcon size={24} />
|
||||
</span>
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="top" sideOffset={10}>
|
||||
{localize('com_nav_send_message')}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
);
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
const SendButton = React.memo(
|
||||
forwardRef((props: SendButtonProps, ref: React.ForwardedRef<HTMLButtonElement>) => {
|
||||
const data = useWatch({ control: props.control });
|
||||
return <SubmitButton ref={ref} disabled={props.disabled || !data?.text} />;
|
||||
return <SubmitButton ref={ref} disabled={props.disabled || !data?.text} isRTL={props.isRTL} />;
|
||||
}),
|
||||
);
|
||||
|
||||
|
||||
@@ -1,6 +1,13 @@
export default function StopButton({ stop, setShowStopButton }) {
import { cn } from '~/utils';

export default function StopButton({ stop, setShowStopButton, isRTL }) {
  return (
    <div className="absolute bottom-3 right-2 md:bottom-4 md:right-4">
    <div
      className={cn(
        'absolute',
        isRTL ? 'bottom-3 left-2 md:bottom-4 md:left-4' : 'bottom-3 right-2 md:bottom-4 md:right-4',
      )}
    >
      <button
        type="button"
        className="border-gizmo-gray-900 rounded-full border-2 p-1 dark:border-gray-200"
134  client/src/components/Chat/Menus/BookmarkMenu.tsx  Normal file
@@ -0,0 +1,134 @@
|
||||
import { useEffect, useState, type FC } from 'react';
|
||||
import { useRecoilValue } from 'recoil';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { TConversation } from 'librechat-data-provider';
|
||||
import { Content, Portal, Root, Trigger } from '@radix-ui/react-popover';
|
||||
import { BookmarkFilledIcon, BookmarkIcon } from '@radix-ui/react-icons';
|
||||
import { useConversationTagsQuery, useTagConversationMutation } from '~/data-provider';
|
||||
import { BookmarkMenuItems } from './Bookmarks/BookmarkMenuItems';
|
||||
import { BookmarkContext } from '~/Providers/BookmarkContext';
|
||||
import { Spinner } from '~/components';
|
||||
import { useLocalize } from '~/hooks';
|
||||
import { cn } from '~/utils';
|
||||
import store from '~/store';
|
||||
|
||||
const SAVED_TAG = 'Saved';
|
||||
const BookmarkMenu: FC = () => {
|
||||
const localize = useLocalize();
|
||||
const location = useLocation();
|
||||
|
||||
const activeConvo = useRecoilValue(store.conversationByIndex(0));
|
||||
|
||||
const globalConvo = useRecoilValue(store.conversation) ?? ({} as TConversation);
|
||||
const [tags, setTags] = useState<string[]>();
|
||||
|
||||
const [open, setIsOpen] = useState(false);
|
||||
const [conversation, setConversation] = useState<TConversation>();
|
||||
|
||||
let thisConversation: TConversation | null | undefined;
|
||||
if (location.state?.from?.pathname.includes('/chat')) {
|
||||
thisConversation = globalConvo;
|
||||
} else {
|
||||
thisConversation = activeConvo;
|
||||
}
|
||||
|
||||
const { mutateAsync, isLoading } = useTagConversationMutation(
|
||||
thisConversation?.conversationId ?? '',
|
||||
);
|
||||
|
||||
const { data } = useConversationTagsQuery();
|
||||
useEffect(() => {
|
||||
if (
|
||||
(!conversation && thisConversation) ||
|
||||
(conversation &&
|
||||
thisConversation &&
|
||||
conversation.conversationId !== thisConversation.conversationId)
|
||||
) {
|
||||
setConversation(thisConversation);
|
||||
setTags(thisConversation.tags ?? []);
|
||||
}
|
||||
if (tags === undefined && conversation) {
|
||||
setTags(conversation.tags ?? []);
|
||||
}
|
||||
}, [thisConversation, conversation, tags]);
|
||||
|
||||
const isActiveConvo =
|
||||
thisConversation &&
|
||||
thisConversation.conversationId &&
|
||||
thisConversation.conversationId !== 'new' &&
|
||||
thisConversation.conversationId !== 'search';
|
||||
|
||||
if (!isActiveConvo) {
|
||||
return <></>;
|
||||
}
|
||||
|
||||
const onOpenChange = async (open: boolean) => {
|
||||
if (!open) {
|
||||
setIsOpen(open);
|
||||
return;
|
||||
}
|
||||
if (open && tags && tags.length > 0) {
|
||||
setIsOpen(open);
|
||||
} else {
|
||||
if (thisConversation && thisConversation.conversationId) {
|
||||
await mutateAsync({
|
||||
conversationId: thisConversation.conversationId,
|
||||
tags: [SAVED_TAG],
|
||||
});
|
||||
setTags([SAVED_TAG]);
|
||||
setConversation({ ...thisConversation, tags: [SAVED_TAG] });
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<Root open={open} onOpenChange={onOpenChange}>
|
||||
<Trigger asChild>
|
||||
<button
|
||||
className={cn(
|
||||
'pointer-cursor relative flex flex-col rounded-md border border-gray-100 bg-white text-left focus:outline-none focus:ring-0 focus:ring-offset-0 dark:border-gray-700 dark:bg-gray-800 sm:text-sm',
|
||||
'hover:bg-gray-50 radix-state-open:bg-gray-50 dark:hover:bg-gray-700 dark:radix-state-open:bg-gray-700',
|
||||
'z-50 flex h-[40px] min-w-4 flex-none items-center justify-center px-3 focus:ring-0 focus:ring-offset-0',
|
||||
)}
|
||||
title={localize('com_ui_bookmarks')}
|
||||
>
|
||||
{isLoading ? (
|
||||
<Spinner />
|
||||
) : tags && tags.length > 0 ? (
|
||||
<BookmarkFilledIcon className="icon-sm" />
|
||||
) : (
|
||||
<BookmarkIcon className="icon-sm" />
|
||||
)}
|
||||
</button>
|
||||
</Trigger>
|
||||
<Portal>
|
||||
<Content
|
||||
className={cn(
|
||||
'grid w-full',
|
||||
'mt-2 min-w-[240px] overflow-y-auto rounded-lg border border-gray-200 bg-white shadow-lg dark:border-gray-700 dark:bg-gray-700 dark:text-white',
|
||||
'max-h-[500px]',
|
||||
)}
|
||||
side="bottom"
|
||||
align="start"
|
||||
>
|
||||
{data && conversation && (
|
||||
// Display all bookmarks registered by the user and highlight the tags of the currently selected conversation
|
||||
<BookmarkContext.Provider value={{ bookmarks: data }}>
|
||||
<BookmarkMenuItems
|
||||
// Currently selected conversation
|
||||
conversation={conversation}
|
||||
setConversation={setConversation}
|
||||
// Tags in the conversation
|
||||
tags={tags ?? []}
|
||||
// Update tags in the conversation
|
||||
setTags={setTags}
|
||||
/>
|
||||
</BookmarkContext.Provider>
|
||||
)}
|
||||
</Content>
|
||||
</Portal>
|
||||
</Root>
|
||||
);
|
||||
};
|
||||
|
||||
export default BookmarkMenu;
|
||||
@@ -0,0 +1,77 @@
|
||||
import { useCallback } from 'react';
|
||||
import { BookmarkPlusIcon } from 'lucide-react';
|
||||
import type { FC } from 'react';
|
||||
import type { TConversation } from 'librechat-data-provider';
|
||||
import { BookmarkItems, BookmarkEditDialog } from '~/components/Bookmarks';
|
||||
import { useTagConversationMutation } from '~/data-provider';
|
||||
import { NotificationSeverity } from '~/common';
|
||||
import { useToastContext } from '~/Providers';
|
||||
import { useLocalize } from '~/hooks';
|
||||
|
||||
export const BookmarkMenuItems: FC<{
|
||||
conversation: TConversation;
|
||||
tags: string[];
|
||||
setTags: (tags: string[]) => void;
|
||||
setConversation: (conversation: TConversation) => void;
|
||||
}> = ({ conversation, tags, setTags, setConversation }) => {
|
||||
const { showToast } = useToastContext();
|
||||
const localize = useLocalize();
|
||||
|
||||
const { mutateAsync } = useTagConversationMutation(conversation?.conversationId ?? '');
|
||||
const handleSubmit = useCallback(
|
||||
async (tag: string): Promise<void> => {
|
||||
if (tags !== undefined && conversation?.conversationId) {
|
||||
const newTags = tags.includes(tag) ? tags.filter((t) => t !== tag) : [...tags, tag];
|
||||
await mutateAsync(
|
||||
{
|
||||
conversationId: conversation.conversationId,
|
||||
tags: newTags,
|
||||
},
|
||||
{
|
||||
onSuccess: (newTags: string[]) => {
|
||||
setTags(newTags);
|
||||
setConversation({ ...conversation, tags: newTags });
|
||||
},
|
||||
onError: () => {
|
||||
showToast({
|
||||
message: 'Error adding bookmark',
|
||||
severity: NotificationSeverity.ERROR,
|
||||
});
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
},
|
||||
[tags, conversation],
|
||||
);
|
||||
|
||||
return (
|
||||
<BookmarkItems
|
||||
tags={tags}
|
||||
handleSubmit={handleSubmit}
|
||||
header={
|
||||
<div>
|
||||
<BookmarkEditDialog
|
||||
conversation={conversation}
|
||||
tags={tags}
|
||||
setTags={setTags}
|
||||
trigger={
|
||||
<div
|
||||
role="menuitem"
|
||||
className="group m-1.5 flex cursor-pointer gap-2 rounded px-2 !pr-3.5 pb-2.5 pt-3 text-sm !opacity-100 hover:bg-black/5 focus:ring-0 radix-disabled:pointer-events-none radix-disabled:opacity-50 dark:hover:bg-white/5"
|
||||
tabIndex={-1}
|
||||
>
|
||||
<div className="flex grow items-center justify-between gap-2">
|
||||
<div className="flex items-center gap-2">
|
||||
<BookmarkPlusIcon className="size-4" />
|
||||
<div className="break-all">{localize('com_ui_bookmarks_new')}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
}
|
||||
/>
|
||||
);
|
||||
};
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useEffect } from 'react';
|
||||
import { useEffect, useRef, useState } from 'react';
|
||||
import { useRecoilValue } from 'recoil';
|
||||
import type { TMessage } from 'librechat-data-provider';
|
||||
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '~/components/ui';
|
||||
import { VolumeIcon, VolumeMuteIcon, Spinner } from '~/components/svg';
|
||||
import { useLocalize, useTextToSpeech } from '~/hooks';
|
||||
import store from '~/store';
|
||||
@@ -14,32 +15,77 @@ type THoverButtons = {
|
||||
export default function MessageAudio({ index, message, isLast }: THoverButtons) {
|
||||
const localize = useLocalize();
|
||||
const playbackRate = useRecoilValue(store.playbackRate);
|
||||
const [audioText, setAudioText] = useState<string>(localize('com_ui_info_read_aloud'));
|
||||
const [tooltipOpen, setTooltipOpen] = useState(false);
|
||||
const [wasLongPress, setWasLongPress] = useState(false);
|
||||
|
||||
const { toggleSpeech, isSpeaking, isLoading, audioRef } = useTextToSpeech(message, isLast, index);
|
||||
|
||||
const isMouseDownRef = useRef(false);
|
||||
const timerRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const counterRef = useRef(0);
|
||||
|
||||
const renderIcon = (size: string) => {
|
||||
if (isLoading) {
|
||||
return <Spinner size={size} />;
|
||||
}
|
||||
return isSpeaking ? <VolumeMuteIcon size={size} /> : <VolumeIcon size={size} />;
|
||||
};
|
||||
|
||||
if (isSpeaking) {
|
||||
return <VolumeMuteIcon size={size} />;
|
||||
const handleMouseDown = () => {
|
||||
setWasLongPress(false);
|
||||
setTooltipOpen(true);
|
||||
if (isMouseDownRef.current) {
|
||||
return;
|
||||
}
|
||||
isMouseDownRef.current = true;
|
||||
counterRef.current = 2;
|
||||
setAudioText(localize('com_ui_hold_mouse_download', counterRef.current.toString()));
|
||||
timerRef.current = setInterval(() => {
|
||||
counterRef.current--;
|
||||
if (counterRef.current >= 0) {
|
||||
setAudioText(localize('com_ui_hold_mouse_download', counterRef.current.toString()));
|
||||
}
|
||||
if (isMouseDownRef.current && counterRef.current === 0) {
|
||||
setAudioText(localize('com_ui_downloading'));
|
||||
toggleSpeech(true);
|
||||
}
|
||||
if (counterRef.current < 0 && timerRef.current) {
|
||||
clearInterval(timerRef.current);
|
||||
}
|
||||
}, 1000);
|
||||
|
||||
window.addEventListener('mouseup', handleMouseUp);
|
||||
};
|
||||
|
||||
const handleMouseUp = () => {
|
||||
if (counterRef.current > 0) {
|
||||
toggleSpeech(false);
|
||||
}
|
||||
|
||||
return <VolumeIcon size={size} />;
|
||||
if (counterRef.current === 0) {
|
||||
setWasLongPress(true);
|
||||
}
|
||||
|
||||
setTooltipOpen(false);
|
||||
isMouseDownRef.current = false;
|
||||
if (timerRef.current) {
|
||||
clearInterval(timerRef.current);
|
||||
timerRef.current = null;
|
||||
setAudioText(localize('com_ui_info_read_aloud'));
|
||||
}
|
||||
|
||||
window.removeEventListener('mouseup', handleMouseUp);
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
const messageAudio = document.getElementById(
|
||||
`audio-${message.messageId}`,
|
||||
) as HTMLAudioElement | null;
|
||||
if (!messageAudio) {
|
||||
return;
|
||||
}
|
||||
if (
|
||||
playbackRate &&
|
||||
playbackRate > 0 &&
|
||||
messageAudio &&
|
||||
playbackRate !== null &&
|
||||
playbackRate > 0 &&
|
||||
messageAudio.playbackRate !== playbackRate
|
||||
) {
|
||||
messageAudio.playbackRate = playbackRate;
|
||||
@@ -47,48 +93,60 @@ export default function MessageAudio({ index, message, isLast }: THoverButtons)
|
||||
}, [audioRef, isSpeaking, playbackRate, message.messageId]);
|
||||
|
||||
return (
|
||||
<>
|
||||
<button
|
||||
className="hover-button rounded-md p-1 pl-0 text-gray-400 hover:bg-gray-100 hover:text-gray-500 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible"
|
||||
// onMouseDownCapture={() => {
|
||||
// if (audioRef.current) {
|
||||
// audioRef.current.muted = false;
|
||||
// }
|
||||
// handleMouseDown();
|
||||
// }}
|
||||
// onMouseUpCapture={() => {
|
||||
// if (audioRef.current) {
|
||||
// audioRef.current.muted = false;
|
||||
// }
|
||||
// handleMouseUp();
|
||||
// }}
|
||||
onClickCapture={() => {
|
||||
if (audioRef.current) {
|
||||
audioRef.current.muted = false;
|
||||
}
|
||||
toggleSpeech();
|
||||
}}
|
||||
type="button"
|
||||
title={isSpeaking ? localize('com_ui_stop') : localize('com_ui_read_aloud')}
|
||||
>
|
||||
{renderIcon('19')}
|
||||
</button>
|
||||
<audio
|
||||
ref={audioRef}
|
||||
controls
|
||||
controlsList="nodownload nofullscreen noremoteplayback"
|
||||
style={{
|
||||
position: 'absolute',
|
||||
overflow: 'hidden',
|
||||
display: 'none',
|
||||
height: '0px',
|
||||
width: '0px',
|
||||
}}
|
||||
src={audioRef.current?.src || undefined}
|
||||
id={`audio-${message.messageId}`}
|
||||
muted
|
||||
autoPlay
|
||||
/>
|
||||
</>
|
||||
<TooltipProvider>
|
||||
<>
|
||||
<Tooltip open={tooltipOpen}>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
className="hover-button rounded-md p-1 pl-0 text-gray-400 hover:text-gray-950 dark:text-gray-400/70 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible"
|
||||
onMouseDownCapture={handleMouseDown}
|
||||
onMouseUpCapture={handleMouseUp}
|
||||
onMouseEnter={() => setTooltipOpen(true)}
|
||||
onMouseLeave={() => setTooltipOpen(false)}
|
||||
onClickCapture={() => {
|
||||
if (!wasLongPress && audioRef.current) {
|
||||
audioRef.current.muted = false;
|
||||
toggleSpeech(false);
|
||||
}
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
{renderIcon('19')}
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="bottom" sideOffset={0}>
|
||||
<div className="space-y-2">
|
||||
{isSpeaking ? (
|
||||
<p className="text-center text-sm text-gray-600 dark:text-gray-300">
|
||||
{localize('com_ui_stop')}
|
||||
</p>
|
||||
) : (
|
||||
<p className="text-center text-sm text-gray-600 dark:text-gray-300">
|
||||
{localize('com_ui_read_aloud')}
|
||||
<br />
|
||||
{audioText}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
<audio
|
||||
ref={audioRef}
|
||||
controls
|
||||
controlsList="nodownload nofullscreen noremoteplayback"
|
||||
style={{
|
||||
position: 'absolute',
|
||||
overflow: 'hidden',
|
||||
display: 'none',
|
||||
height: '0px',
|
||||
width: '0px',
|
||||
}}
|
||||
src={audioRef.current?.src || undefined}
|
||||
id={`audio-${message.messageId}`}
|
||||
muted
|
||||
autoPlay
|
||||
/>
|
||||
</>
|
||||
</TooltipProvider>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -33,6 +33,7 @@ export default function MessagesView({
        <div
          onScroll={debouncedHandleScroll}
          ref={scrollableRef}
          tabIndex={0}
          style={{
            height: '100%',
            overflowY: 'auto',
@@ -1,6 +1,7 @@
import { useRecoilValue } from 'recoil';
import { useParams } from 'react-router-dom';
import { useState, useRef, useMemo } from 'react';
import { Constants } from 'librechat-data-provider';
import { useGetEndpointsQuery, useGetStartupConfig } from 'librechat-data-provider/react-query';
import type { MouseEvent, FocusEvent, KeyboardEvent } from 'react';
import { useUpdateConversationMutation } from '~/data-provider';
@@ -9,14 +10,14 @@ import { useConversations, useNavigateToConvo } from '~/hooks';
import { NotificationSeverity } from '~/common';
import { ArchiveIcon } from '~/components/svg';
import { useToastContext } from '~/Providers';
import DropDownMenu from './DropDownMenu';
import ArchiveButton from './ArchiveButton';
import DropDownMenu from './DropDownMenu';
import DeleteButton from './DeleteButton';
import RenameButton from './RenameButton';
import HoverToggle from './HoverToggle';
import ShareButton from './ShareButton';
import { cn } from '~/utils';
import store from '~/store';
import ShareButton from './ShareButton';

type KeyEvent = KeyboardEvent<HTMLInputElement>;

@@ -51,7 +52,8 @@ export default function Conversation({ conversation, retainView, toggleNav, isLa

    // set document title
    document.title = title;
    navigateWithLastTools(conversation);
    /* Note: Latest Message should not be reset if existing convo */
    navigateWithLastTools(conversation, !conversationId || conversationId === Constants.NEW_CONVO);
  };

  const renameHandler = (e: MouseEvent<HTMLButtonElement>) => {
@@ -1,5 +1,5 @@
|
||||
import React from 'react';
|
||||
import TextareaAutosize from 'react-textarea-autosize';
|
||||
import { anthropicSettings } from 'librechat-data-provider';
|
||||
import type { TModelSelectProps, OnInputNumberChange } from '~/common';
|
||||
import {
|
||||
Input,
|
||||
@@ -41,15 +41,31 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||
return null;
|
||||
}
|
||||
|
||||
const setModel = setOption('model');
|
||||
const setModelLabel = setOption('modelLabel');
|
||||
const setPromptPrefix = setOption('promptPrefix');
|
||||
const setTemperature = setOption('temperature');
|
||||
const setTopP = setOption('topP');
|
||||
const setTopK = setOption('topK');
|
||||
const setMaxOutputTokens = setOption('maxOutputTokens');
|
||||
const setResendFiles = setOption('resendFiles');
|
||||
|
||||
const setModel = (newModel: string) => {
|
||||
const modelSetter = setOption('model');
|
||||
const maxOutputSetter = setOption('maxOutputTokens');
|
||||
if (maxOutputTokens) {
|
||||
maxOutputSetter(anthropicSettings.maxOutputTokens.set(maxOutputTokens, newModel));
|
||||
}
|
||||
modelSetter(newModel);
|
||||
};
|
||||
|
||||
const setMaxOutputTokens = (value: number) => {
|
||||
const setter = setOption('maxOutputTokens');
|
||||
if (model) {
|
||||
setter(anthropicSettings.maxOutputTokens.set(value, model));
|
||||
} else {
|
||||
setter(value);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="grid grid-cols-5 gap-6">
|
||||
<div className="col-span-5 flex flex-col items-center justify-start gap-6 sm:col-span-3">
|
||||
@@ -139,14 +155,16 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||
<div className="flex justify-between">
|
||||
<Label htmlFor="temp-int" className="text-left text-sm font-medium">
|
||||
{localize('com_endpoint_temperature')}{' '}
|
||||
<small className="opacity-40">({localize('com_endpoint_default')}: 1)</small>
|
||||
<small className="opacity-40">
|
||||
({localize('com_endpoint_default')}: {anthropicSettings.temperature.default})
|
||||
</small>
|
||||
</Label>
|
||||
<InputNumber
|
||||
id="temp-int"
|
||||
disabled={readonly}
|
||||
value={temperature}
|
||||
onChange={(value) => setTemperature(Number(value))}
|
||||
max={1}
|
||||
max={anthropicSettings.temperature.max}
|
||||
min={0}
|
||||
step={0.01}
|
||||
controls={false}
|
||||
@@ -161,10 +179,10 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||
</div>
|
||||
<Slider
|
||||
disabled={readonly}
|
||||
value={[temperature ?? 1]}
|
||||
value={[temperature ?? anthropicSettings.temperature.default]}
|
||||
onValueChange={(value) => setTemperature(value[0])}
|
||||
doubleClickHandler={() => setTemperature(1)}
|
||||
max={1}
|
||||
doubleClickHandler={() => setTemperature(anthropicSettings.temperature.default)}
|
||||
max={anthropicSettings.temperature.max}
|
||||
min={0}
|
||||
step={0.01}
|
||||
className="flex h-4 w-full"
|
||||
@@ -178,7 +196,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||
<Label htmlFor="top-p-int" className="text-left text-sm font-medium">
|
||||
{localize('com_endpoint_top_p')}{' '}
|
||||
<small className="opacity-40">
|
||||
({localize('com_endpoint_default_with_num', '0.7')})
|
||||
({localize('com_endpoint_default_with_num', anthropicSettings.topP.default + '')})
|
||||
</small>
|
||||
</Label>
|
||||
<InputNumber
|
||||
@@ -186,7 +204,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||
disabled={readonly}
|
||||
value={topP}
|
||||
onChange={(value) => setTopP(Number(value))}
|
||||
max={1}
|
||||
max={anthropicSettings.topP.max}
|
||||
min={0}
|
||||
step={0.01}
|
||||
controls={false}
|
||||
@@ -203,8 +221,8 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||
disabled={readonly}
|
||||
value={[topP ?? 0.7]}
|
||||
onValueChange={(value) => setTopP(value[0])}
|
||||
doubleClickHandler={() => setTopP(1)}
|
||||
max={1}
|
||||
doubleClickHandler={() => setTopP(anthropicSettings.topP.default)}
|
||||
max={anthropicSettings.topP.max}
|
||||
min={0}
|
||||
step={0.01}
|
||||
className="flex h-4 w-full"
|
||||
@@ -219,7 +237,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||
<Label htmlFor="top-k-int" className="text-left text-sm font-medium">
|
||||
{localize('com_endpoint_top_k')}{' '}
|
||||
<small className="opacity-40">
|
||||
({localize('com_endpoint_default_with_num', '5')})
|
||||
({localize('com_endpoint_default_with_num', anthropicSettings.topK.default + '')})
|
||||
</small>
|
||||
</Label>
|
||||
<InputNumber
|
||||
@@ -227,7 +245,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||
disabled={readonly}
|
||||
value={topK}
|
||||
onChange={(value) => setTopK(Number(value))}
|
||||
max={40}
|
||||
max={anthropicSettings.topK.max}
|
||||
min={1}
|
||||
step={0.01}
|
||||
controls={false}
|
||||
@@ -244,8 +262,8 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||
disabled={readonly}
|
||||
value={[topK ?? 5]}
|
||||
onValueChange={(value) => setTopK(value[0])}
|
||||
doubleClickHandler={() => setTopK(0)}
|
||||
max={40}
|
||||
doubleClickHandler={() => setTopK(anthropicSettings.topK.default)}
|
||||
max={anthropicSettings.topK.max}
|
||||
min={1}
|
||||
step={0.01}
|
||||
className="flex h-4 w-full"
|
||||
@@ -258,16 +276,14 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||
<div className="flex justify-between">
|
||||
<Label htmlFor="max-tokens-int" className="text-left text-sm font-medium">
|
||||
{localize('com_endpoint_max_output_tokens')}{' '}
|
||||
<small className="opacity-40">
|
||||
({localize('com_endpoint_default_with_num', '4000')})
|
||||
</small>
|
||||
<small className="opacity-40">({anthropicSettings.maxOutputTokens.default})</small>
|
||||
</Label>
|
||||
<InputNumber
|
||||
id="max-tokens-int"
|
||||
disabled={readonly}
|
||||
value={maxOutputTokens}
|
||||
onChange={(value) => setMaxOutputTokens(Number(value))}
|
||||
max={4000}
|
||||
max={anthropicSettings.maxOutputTokens.max}
|
||||
min={1}
|
||||
step={1}
|
||||
controls={false}
|
||||
@@ -282,10 +298,12 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||
</div>
|
||||
<Slider
|
||||
disabled={readonly}
|
||||
value={[maxOutputTokens ?? 4000]}
|
||||
value={[maxOutputTokens ?? anthropicSettings.maxOutputTokens.default]}
|
||||
onValueChange={(value) => setMaxOutputTokens(value[0])}
|
||||
doubleClickHandler={() => setMaxOutputTokens(0)}
|
||||
max={4000}
|
||||
doubleClickHandler={() =>
|
||||
setMaxOutputTokens(anthropicSettings.maxOutputTokens.default)
|
||||
}
|
||||
max={anthropicSettings.maxOutputTokens.max}
|
||||
min={1}
|
||||
step={1}
|
||||
className="flex h-4 w-full"
|
||||
|
||||
@@ -1,5 +1,5 @@
import React from 'react';
import { CrossIcon, NewTrashIcon } from '~/components/svg';
import { CrossIcon, TrashIcon } from '~/components/svg';
import { Button } from '~/components/ui';

type DeleteIconButtonProps = {
@@ -10,7 +10,7 @@ export default function DeleteIconButton({ onClick }: DeleteIconButtonProps) {
  return (
    <div className="w-fit">
      <Button className="bg-red-400 p-3" onClick={onClick}>
        <NewTrashIcon />
        <TrashIcon />
      </Button>
    </div>
  );
@@ -32,7 +32,7 @@ import {
  DropdownMenuTrigger,
} from '~/components/ui';
import { useDeleteFilesFromTable } from '~/hooks/Files';
import { NewTrashIcon, Spinner } from '~/components/svg';
import { TrashIcon, Spinner } from '~/components/svg';
import useLocalize from '~/hooks/useLocalize';
import ActionButton from '../ActionButton';
import UploadFileButton from './UploadFileButton';
@@ -112,7 +112,7 @@ export default function DataTableFile<TData, TValue>({
        {isDeleting ? (
          <Spinner className="h-4 w-4" />
        ) : (
          <NewTrashIcon className="h-4 w-4 text-red-400" />
          <TrashIcon className="h-4 w-4 text-red-400" />
        )}
        {localize('com_ui_delete')}
      </Button>
@@ -1,6 +1,6 @@
import type { TFile } from 'librechat-data-provider';
import React from 'react';
import { NewTrashIcon } from '~/components/svg';
import { TrashIcon } from '~/components/svg';
import { Button } from '~/components/ui';

type FileListItemProps = {
@@ -25,7 +25,7 @@ export default function FileListItem({ file, deleteFile, width = '400px' }: File
          className="my-0 ml-3 bg-transparent p-0 text-[#666666] hover:bg-slate-200"
          onClick={() => deleteFile(file._id)}
        >
          <NewTrashIcon className="m-0 p-0" />
          <TrashIcon className="m-0 p-0" />
        </Button>
      </div>
    </div>
@@ -2,7 +2,7 @@ import type { TFile } from 'librechat-data-provider';
import { FileIcon, PlusIcon } from 'lucide-react';
import React from 'react';
import { useNavigate } from 'react-router-dom';
import { DotsIcon, NewTrashIcon } from '~/components/svg';
import { DotsIcon, TrashIcon } from '~/components/svg';
import { Button } from '~/components/ui';

type FileListItemProps = {
@@ -68,7 +68,7 @@ export default function FileListItem2({
          className="w-min bg-transparent text-[#666666] hover:bg-slate-200"
          onClick={() => deleteFile(file._id)}
        >
          <NewTrashIcon className="" />
          <TrashIcon className="" />
        </Button>
      </div>
    </div>
Some files were not shown because too many files have changed in this diff