Compare commits
19 Commits
feat/granu...feat/compo
| Author | SHA1 | Date |
|---|---|---|
| | f75010369c | |
| | dc9d219f3e | |
| | b0c5db6756 | |
| | ff722366e9 | |
| | 4f06c159be | |
| | e042e1500f | |
| | 0503f0f903 | |
| | e4adfe771b | |
| | 42977ac0d0 | |
| | d9a0fe03ed | |
| | d39b99971f | |
| | 1b7e044bf5 | |
| | 5c947be455 | |
| | 2b2f7fe289 | |
| | a058963a9f | |
| | 01e9b196bc | |
| | d835f48307 | |
| | 0587a1cc7c | |
| | 72cd159a37 | |
@@ -58,7 +58,7 @@ DEBUG_CONSOLE=false
 # Endpoints #
 #===================================================#

-# ENDPOINTS=openAI,assistants,azureOpenAI,google,gptPlugins,anthropic
+# ENDPOINTS=openAI,assistants,azureOpenAI,google,anthropic

 PROXY=
@@ -792,7 +792,8 @@ class BaseClient {
       userMessage.tokenCount = userMessageTokenCount;
       /*
-      Note: `AskController` saves the user message, so we update the count of its `userMessage` reference
+      Note: `AgentController` saves the user message if not saved here
+      (noted by `savedMessageIds`), so we update the count of its `userMessage` reference
       */
       if (typeof opts?.getReqData === 'function') {
         opts.getReqData({
@@ -801,7 +802,8 @@ class BaseClient {
       }
       /*
       Note: we update the user message to be sure it gets the calculated token count;
-      though `AskController` saves the user message, EditController does not
+      though `AgentController` saves the user message if not saved here
+      (noted by `savedMessageIds`), EditController does not
       */
       await userMessagePromise;
       await this.updateMessageInDatabase({
@@ -1,804 +0,0 @@
const { Keyv } = require('keyv');
const crypto = require('crypto');
const { CohereClient } = require('cohere-ai');
const { fetchEventSource } = require('@waylaidwanderer/fetch-event-source');
const { constructAzureURL, genAzureChatCompletion } = require('@librechat/api');
const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
const {
  ImageDetail,
  EModelEndpoint,
  resolveHeaders,
  CohereConstants,
  mapModelToAzureConfig,
} = require('librechat-data-provider');
const { createContextHandlers } = require('./prompts');
const { createCoherePayload } = require('./llm');
const { extractBaseURL } = require('~/utils');
const BaseClient = require('./BaseClient');
const { logger } = require('~/config');

const CHATGPT_MODEL = 'gpt-3.5-turbo';
const tokenizersCache = {};
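
/*
 * Legacy client for the OpenAI completions/chat-completions APIs, with Azure
 * OpenAI and Cohere routing. Prompts are framed manually with startToken and
 * endToken markers rather than delegated to an SDK.
 */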
class ChatGPTClient extends BaseClient {
  constructor(apiKey, options = {}, cacheOptions = {}) {
    super(apiKey, options, cacheOptions);

    cacheOptions.namespace = cacheOptions.namespace || 'chatgpt';
    this.conversationsCache = new Keyv(cacheOptions);
    this.setOptions(options);
  }

  setOptions(options) {
    if (this.options && !this.options.replaceOptions) {
      // nested options aren't spread properly, so we need to do this manually
      this.options.modelOptions = {
        ...this.options.modelOptions,
        ...options.modelOptions,
      };
      delete options.modelOptions;
      // now we can merge options
      this.options = {
        ...this.options,
        ...options,
      };
    } else {
      this.options = options;
    }

    if (this.options.openaiApiKey) {
      this.apiKey = this.options.openaiApiKey;
    }

    const modelOptions = this.options.modelOptions || {};
    this.modelOptions = {
      ...modelOptions,
      // set some good defaults (check for undefined in some cases because they may be 0)
      model: modelOptions.model || CHATGPT_MODEL,
      temperature: typeof modelOptions.temperature === 'undefined' ? 0.8 : modelOptions.temperature,
      top_p: typeof modelOptions.top_p === 'undefined' ? 1 : modelOptions.top_p,
      presence_penalty:
        typeof modelOptions.presence_penalty === 'undefined' ? 1 : modelOptions.presence_penalty,
      stop: modelOptions.stop,
    };

    this.isChatGptModel = this.modelOptions.model.includes('gpt-');
    const { isChatGptModel } = this;
    this.isUnofficialChatGptModel =
      this.modelOptions.model.startsWith('text-chat') ||
      this.modelOptions.model.startsWith('text-davinci-002-render');
    const { isUnofficialChatGptModel } = this;

    // Davinci models have a max context length of 4097 tokens.
    this.maxContextTokens = this.options.maxContextTokens || (isChatGptModel ? 4095 : 4097);
    // I decided to reserve 1024 tokens for the response.
    // The max prompt tokens is determined by the max context tokens minus the max response tokens.
    // Earlier messages will be dropped until the prompt is within the limit.
    this.maxResponseTokens = this.modelOptions.max_tokens || 1024;
    this.maxPromptTokens =
      this.options.maxPromptTokens || this.maxContextTokens - this.maxResponseTokens;

    if (this.maxPromptTokens + this.maxResponseTokens > this.maxContextTokens) {
      throw new Error(
        `maxPromptTokens + max_tokens (${this.maxPromptTokens} + ${this.maxResponseTokens} = ${
          this.maxPromptTokens + this.maxResponseTokens
        }) must be less than or equal to maxContextTokens (${this.maxContextTokens})`,
      );
    }

    this.userLabel = this.options.userLabel || 'User';
    this.chatGptLabel = this.options.chatGptLabel || 'ChatGPT';

    if (isChatGptModel) {
      // Use these faux tokens to help the AI understand the context since we are building the chat log ourselves.
      // Trying to use "<|im_start|>" causes the AI to still generate "<" or "<|" at the end sometimes for some reason,
      // without tripping the stop sequences, so I'm using "||>" instead.
      this.startToken = '||>';
      this.endToken = '';
      this.gptEncoder = this.constructor.getTokenizer('cl100k_base');
    } else if (isUnofficialChatGptModel) {
      this.startToken = '<|im_start|>';
      this.endToken = '<|im_end|>';
      this.gptEncoder = this.constructor.getTokenizer('text-davinci-003', true, {
        '<|im_start|>': 100264,
        '<|im_end|>': 100265,
      });
    } else {
      // Previously I was trying to use "<|endoftext|>" but there seems to be some bug with OpenAI's token counting
      // system that causes only the first "<|endoftext|>" to be counted as 1 token, and the rest are not treated
      // as a single token. So we're using this instead.
      this.startToken = '||>';
      this.endToken = '';
      try {
        this.gptEncoder = this.constructor.getTokenizer(this.modelOptions.model, true);
      } catch {
        this.gptEncoder = this.constructor.getTokenizer('text-davinci-003', true);
      }
    }

    if (!this.modelOptions.stop) {
      const stopTokens = [this.startToken];
      if (this.endToken && this.endToken !== this.startToken) {
        stopTokens.push(this.endToken);
      }
      stopTokens.push(`\n${this.userLabel}:`);
      stopTokens.push('<|diff_marker|>');
      // I chose not to do one for `chatGptLabel` because I've never seen it happen
      this.modelOptions.stop = stopTokens;
    }

    if (this.options.reverseProxyUrl) {
      this.completionsUrl = this.options.reverseProxyUrl;
    } else if (isChatGptModel) {
      this.completionsUrl = 'https://api.openai.com/v1/chat/completions';
    } else {
      this.completionsUrl = 'https://api.openai.com/v1/completions';
    }

    return this;
  }
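
  /**
   * Returns a memoized tiktoken tokenizer. `encoding` is either an encoding
   * name (e.g. 'cl100k_base') or, when `isModelName` is true, a model name;
   * instances are cached in `tokenizersCache` because constructing an encoder
   * is comparatively expensive.
   */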
  static getTokenizer(encoding, isModelName = false, extendSpecialTokens = {}) {
    if (tokenizersCache[encoding]) {
      return tokenizersCache[encoding];
    }
    let tokenizer;
    if (isModelName) {
      tokenizer = encodingForModel(encoding, extendSpecialTokens);
    } else {
      tokenizer = getEncoding(encoding, extendSpecialTokens);
    }
    tokenizersCache[encoding] = tokenizer;
    return tokenizer;
  }
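
  /**
   * Dispatches a completion request: applies Azure OpenAI model-group mapping
   * and headers where configured, Mistral payload fixes, `addParams` /
   * `dropParams` overrides, and Cohere routing, then either streams the
   * response via SSE or performs a single fetch.
   */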
  /** @type {getCompletion} */
  async getCompletion(input, onProgress, onTokenProgress, abortController = null) {
    if (!abortController) {
      abortController = new AbortController();
    }

    let modelOptions = { ...this.modelOptions };
    if (typeof onProgress === 'function') {
      modelOptions.stream = true;
    }
    if (this.isChatGptModel) {
      modelOptions.messages = input;
    } else {
      modelOptions.prompt = input;
    }

    if (this.useOpenRouter && modelOptions.prompt) {
      delete modelOptions.stop;
    }

    const { debug } = this.options;
    let baseURL = this.completionsUrl;
    if (debug) {
      console.debug();
      console.debug(baseURL);
      console.debug(modelOptions);
      console.debug();
    }

    const opts = {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
    };

    if (this.isVisionModel) {
      modelOptions.max_tokens = 4000;
    }

    /** @type {TAzureConfig | undefined} */
    const azureConfig = this.options?.req?.app?.locals?.[EModelEndpoint.azureOpenAI];

    const isAzure = this.azure || this.options.azure;
    if (
      (isAzure && this.isVisionModel && azureConfig) ||
      (azureConfig && this.isVisionModel && this.options.endpoint === EModelEndpoint.azureOpenAI)
    ) {
      const { modelGroupMap, groupMap } = azureConfig;
      const {
        azureOptions,
        baseURL,
        headers = {},
        serverless,
      } = mapModelToAzureConfig({
        modelName: modelOptions.model,
        modelGroupMap,
        groupMap,
      });
      opts.headers = resolveHeaders(headers);
      this.langchainProxy = extractBaseURL(baseURL);
      this.apiKey = azureOptions.azureOpenAIApiKey;

      const groupName = modelGroupMap[modelOptions.model].group;
      this.options.addParams = azureConfig.groupMap[groupName].addParams;
      this.options.dropParams = azureConfig.groupMap[groupName].dropParams;
      // Note: `forcePrompt` not re-assigned as only chat models are vision models

      this.azure = !serverless && azureOptions;
      this.azureEndpoint =
        !serverless && genAzureChatCompletion(this.azure, modelOptions.model, this);
      if (serverless === true) {
        this.options.defaultQuery = azureOptions.azureOpenAIApiVersion
          ? { 'api-version': azureOptions.azureOpenAIApiVersion }
          : undefined;
        this.options.headers['api-key'] = this.apiKey;
      }
    }

    if (this.options.defaultQuery) {
      opts.defaultQuery = this.options.defaultQuery;
    }

    if (this.options.headers) {
      opts.headers = { ...opts.headers, ...this.options.headers };
    }

    if (isAzure) {
      // Azure does not accept `model` in the body, so we need to remove it.
      delete modelOptions.model;

      baseURL = this.langchainProxy
        ? constructAzureURL({
            baseURL: this.langchainProxy,
            azureOptions: this.azure,
          })
        : this.azureEndpoint.split(/(?<!\/)\/(chat|completion)\//)[0];

      if (this.options.forcePrompt) {
        baseURL += '/completions';
      } else {
        baseURL += '/chat/completions';
      }

      opts.defaultQuery = { 'api-version': this.azure.azureOpenAIApiVersion };
      opts.headers = { ...opts.headers, 'api-key': this.apiKey };
    } else if (this.apiKey) {
      opts.headers.Authorization = `Bearer ${this.apiKey}`;
    }

    if (process.env.OPENAI_ORGANIZATION) {
      opts.headers['OpenAI-Organization'] = process.env.OPENAI_ORGANIZATION;
    }

    if (this.useOpenRouter) {
      opts.headers['HTTP-Referer'] = 'https://librechat.ai';
      opts.headers['X-Title'] = 'LibreChat';
    }

    /* hacky fixes for Mistral AI API:
      - Re-orders system message to the top of the messages payload, as not allowed anywhere else
      - If there is only one message and it's a system message, change the role to user
    */
    if (baseURL.includes('https://api.mistral.ai/v1') && modelOptions.messages) {
      const { messages } = modelOptions;

      const systemMessageIndex = messages.findIndex((msg) => msg.role === 'system');

      if (systemMessageIndex > 0) {
        const [systemMessage] = messages.splice(systemMessageIndex, 1);
        messages.unshift(systemMessage);
      }

      modelOptions.messages = messages;

      if (messages.length === 1 && messages[0].role === 'system') {
        modelOptions.messages[0].role = 'user';
      }
    }

    if (this.options.addParams && typeof this.options.addParams === 'object') {
      modelOptions = {
        ...modelOptions,
        ...this.options.addParams,
      };
      logger.debug('[ChatGPTClient] chatCompletion: added params', {
        addParams: this.options.addParams,
        modelOptions,
      });
    }

    if (this.options.dropParams && Array.isArray(this.options.dropParams)) {
      this.options.dropParams.forEach((param) => {
        delete modelOptions[param];
      });
      logger.debug('[ChatGPTClient] chatCompletion: dropped params', {
        dropParams: this.options.dropParams,
        modelOptions,
      });
    }

    if (baseURL.startsWith(CohereConstants.API_URL)) {
      const payload = createCoherePayload({ modelOptions });
      return await this.cohereChatCompletion({ payload, onTokenProgress });
    }

    if (baseURL.includes('v1') && !baseURL.includes('/completions') && !this.isChatCompletion) {
      baseURL = baseURL.split('v1')[0] + 'v1/completions';
    } else if (
      baseURL.includes('v1') &&
      !baseURL.includes('/chat/completions') &&
      this.isChatCompletion
    ) {
      baseURL = baseURL.split('v1')[0] + 'v1/chat/completions';
    }

    const BASE_URL = new URL(baseURL);
    if (opts.defaultQuery) {
      Object.entries(opts.defaultQuery).forEach(([key, value]) => {
        BASE_URL.searchParams.append(key, value);
      });
      delete opts.defaultQuery;
    }

    const completionsURL = BASE_URL.toString();
    opts.body = JSON.stringify(modelOptions);
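
    // Streaming path: issue the request as a server-sent events stream.
    // Each parsed chunk is forwarded to `onProgress`; `onclose` compensates
    // for servers that close the stream without sending a `[DONE]` event.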
    if (modelOptions.stream) {
      return new Promise(async (resolve, reject) => {
        try {
          let done = false;
          await fetchEventSource(completionsURL, {
            ...opts,
            signal: abortController.signal,
            async onopen(response) {
              if (response.status === 200) {
                return;
              }
              if (debug) {
                console.debug(response);
              }
              let error;
              try {
                const body = await response.text();
                error = new Error(`Failed to send message. HTTP ${response.status} - ${body}`);
                error.status = response.status;
                error.json = JSON.parse(body);
              } catch {
                error = error || new Error(`Failed to send message. HTTP ${response.status}`);
              }
              throw error;
            },
            onclose() {
              if (debug) {
                console.debug('Server closed the connection unexpectedly, returning...');
              }
              // workaround for private API not sending [DONE] event
              if (!done) {
                onProgress('[DONE]');
                resolve();
              }
            },
            onerror(err) {
              if (debug) {
                console.debug(err);
              }
              // rethrow to stop the operation
              throw err;
            },
            onmessage(message) {
              if (debug) {
                console.debug(message);
              }
              if (!message.data || message.event === 'ping') {
                return;
              }
              if (message.data === '[DONE]') {
                onProgress('[DONE]');
                resolve();
                done = true;
                return;
              }
              onProgress(JSON.parse(message.data));
            },
          });
        } catch (err) {
          reject(err);
        }
      });
    }
    const response = await fetch(completionsURL, {
      ...opts,
      signal: abortController.signal,
    });
    if (response.status !== 200) {
      const body = await response.text();
      const error = new Error(`Failed to send message. HTTP ${response.status} - ${body}`);
      error.status = response.status;
      try {
        error.json = JSON.parse(body);
      } catch {
        error.body = body;
      }
      throw error;
    }
    return response.json();
  }

  /** @type {cohereChatCompletion} */
  async cohereChatCompletion({ payload, onTokenProgress }) {
    const cohere = new CohereClient({
      token: this.apiKey,
      environment: this.completionsUrl,
    });

    if (!payload.stream) {
      const chatResponse = await cohere.chat(payload);
      return chatResponse.text;
    }

    const chatStream = await cohere.chatStream(payload);
    let reply = '';
    for await (const message of chatStream) {
      if (!message) {
        continue;
      }

      if (message.eventType === 'text-generation' && message.text) {
        onTokenProgress(message.text);
        reply += message.text;
      }
      /*
      Cohere API Chinese Unicode character replacement hotfix.
      Should be un-commented when the following issue is resolved:
      https://github.com/cohere-ai/cohere-typescript/issues/151

      else if (message.eventType === 'stream-end' && message.response) {
        reply = message.response.text;
      }
      */
    }

    return reply;
  }
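
  /**
   * Produces a short conversation title via a secondary, deterministic
   * (temperature 0) gpt-3.5-turbo completion over the first exchange, then
   * strips punctuation and collapses whitespace in the result.
   */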
  async generateTitle(userMessage, botMessage) {
    const instructionsPayload = {
      role: 'system',
      content: `Write an extremely concise subtitle for this conversation with no more than a few words. All words should be capitalized. Exclude punctuation.

||>Message:
${userMessage.message}
||>Response:
${botMessage.message}

||>Title:`,
    };

    const titleGenClientOptions = JSON.parse(JSON.stringify(this.options));
    titleGenClientOptions.modelOptions = {
      model: 'gpt-3.5-turbo',
      temperature: 0,
      presence_penalty: 0,
      frequency_penalty: 0,
    };
    const titleGenClient = new ChatGPTClient(this.apiKey, titleGenClientOptions);
    const result = await titleGenClient.getCompletion([instructionsPayload], null);
    // remove any non-alphanumeric characters, replace multiple spaces with 1, and then trim
    return result.choices[0].message.content
      .replace(/[^a-zA-Z0-9' ]/g, '')
      .replace(/\s+/g, ' ')
      .trim();
  }
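
  /**
   * Cache-backed conversational entry point: loads or creates the conversation
   * in Keyv, streams tokens through `opts.onProgress` when provided, and
   * optionally titles brand-new conversations.
   */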
  async sendMessage(message, opts = {}) {
    if (opts.clientOptions && typeof opts.clientOptions === 'object') {
      this.setOptions(opts.clientOptions);
    }

    const conversationId = opts.conversationId || crypto.randomUUID();
    const parentMessageId = opts.parentMessageId || crypto.randomUUID();

    let conversation =
      typeof opts.conversation === 'object'
        ? opts.conversation
        : await this.conversationsCache.get(conversationId);

    let isNewConversation = false;
    if (!conversation) {
      conversation = {
        messages: [],
        createdAt: Date.now(),
      };
      isNewConversation = true;
    }

    const shouldGenerateTitle = opts.shouldGenerateTitle && isNewConversation;

    const userMessage = {
      id: crypto.randomUUID(),
      parentMessageId,
      role: 'User',
      message,
    };
    conversation.messages.push(userMessage);

    // Doing it this way instead of having each message be a separate element in the array seems to be more reliable,
    // especially when it comes to keeping the AI in character. It also seems to improve coherency and context retention.
    const { prompt: payload, context } = await this.buildPrompt(
      conversation.messages,
      userMessage.id,
      {
        isChatGptModel: this.isChatGptModel,
        promptPrefix: opts.promptPrefix,
      },
    );

    if (this.options.keepNecessaryMessagesOnly) {
      conversation.messages = context;
    }

    let reply = '';
    let result = null;
    if (typeof opts.onProgress === 'function') {
      await this.getCompletion(
        payload,
        (progressMessage) => {
          if (progressMessage === '[DONE]') {
            return;
          }
          const token = this.isChatGptModel
            ? progressMessage.choices[0].delta.content
            : progressMessage.choices[0].text;
          // first event's delta content is always undefined
          if (!token) {
            return;
          }
          if (this.options.debug) {
            console.debug(token);
          }
          if (token === this.endToken) {
            return;
          }
          opts.onProgress(token);
          reply += token;
        },
        opts.abortController || new AbortController(),
      );
    } else {
      result = await this.getCompletion(
        payload,
        null,
        opts.abortController || new AbortController(),
      );
      if (this.options.debug) {
        console.debug(JSON.stringify(result));
      }
      if (this.isChatGptModel) {
        reply = result.choices[0].message.content;
      } else {
        reply = result.choices[0].text.replace(this.endToken, '');
      }
    }

    // avoids some rendering issues when using the CLI app
    if (this.options.debug) {
      console.debug();
    }

    reply = reply.trim();

    const replyMessage = {
      id: crypto.randomUUID(),
      parentMessageId: userMessage.id,
      role: 'ChatGPT',
      message: reply,
    };
    conversation.messages.push(replyMessage);

    const returnData = {
      response: replyMessage.message,
      conversationId,
      parentMessageId: replyMessage.parentMessageId,
      messageId: replyMessage.id,
      details: result || {},
    };

    if (shouldGenerateTitle) {
      conversation.title = await this.generateTitle(userMessage, replyMessage);
      returnData.title = conversation.title;
    }

    await this.conversationsCache.set(conversationId, conversation);

    if (this.options.returnConversation) {
      returnData.conversation = conversation;
    }

    return returnData;
  }
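
  /**
   * Builds the prompt by walking messages backwards until `maxPromptTokens`
   * is reached, prepending `promptPrefix` as instructions; image attachments
   * are priced into per-message token counts and embedded files are routed
   * through context handlers.
   */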
  async buildPrompt(messages, { isChatGptModel = false, promptPrefix = null }) {
    promptPrefix = (promptPrefix || this.options.promptPrefix || '').trim();

    // Handle attachments and create augmentedPrompt
    if (this.options.attachments) {
      const attachments = await this.options.attachments;
      const lastMessage = messages[messages.length - 1];

      if (this.message_file_map) {
        this.message_file_map[lastMessage.messageId] = attachments;
      } else {
        this.message_file_map = {
          [lastMessage.messageId]: attachments,
        };
      }

      const files = await this.addImageURLs(lastMessage, attachments);
      this.options.attachments = files;

      this.contextHandlers = createContextHandlers(this.options.req, lastMessage.text);
    }

    if (this.message_file_map) {
      this.contextHandlers = createContextHandlers(
        this.options.req,
        messages[messages.length - 1].text,
      );
    }

    // Calculate image token cost and process embedded files
    messages.forEach((message, i) => {
      if (this.message_file_map && this.message_file_map[message.messageId]) {
        const attachments = this.message_file_map[message.messageId];
        for (const file of attachments) {
          if (file.embedded) {
            this.contextHandlers?.processFile(file);
            continue;
          }

          messages[i].tokenCount =
            (messages[i].tokenCount || 0) +
            this.calculateImageTokenCost({
              width: file.width,
              height: file.height,
              detail: this.options.imageDetail ?? ImageDetail.auto,
            });
        }
      }
    });

    if (this.contextHandlers) {
      this.augmentedPrompt = await this.contextHandlers.createContext();
      promptPrefix = this.augmentedPrompt + promptPrefix;
    }

    if (promptPrefix) {
      // If the prompt prefix doesn't end with the end token, add it.
      if (!promptPrefix.endsWith(`${this.endToken}`)) {
        promptPrefix = `${promptPrefix.trim()}${this.endToken}\n\n`;
      }
      promptPrefix = `${this.startToken}Instructions:\n${promptPrefix}`;
    }
    const promptSuffix = `${this.startToken}${this.chatGptLabel}:\n`; // Prompt ChatGPT to respond.

    const instructionsPayload = {
      role: 'system',
      content: promptPrefix,
    };

    const messagePayload = {
      role: 'system',
      content: promptSuffix,
    };

    let currentTokenCount;
    if (isChatGptModel) {
      currentTokenCount =
        this.getTokenCountForMessage(instructionsPayload) +
        this.getTokenCountForMessage(messagePayload);
    } else {
      currentTokenCount = this.getTokenCount(`${promptPrefix}${promptSuffix}`);
    }
    let promptBody = '';
    const maxTokenCount = this.maxPromptTokens;

    const context = [];

    // Iterate backwards through the messages, adding them to the prompt until we reach the max token count.
    // Do this within a recursive async function so that it doesn't block the event loop for too long.
    const buildPromptBody = async () => {
      if (currentTokenCount < maxTokenCount && messages.length > 0) {
        const message = messages.pop();
        const roleLabel =
          message?.isCreatedByUser || message?.role?.toLowerCase() === 'user'
            ? this.userLabel
            : this.chatGptLabel;
        const messageString = `${this.startToken}${roleLabel}:\n${
          message?.text ?? message?.message
        }${this.endToken}\n`;
        let newPromptBody;
        if (promptBody || isChatGptModel) {
          newPromptBody = `${messageString}${promptBody}`;
        } else {
          // Always insert prompt prefix before the last user message, if not gpt-3.5-turbo.
          // This makes the AI obey the prompt instructions better, which is important for custom instructions.
          // After a bunch of testing, it doesn't seem to cause the AI any confusion, even if you ask it things
          // like "what's the last thing I wrote?".
          newPromptBody = `${promptPrefix}${messageString}${promptBody}`;
        }

        context.unshift(message);

        const tokenCountForMessage = this.getTokenCount(messageString);
        const newTokenCount = currentTokenCount + tokenCountForMessage;
        if (newTokenCount > maxTokenCount) {
          if (promptBody) {
            // This message would put us over the token limit, so don't add it.
            return false;
          }
          // This is the first message, so we can't add it. Just throw an error.
          throw new Error(
            `Prompt is too long. Max token count is ${maxTokenCount}, but prompt is ${newTokenCount} tokens long.`,
          );
        }
        promptBody = newPromptBody;
        currentTokenCount = newTokenCount;
        // wait for next tick to avoid blocking the event loop
        await new Promise((resolve) => setImmediate(resolve));
        return buildPromptBody();
      }
      return true;
    };

    await buildPromptBody();

    const prompt = `${promptBody}${promptSuffix}`;
    if (isChatGptModel) {
      messagePayload.content = prompt;
      // Add 3 tokens for Assistant Label priming after all messages have been counted.
      currentTokenCount += 3;
    }

    // Use up to `this.maxContextTokens` tokens (prompt + response), but try to leave `this.maxTokens` tokens for the response.
    this.modelOptions.max_tokens = Math.min(
      this.maxContextTokens - currentTokenCount,
      this.maxResponseTokens,
    );

    if (isChatGptModel) {
      return { prompt: [instructionsPayload, messagePayload], context };
    }
    return { prompt, context, promptTokens: currentTokenCount };
  }

  getTokenCount(text) {
    return this.gptEncoder.encode(text, 'all').length;
  }

  /**
   * Algorithm adapted from "6. Counting tokens for chat API calls" of
   * https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
   *
   * An additional 3 tokens need to be added for assistant label priming after all messages have been counted.
   *
   * @param {Object} message
   */
  getTokenCountForMessage(message) {
    // Note: gpt-3.5-turbo and gpt-4 may update over time. Use default for these as well as for unknown models
    let tokensPerMessage = 3;
    let tokensPerName = 1;

    if (this.modelOptions.model === 'gpt-3.5-turbo-0301') {
      tokensPerMessage = 4;
      tokensPerName = -1;
    }

    let numTokens = tokensPerMessage;
    for (let [key, value] of Object.entries(message)) {
      numTokens += this.getTokenCount(value);
      if (key === 'name') {
        numTokens += tokensPerName;
      }
    }

    return numTokens;
  }
}

module.exports = ChatGPTClient;
@@ -5,6 +5,7 @@ const {
   isEnabled,
   Tokenizer,
   createFetch,
+  resolveHeaders,
   constructAzureURL,
   genAzureChatCompletion,
   createStreamEventHandlers,
@@ -15,7 +16,6 @@ const {
   ContentTypes,
   parseTextParts,
   EModelEndpoint,
-  resolveHeaders,
   KnownEndpoints,
   openAISettings,
   ImageDetailCost,
@@ -37,7 +37,6 @@ const { addSpaceIfNeeded, sleep } = require('~/server/utils');
 const { spendTokens } = require('~/models/spendTokens');
 const { handleOpenAIErrors } = require('./tools/util');
 const { createLLM, RunManager } = require('./llm');
-const ChatGPTClient = require('./ChatGPTClient');
 const { summaryBuffer } = require('./memory');
 const { runTitleChain } = require('./chains');
 const { tokenSplit } = require('./document');
@@ -47,12 +46,6 @@ const { logger } = require('~/config');
 class OpenAIClient extends BaseClient {
   constructor(apiKey, options = {}) {
     super(apiKey, options);
-    this.ChatGPTClient = new ChatGPTClient();
-    this.buildPrompt = this.ChatGPTClient.buildPrompt.bind(this);
-    /** @type {getCompletion} */
-    this.getCompletion = this.ChatGPTClient.getCompletion.bind(this);
-    /** @type {cohereChatCompletion} */
-    this.cohereChatCompletion = this.ChatGPTClient.cohereChatCompletion.bind(this);
     this.contextStrategy = options.contextStrategy
       ? options.contextStrategy.toLowerCase()
       : 'discard';
@@ -379,23 +372,12 @@ class OpenAIClient extends BaseClient {
     return files;
   }

-  async buildMessages(
-    messages,
-    parentMessageId,
-    { isChatCompletion = false, promptPrefix = null },
-    opts,
-  ) {
+  async buildMessages(messages, parentMessageId, { promptPrefix = null }, opts) {
     let orderedMessages = this.constructor.getMessagesForConversation({
       messages,
       parentMessageId,
       summary: this.shouldSummarize,
     });
-    if (!isChatCompletion) {
-      return await this.buildPrompt(orderedMessages, {
-        isChatGptModel: isChatCompletion,
-        promptPrefix,
-      });
-    }

     let payload;
     let instructions;
@@ -1,542 +0,0 @@
const OpenAIClient = require('./OpenAIClient');
const { CallbackManager } = require('@langchain/core/callbacks/manager');
const { BufferMemory, ChatMessageHistory } = require('langchain/memory');
const { addImages, buildErrorInput, buildPromptPrefix } = require('./output_parsers');
const { initializeCustomAgent, initializeFunctionsAgent } = require('./agents');
const { processFileURL } = require('~/server/services/Files/process');
const { EModelEndpoint } = require('librechat-data-provider');
const { checkBalance } = require('~/models/balanceMethods');
const { formatLangChainMessages } = require('./prompts');
const { extractBaseURL } = require('~/utils');
const { loadTools } = require('./tools/util');
const { logger } = require('~/config');
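
/*
 * LangChain-based plugins client: extends OpenAIClient with an agent phase
 * (tool selection and execution) followed, unless skipped, by a completion
 * phase over the agent's intermediate results.
 */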
class PluginsClient extends OpenAIClient {
  constructor(apiKey, options = {}) {
    super(apiKey, options);
    this.sender = options.sender ?? 'Assistant';
    this.tools = [];
    this.actions = [];
    this.setOptions(options);
    this.openAIApiKey = this.apiKey;
    this.executor = null;
  }

  setOptions(options) {
    this.agentOptions = { ...options.agentOptions };
    this.functionsAgent = this.agentOptions?.agent === 'functions';
    this.agentIsGpt3 = this.agentOptions?.model?.includes('gpt-3');

    super.setOptions(options);

    this.isGpt3 = this.modelOptions?.model?.includes('gpt-3');

    if (this.options.reverseProxyUrl) {
      this.langchainProxy = extractBaseURL(this.options.reverseProxyUrl);
    }
  }

  getSaveOptions() {
    return {
      artifacts: this.options.artifacts,
      chatGptLabel: this.options.chatGptLabel,
      modelLabel: this.options.modelLabel,
      promptPrefix: this.options.promptPrefix,
      tools: this.options.tools,
      ...this.modelOptions,
      agentOptions: this.agentOptions,
      iconURL: this.options.iconURL,
      greeting: this.options.greeting,
      spec: this.options.spec,
    };
  }

  saveLatestAction(action) {
    this.actions.push(action);
  }

  getFunctionModelName(input) {
    if (/-(?!0314)\d{4}/.test(input)) {
      return input;
    } else if (input.includes('gpt-3.5-turbo')) {
      return 'gpt-3.5-turbo';
    } else if (input.includes('gpt-4')) {
      return 'gpt-4';
    } else {
      return 'gpt-3.5-turbo';
    }
  }

  getBuildMessagesOptions(opts) {
    return {
      isChatCompletion: true,
      promptPrefix: opts.promptPrefix,
      abortController: opts.abortController,
    };
  }
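
  /**
   * Agent setup: initializes the LLM, converts prior messages to LangChain
   * format for BufferMemory, loads the requested tools, and constructs the
   * executor (functions agent or custom agent) with action/chain callbacks.
   */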
  async initialize({ user, message, onAgentAction, onChainEnd, signal }) {
    const modelOptions = {
      modelName: this.agentOptions.model,
      temperature: this.agentOptions.temperature,
    };

    const model = this.initializeLLM({
      ...modelOptions,
      context: 'plugins',
      initialMessageCount: this.currentMessages.length + 1,
    });

    logger.debug(
      `[PluginsClient] Agent Model: ${model.modelName} | Temp: ${model.temperature} | Functions: ${this.functionsAgent}`,
    );

    // Map Messages to Langchain format
    const pastMessages = formatLangChainMessages(this.currentMessages.slice(0, -1), {
      userName: this.options?.name,
    });
    logger.debug('[PluginsClient] pastMessages: ' + pastMessages.length);

    // TODO: use readOnly memory, TokenBufferMemory? (both unavailable in LangChainJS)
    const memory = new BufferMemory({
      llm: model,
      chatHistory: new ChatMessageHistory(pastMessages),
    });

    const { loadedTools } = await loadTools({
      user,
      model,
      tools: this.options.tools,
      functions: this.functionsAgent,
      options: {
        memory,
        signal: this.abortController.signal,
        openAIApiKey: this.openAIApiKey,
        conversationId: this.conversationId,
        fileStrategy: this.options.req.app.locals.fileStrategy,
        processFileURL,
        message,
      },
      useSpecs: true,
    });

    if (loadedTools.length === 0) {
      return;
    }

    this.tools = loadedTools;

    logger.debug('[PluginsClient] Requested Tools', this.options.tools);
    logger.debug(
      '[PluginsClient] Loaded Tools',
      this.tools.map((tool) => tool.name),
    );

    const handleAction = (action, runId, callback = null) => {
      this.saveLatestAction(action);

      logger.debug('[PluginsClient] Latest Agent Action ', this.actions[this.actions.length - 1]);

      if (typeof callback === 'function') {
        callback(action, runId);
      }
    };

    // initialize agent
    const initializer = this.functionsAgent ? initializeFunctionsAgent : initializeCustomAgent;

    let customInstructions = (this.options.promptPrefix ?? '').trim();
    if (typeof this.options.artifactsPrompt === 'string' && this.options.artifactsPrompt) {
      customInstructions = `${customInstructions ?? ''}\n${this.options.artifactsPrompt}`.trim();
    }

    this.executor = await initializer({
      model,
      signal,
      pastMessages,
      tools: this.tools,
      customInstructions,
      verbose: this.options.debug,
      returnIntermediateSteps: true,
      customName: this.options.chatGptLabel,
      currentDateString: this.currentDateString,
      callbackManager: CallbackManager.fromHandlers({
        async handleAgentAction(action, runId) {
          handleAction(action, runId, onAgentAction);
        },
        async handleChainEnd(action) {
          if (typeof onChainEnd === 'function') {
            onChainEnd(action);
          }
        },
      }),
    });

    logger.debug('[PluginsClient] Loaded agent.');
  }
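
  /**
   * Runs the agent executor. The loop is retry scaffolding: with
   * `maxAttempts = 1`, a failure is recorded on `this.result` (error message
   * and intermediate steps) after a single attempt rather than retried.
   */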
  async executorCall(message, { signal, stream, onToolStart, onToolEnd }) {
    let errorMessage = '';
    const maxAttempts = 1;

    for (let attempts = 1; attempts <= maxAttempts; attempts++) {
      const errorInput = buildErrorInput({
        message,
        errorMessage,
        actions: this.actions,
        functionsAgent: this.functionsAgent,
      });
      const input = attempts > 1 ? errorInput : message;

      logger.debug(`[PluginsClient] Attempt ${attempts} of ${maxAttempts}`);

      if (errorMessage.length > 0) {
        logger.debug('[PluginsClient] Caught error, input: ' + JSON.stringify(input));
      }

      try {
        this.result = await this.executor.call({ input, signal }, [
          {
            async handleToolStart(...args) {
              await onToolStart(...args);
            },
            async handleToolEnd(...args) {
              await onToolEnd(...args);
            },
            async handleLLMEnd(output) {
              const { generations } = output;
              const { text } = generations[0][0];
              if (text && typeof stream === 'function') {
                await stream(text);
              }
            },
          },
        ]);
        break; // Exit the loop if the function call is successful
      } catch (err) {
        logger.error('[PluginsClient] executorCall error:', err);
        if (attempts === maxAttempts) {
          const { run } = this.runManager.getRunByConversationId(this.conversationId);
          const defaultOutput = `Encountered an error while attempting to respond: ${err.message}`;
          this.result.output = run && run.error ? run.error : defaultOutput;
          this.result.errorMessage = run && run.error ? run.error : err.message;
          this.result.intermediateSteps = this.actions;
          break;
        }
      }
    }
  }

  /**
   *
   * @param {TMessage} responseMessage
   * @param {Partial<TMessage>} saveOptions
   * @param {string} user
   * @returns
   */
  async handleResponseMessage(responseMessage, saveOptions, user) {
    const { output, errorMessage, ...result } = this.result;
    logger.debug('[PluginsClient][handleResponseMessage] Output:', {
      output,
      errorMessage,
      ...result,
    });
    const { error } = responseMessage;
    if (!error) {
      responseMessage.tokenCount = this.getTokenCountForResponse(responseMessage);
      responseMessage.completionTokens = this.getTokenCount(responseMessage.text);
    }

    // Record usage only when completion is skipped as it is already recorded in the agent phase.
    if (!this.agentOptions.skipCompletion && !error) {
      await this.recordTokenUsage(responseMessage);
    }

    const databasePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user);
    delete responseMessage.tokenCount;
    return { ...responseMessage, ...result, databasePromise };
  }
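
  /**
   * Plugin-aware sendMessage: filters tools against server include/exclude
   * lists, falls back to the plain completion path when no tools remain (or
   * the message is an edit), and otherwise runs the agent phase followed,
   * unless `skipCompletion` is set, by a completion phase over the agent output.
   */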
  async sendMessage(message, opts = {}) {
    /** @type {Promise<TMessage>} */
    let userMessagePromise;
    /** @type {{ filteredTools: string[], includedTools: string[] }} */
    const { filteredTools = [], includedTools = [] } = this.options.req.app.locals;

    if (includedTools.length > 0) {
      const tools = this.options.tools.filter((plugin) => includedTools.includes(plugin));
      this.options.tools = tools;
    } else {
      const tools = this.options.tools.filter((plugin) => !filteredTools.includes(plugin));
      this.options.tools = tools;
    }

    // If a message is edited, no tools can be used.
    const completionMode = this.options.tools.length === 0 || opts.isEdited;
    if (completionMode) {
      this.setOptions(opts);
      return super.sendMessage(message, opts);
    }

    logger.debug('[PluginsClient] sendMessage', { userMessageText: message, opts });
    const {
      user,
      conversationId,
      responseMessageId,
      saveOptions,
      userMessage,
      onAgentAction,
      onChainEnd,
      onToolStart,
      onToolEnd,
    } = await this.handleStartMethods(message, opts);

    if (opts.progressCallback) {
      opts.onProgress = opts.progressCallback.call(null, {
        ...(opts.progressOptions ?? {}),
        parentMessageId: userMessage.messageId,
        messageId: responseMessageId,
      });
    }

    this.currentMessages.push(userMessage);

    let {
      prompt: payload,
      tokenCountMap,
      promptTokens,
    } = await this.buildMessages(
      this.currentMessages,
      userMessage.messageId,
      this.getBuildMessagesOptions({
        promptPrefix: null,
        abortController: this.abortController,
      }),
    );

    if (tokenCountMap) {
      logger.debug('[PluginsClient] tokenCountMap', { tokenCountMap });
      if (tokenCountMap[userMessage.messageId]) {
        userMessage.tokenCount = tokenCountMap[userMessage.messageId];
        logger.debug('[PluginsClient] userMessage.tokenCount', userMessage.tokenCount);
      }
      this.handleTokenCountMap(tokenCountMap);
    }

    this.result = {};
    if (payload) {
      this.currentMessages = payload;
    }

    if (!this.skipSaveUserMessage) {
      userMessagePromise = this.saveMessageToDatabase(userMessage, saveOptions, user);
      if (typeof opts?.getReqData === 'function') {
        opts.getReqData({
          userMessagePromise,
        });
      }
    }

    const balance = this.options.req?.app?.locals?.balance;
    if (balance?.enabled) {
      await checkBalance({
        req: this.options.req,
        res: this.options.res,
        txData: {
          user: this.user,
          tokenType: 'prompt',
          amount: promptTokens,
          debug: this.options.debug,
          model: this.modelOptions.model,
          endpoint: EModelEndpoint.openAI,
        },
      });
    }

    const responseMessage = {
      endpoint: EModelEndpoint.gptPlugins,
      iconURL: this.options.iconURL,
      messageId: responseMessageId,
      conversationId,
      parentMessageId: userMessage.messageId,
      isCreatedByUser: false,
      model: this.modelOptions.model,
      sender: this.sender,
      promptTokens,
    };

    await this.initialize({
      user,
      message,
      onAgentAction,
      onChainEnd,
      signal: this.abortController.signal,
      onProgress: opts.onProgress,
    });

    // const stream = async (text) => {
    //   await this.generateTextStream.call(this, text, opts.onProgress, { delay: 1 });
    // };
    await this.executorCall(message, {
      signal: this.abortController.signal,
      // stream,
      onToolStart,
      onToolEnd,
    });

    // If message was aborted mid-generation
    if (this.result?.errorMessage?.length > 0 && this.result?.errorMessage?.includes('cancel')) {
      responseMessage.text = 'Cancelled.';
      return await this.handleResponseMessage(responseMessage, saveOptions, user);
    }

    // If error occurred during generation (likely token_balance)
    if (this.result?.errorMessage?.length > 0) {
      responseMessage.error = true;
      responseMessage.text = this.result.output;
      return await this.handleResponseMessage(responseMessage, saveOptions, user);
    }

    if (this.agentOptions.skipCompletion && this.result.output && this.functionsAgent) {
      const partialText = opts.getPartialText();
      const trimmedPartial = opts.getPartialText().replaceAll(':::plugin:::\n', '');
      responseMessage.text =
        trimmedPartial.length === 0 ? `${partialText}${this.result.output}` : partialText;
      addImages(this.result.intermediateSteps, responseMessage);
      await this.generateTextStream(this.result.output, opts.onProgress, { delay: 5 });
      return await this.handleResponseMessage(responseMessage, saveOptions, user);
    }

    if (this.agentOptions.skipCompletion && this.result.output) {
      responseMessage.text = this.result.output;
      addImages(this.result.intermediateSteps, responseMessage);
      await this.generateTextStream(this.result.output, opts.onProgress, { delay: 5 });
      return await this.handleResponseMessage(responseMessage, saveOptions, user);
    }

    logger.debug('[PluginsClient] Completion phase: this.result', this.result);

    const promptPrefix = buildPromptPrefix({
      result: this.result,
      message,
      functionsAgent: this.functionsAgent,
    });

    logger.debug('[PluginsClient]', { promptPrefix });

    payload = await this.buildCompletionPrompt({
      messages: this.currentMessages,
      promptPrefix,
    });

    logger.debug('[PluginsClient] buildCompletionPrompt Payload', payload);
    responseMessage.text = await this.sendCompletion(payload, opts);
    return await this.handleResponseMessage(responseMessage, saveOptions, user);
  }
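
  /**
   * Assembles the completion-phase prompt in instruct style: the prefix
   * becomes system instructions, chat history is walked backwards within
   * `maxPromptTokens`, and gpt-3 models get role downgrades to 'user'.
   */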
  async buildCompletionPrompt({ messages, promptPrefix: _promptPrefix }) {
    logger.debug('[PluginsClient] buildCompletionPrompt messages', messages);

    const orderedMessages = messages;
    let promptPrefix = _promptPrefix.trim();
    // If the prompt prefix doesn't end with the end token, add it.
    if (!promptPrefix.endsWith(`${this.endToken}`)) {
      promptPrefix = `${promptPrefix.trim()}${this.endToken}\n\n`;
    }
    promptPrefix = `${this.startToken}Instructions:\n${promptPrefix}`;
    const promptSuffix = `${this.startToken}${this.chatGptLabel ?? 'Assistant'}:\n`;

    const instructionsPayload = {
      role: 'system',
      content: promptPrefix,
    };

    const messagePayload = {
      role: 'system',
      content: promptSuffix,
    };

    if (this.isGpt3) {
      instructionsPayload.role = 'user';
      messagePayload.role = 'user';
      instructionsPayload.content += `\n${promptSuffix}`;
    }

    // testing if this works with browser endpoint
    if (!this.isGpt3 && this.options.reverseProxyUrl) {
      instructionsPayload.role = 'user';
    }

    let currentTokenCount =
      this.getTokenCountForMessage(instructionsPayload) +
      this.getTokenCountForMessage(messagePayload);

    let promptBody = '';
    const maxTokenCount = this.maxPromptTokens;
    // Iterate backwards through the messages, adding them to the prompt until we reach the max token count.
    // Do this within a recursive async function so that it doesn't block the event loop for too long.
    const buildPromptBody = async () => {
      if (currentTokenCount < maxTokenCount && orderedMessages.length > 0) {
        const message = orderedMessages.pop();
        const isCreatedByUser = message.isCreatedByUser || message.role?.toLowerCase() === 'user';
        const roleLabel = isCreatedByUser ? this.userLabel : this.chatGptLabel;
        let messageString = `${this.startToken}${roleLabel}:\n${
          message.text ?? message.content ?? ''
        }${this.endToken}\n`;
        let newPromptBody = `${messageString}${promptBody}`;

        const tokenCountForMessage = this.getTokenCount(messageString);
        const newTokenCount = currentTokenCount + tokenCountForMessage;
        if (newTokenCount > maxTokenCount) {
          if (promptBody) {
            // This message would put us over the token limit, so don't add it.
            return false;
          }
          // This is the first message, so we can't add it. Just throw an error.
          throw new Error(
            `Prompt is too long. Max token count is ${maxTokenCount}, but prompt is ${newTokenCount} tokens long.`,
          );
        }
        promptBody = newPromptBody;
        currentTokenCount = newTokenCount;
        // wait for next tick to avoid blocking the event loop
        await new Promise((resolve) => setTimeout(resolve, 0));
        return buildPromptBody();
      }
      return true;
    };

    await buildPromptBody();
    const prompt = promptBody;
    messagePayload.content = prompt;
    // Add 2 tokens for metadata after all messages have been counted.
    currentTokenCount += 2;

    if (this.isGpt3 && messagePayload.content.length > 0) {
      const context = 'Chat History:\n';
      messagePayload.content = `${context}${prompt}`;
      currentTokenCount += this.getTokenCount(context);
    }

    // Use up to `this.maxContextTokens` tokens (prompt + response), but try to leave `this.maxTokens` tokens for the response.
    this.modelOptions.max_tokens = Math.min(
      this.maxContextTokens - currentTokenCount,
      this.maxResponseTokens,
    );

    if (this.isGpt3) {
      messagePayload.content += promptSuffix;
      return [instructionsPayload, messagePayload];
    }

    const result = [messagePayload, instructionsPayload];

    if (this.functionsAgent && !this.isGpt3) {
      result[1].content = `${result[1].content}\n${this.startToken}${this.chatGptLabel}:\nSure thing! Here is the output you requested:\n`;
    }

    return result.filter((message) => message.content.length > 0);
  }
}

module.exports = PluginsClient;
@@ -1,15 +1,11 @@
-const ChatGPTClient = require('./ChatGPTClient');
 const OpenAIClient = require('./OpenAIClient');
-const PluginsClient = require('./PluginsClient');
 const GoogleClient = require('./GoogleClient');
 const TextStream = require('./TextStream');
 const AnthropicClient = require('./AnthropicClient');
 const toolUtils = require('./tools/util');

 module.exports = {
-  ChatGPTClient,
   OpenAIClient,
-  PluginsClient,
   GoogleClient,
   TextStream,
   AnthropicClient,
@@ -96,35 +96,35 @@ function createContextHandlers(req, userMessageContent)
   resolvedQueries.length === 0
     ? '\n\tThe semantic search did not return any results.'
     : resolvedQueries
         .map((queryResult, index) => {
           const file = processedFiles[index];
           let contextItems = queryResult.data;

           const generateContext = (currentContext) =>
             `
          <file>
            <filename>${file.filename}</filename>
            <context>${currentContext}
            </context>
          </file>`;

           if (useFullContext) {
             return generateContext(`\n${contextItems}`);
           }

           contextItems = queryResult.data
             .map((item) => {
               const pageContent = item[0].page_content;
               return `
          <contextItem>
            <![CDATA[${pageContent?.trim()}]]>
          </contextItem>`;
             })
             .join('');

           return generateContext(contextItems);
         })
         .join('');

   if (useFullContext) {
     const prompt = `${header}
@@ -531,44 +531,6 @@ describe('OpenAIClient', () => {
     });
   });

-  describe('sendMessage/getCompletion/chatCompletion', () => {
-    afterEach(() => {
-      delete process.env.AZURE_OPENAI_DEFAULT_MODEL;
-      delete process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME;
-    });
-
-    it('should call getCompletion and fetchEventSource when using a text/instruct model', async () => {
-      const model = 'text-davinci-003';
-      const onProgress = jest.fn().mockImplementation(() => ({}));
-
-      const testClient = new OpenAIClient('test-api-key', {
-        ...defaultOptions,
-        modelOptions: { model },
-      });
-
-      const getCompletion = jest.spyOn(testClient, 'getCompletion');
-      await testClient.sendMessage('Hi mom!', { onProgress });
-
-      expect(getCompletion).toHaveBeenCalled();
-      expect(getCompletion.mock.calls.length).toBe(1);
-
-      expect(getCompletion.mock.calls[0][0]).toBe('||>User:\nHi mom!\n||>Assistant:\n');
-
-      expect(fetchEventSource).toHaveBeenCalled();
-      expect(fetchEventSource.mock.calls.length).toBe(1);
-
-      // Check if the first argument (url) is correct
-      const firstCallArgs = fetchEventSource.mock.calls[0];
-
-      const expectedURL = 'https://api.openai.com/v1/completions';
-      expect(firstCallArgs[0]).toBe(expectedURL);
-
-      const requestBody = JSON.parse(firstCallArgs[1].body);
-      expect(requestBody).toHaveProperty('model');
-      expect(requestBody.model).toBe(model);
-    });
-  });
-
   describe('checkVisionRequest functionality', () => {
     let client;
     const attachments = [{ type: 'image/png' }];
@@ -1,314 +0,0 @@
const crypto = require('crypto');
const { Constants } = require('librechat-data-provider');
const { HumanMessage, AIMessage } = require('@langchain/core/messages');
const PluginsClient = require('../PluginsClient');

jest.mock('~/db/connect');
jest.mock('~/models/Conversation', () => {
  return function () {
    return {
      save: jest.fn(),
      deleteConvos: jest.fn(),
    };
  };
});

const defaultAzureOptions = {
  azureOpenAIApiInstanceName: 'your-instance-name',
  azureOpenAIApiDeploymentName: 'your-deployment-name',
  azureOpenAIApiVersion: '2020-07-01-preview',
};
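
// These tests stub loadHistory and sendMessage, so the suite exercises
// conversationId/parentMessageId bookkeeping without any network calls.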
describe('PluginsClient', () => {
  let TestAgent;
  let options = {
    tools: [],
    modelOptions: {
      model: 'gpt-3.5-turbo',
      temperature: 0,
      max_tokens: 2,
    },
    agentOptions: {
      model: 'gpt-3.5-turbo',
    },
  };
  let parentMessageId;
  let conversationId;
  const fakeMessages = [];
  const userMessage = 'Hello, ChatGPT!';
  const apiKey = 'fake-api-key';

  beforeEach(() => {
    TestAgent = new PluginsClient(apiKey, options);
    TestAgent.loadHistory = jest
      .fn()
      .mockImplementation((conversationId, parentMessageId = null) => {
        if (!conversationId) {
          TestAgent.currentMessages = [];
          return Promise.resolve([]);
        }

        const orderedMessages = TestAgent.constructor.getMessagesForConversation({
          messages: fakeMessages,
          parentMessageId,
        });

        const chatMessages = orderedMessages.map((msg) =>
          msg?.isCreatedByUser || msg?.role?.toLowerCase() === 'user'
            ? new HumanMessage(msg.text)
            : new AIMessage(msg.text),
        );

        TestAgent.currentMessages = orderedMessages;
        return Promise.resolve(chatMessages);
      });
    TestAgent.sendMessage = jest.fn().mockImplementation(async (message, opts = {}) => {
      if (opts && typeof opts === 'object') {
        TestAgent.setOptions(opts);
      }
      const conversationId = opts.conversationId || crypto.randomUUID();
      const parentMessageId = opts.parentMessageId || Constants.NO_PARENT;
      const userMessageId = opts.overrideParentMessageId || crypto.randomUUID();
      this.pastMessages = await TestAgent.loadHistory(
        conversationId,
        TestAgent.options?.parentMessageId,
      );

      const userMessage = {
        text: message,
        sender: 'ChatGPT',
        isCreatedByUser: true,
        messageId: userMessageId,
        parentMessageId,
        conversationId,
      };

      const response = {
        sender: 'ChatGPT',
        text: 'Hello, User!',
        isCreatedByUser: false,
        messageId: crypto.randomUUID(),
        parentMessageId: userMessage.messageId,
        conversationId,
      };

      fakeMessages.push(userMessage);
      fakeMessages.push(response);
      return response;
    });
  });

  test('initializes PluginsClient without crashing', () => {
    expect(TestAgent).toBeInstanceOf(PluginsClient);
  });

  test('check setOptions function', () => {
    expect(TestAgent.agentIsGpt3).toBe(true);
  });

  describe('sendMessage', () => {
    test('sendMessage should return a response message', async () => {
      const expectedResult = expect.objectContaining({
        sender: 'ChatGPT',
        text: expect.any(String),
        isCreatedByUser: false,
        messageId: expect.any(String),
        parentMessageId: expect.any(String),
        conversationId: expect.any(String),
      });

      const response = await TestAgent.sendMessage(userMessage);
      parentMessageId = response.messageId;
      conversationId = response.conversationId;
      expect(response).toEqual(expectedResult);
    });

    test('sendMessage should work with provided conversationId and parentMessageId', async () => {
      const userMessage = 'Second message in the conversation';
      const opts = {
        conversationId,
        parentMessageId,
      };

      const expectedResult = expect.objectContaining({
        sender: 'ChatGPT',
        text: expect.any(String),
        isCreatedByUser: false,
        messageId: expect.any(String),
        parentMessageId: expect.any(String),
        conversationId: opts.conversationId,
      });

      const response = await TestAgent.sendMessage(userMessage, opts);
      parentMessageId = response.messageId;
      expect(response.conversationId).toEqual(conversationId);
      expect(response).toEqual(expectedResult);
    });

    test('should return chat history', async () => {
      const chatMessages = await TestAgent.loadHistory(conversationId, parentMessageId);
      expect(TestAgent.currentMessages).toHaveLength(4);
      expect(chatMessages[0].text).toEqual(userMessage);
    });
  });

  describe('getFunctionModelName', () => {
    let client;

    beforeEach(() => {
      client = new PluginsClient('dummy_api_key');
|
||||
});
|
||||
|
||||
test('should return the input when it includes a dash followed by four digits', () => {
|
||||
expect(client.getFunctionModelName('-1234')).toBe('-1234');
|
||||
expect(client.getFunctionModelName('gpt-4-5678-preview')).toBe('gpt-4-5678-preview');
|
||||
});
|
||||
|
||||
test('should return the input for all function-capable models (`0613` models and above)', () => {
|
||||
expect(client.getFunctionModelName('gpt-4-0613')).toBe('gpt-4-0613');
|
||||
expect(client.getFunctionModelName('gpt-4-32k-0613')).toBe('gpt-4-32k-0613');
|
||||
expect(client.getFunctionModelName('gpt-3.5-turbo-0613')).toBe('gpt-3.5-turbo-0613');
|
||||
expect(client.getFunctionModelName('gpt-3.5-turbo-16k-0613')).toBe('gpt-3.5-turbo-16k-0613');
|
||||
expect(client.getFunctionModelName('gpt-3.5-turbo-1106')).toBe('gpt-3.5-turbo-1106');
|
||||
expect(client.getFunctionModelName('gpt-4-1106-preview')).toBe('gpt-4-1106-preview');
|
||||
expect(client.getFunctionModelName('gpt-4-1106')).toBe('gpt-4-1106');
|
||||
});
|
||||
|
||||
test('should return the corresponding model if input is non-function capable (`0314` models)', () => {
|
||||
expect(client.getFunctionModelName('gpt-4-0314')).toBe('gpt-4');
|
||||
expect(client.getFunctionModelName('gpt-4-32k-0314')).toBe('gpt-4');
|
||||
expect(client.getFunctionModelName('gpt-3.5-turbo-0314')).toBe('gpt-3.5-turbo');
|
||||
expect(client.getFunctionModelName('gpt-3.5-turbo-16k-0314')).toBe('gpt-3.5-turbo');
|
||||
});
|
||||
|
||||
test('should return "gpt-3.5-turbo" when the input includes "gpt-3.5-turbo"', () => {
|
||||
expect(client.getFunctionModelName('test gpt-3.5-turbo model')).toBe('gpt-3.5-turbo');
|
||||
});
|
||||
|
||||
test('should return "gpt-4" when the input includes "gpt-4"', () => {
|
||||
expect(client.getFunctionModelName('testing gpt-4')).toBe('gpt-4');
|
||||
});
|
||||
|
||||
test('should return "gpt-3.5-turbo" for input that does not meet any specific condition', () => {
|
||||
expect(client.getFunctionModelName('random string')).toBe('gpt-3.5-turbo');
|
||||
expect(client.getFunctionModelName('')).toBe('gpt-3.5-turbo');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Azure OpenAI tests specific to Plugins', () => {
|
||||
// TODO: add more tests for Azure OpenAI integration with Plugins
|
||||
// let client;
|
||||
// beforeEach(() => {
|
||||
// client = new PluginsClient('dummy_api_key');
|
||||
// });
|
||||
|
||||
test('should not call getFunctionModelName when azure options are set', () => {
|
||||
const spy = jest.spyOn(PluginsClient.prototype, 'getFunctionModelName');
|
||||
const model = 'gpt-4-turbo';
|
||||
|
||||
// note, without the azure change in PR #1766, `getFunctionModelName` is called twice
|
||||
const testClient = new PluginsClient('dummy_api_key', {
|
||||
agentOptions: {
|
||||
model,
|
||||
agent: 'functions',
|
||||
},
|
||||
azure: defaultAzureOptions,
|
||||
});
|
||||
|
||||
expect(spy).not.toHaveBeenCalled();
|
||||
expect(testClient.agentOptions.model).toBe(model);
|
||||
|
||||
spy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('sendMessage with filtered tools', () => {
|
||||
let TestAgent;
|
||||
const apiKey = 'fake-api-key';
|
||||
const mockTools = [{ name: 'tool1' }, { name: 'tool2' }, { name: 'tool3' }, { name: 'tool4' }];
|
||||
|
||||
beforeEach(() => {
|
||||
TestAgent = new PluginsClient(apiKey, {
|
||||
tools: mockTools,
|
||||
modelOptions: {
|
||||
model: 'gpt-3.5-turbo',
|
||||
temperature: 0,
|
||||
max_tokens: 2,
|
||||
},
|
||||
agentOptions: {
|
||||
model: 'gpt-3.5-turbo',
|
||||
},
|
||||
});
|
||||
|
||||
TestAgent.options.req = {
|
||||
app: {
|
||||
locals: {},
|
||||
},
|
||||
};
|
||||
|
||||
TestAgent.sendMessage = jest.fn().mockImplementation(async () => {
|
||||
const { filteredTools = [], includedTools = [] } = TestAgent.options.req.app.locals;
|
||||
|
||||
if (includedTools.length > 0) {
|
||||
const tools = TestAgent.options.tools.filter((plugin) =>
|
||||
includedTools.includes(plugin.name),
|
||||
);
|
||||
TestAgent.options.tools = tools;
|
||||
} else {
|
||||
const tools = TestAgent.options.tools.filter(
|
||||
(plugin) => !filteredTools.includes(plugin.name),
|
||||
);
|
||||
TestAgent.options.tools = tools;
|
||||
}
|
||||
|
||||
return {
|
||||
text: 'Mocked response',
|
||||
tools: TestAgent.options.tools,
|
||||
};
|
||||
});
|
||||
});
|
||||
|
||||
test('should filter out tools when filteredTools is provided', async () => {
|
||||
TestAgent.options.req.app.locals.filteredTools = ['tool1', 'tool3'];
|
||||
const response = await TestAgent.sendMessage('Test message');
|
||||
expect(response.tools).toHaveLength(2);
|
||||
expect(response.tools).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({ name: 'tool2' }),
|
||||
expect.objectContaining({ name: 'tool4' }),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
test('should only include specified tools when includedTools is provided', async () => {
|
||||
TestAgent.options.req.app.locals.includedTools = ['tool2', 'tool4'];
|
||||
const response = await TestAgent.sendMessage('Test message');
|
||||
expect(response.tools).toHaveLength(2);
|
||||
expect(response.tools).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({ name: 'tool2' }),
|
||||
expect.objectContaining({ name: 'tool4' }),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
test('should prioritize includedTools over filteredTools', async () => {
|
||||
TestAgent.options.req.app.locals.filteredTools = ['tool1', 'tool3'];
|
||||
TestAgent.options.req.app.locals.includedTools = ['tool1', 'tool2'];
|
||||
const response = await TestAgent.sendMessage('Test message');
|
||||
expect(response.tools).toHaveLength(2);
|
||||
expect(response.tools).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({ name: 'tool1' }),
|
||||
expect.objectContaining({ name: 'tool2' }),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
test('should not modify tools when no filters are provided', async () => {
|
||||
const response = await TestAgent.sendMessage('Test message');
|
||||
expect(response.tools).toHaveLength(4);
|
||||
expect(response.tools).toEqual(expect.arrayContaining(mockTools));
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -107,6 +107,12 @@ const getImageEditPromptDescription = () => {
  return process.env.IMAGE_EDIT_OAI_PROMPT_DESCRIPTION || DEFAULT_IMAGE_EDIT_PROMPT_DESCRIPTION;
};

function createAbortHandler() {
  return function () {
    logger.debug('[ImageGenOAI] Image generation aborted');
  };
}

/**
 * Creates OpenAI Image tools (generation and editing)
 * @param {Object} fields - Configuration fields
@@ -201,10 +207,18 @@ function createOpenAIImageTools(fields = {}) {
      }

      let resp;
      /** @type {AbortSignal} */
      let derivedSignal = null;
      /** @type {() => void} */
      let abortHandler = null;

      try {
        const derivedSignal = runnableConfig?.signal
          ? AbortSignal.any([runnableConfig.signal])
          : undefined;
        if (runnableConfig?.signal) {
          derivedSignal = AbortSignal.any([runnableConfig.signal]);
          abortHandler = createAbortHandler();
          derivedSignal.addEventListener('abort', abortHandler, { once: true });
        }

        resp = await openai.images.generate(
          {
            model: 'gpt-image-1',
@@ -228,6 +242,10 @@ function createOpenAIImageTools(fields = {}) {
        logAxiosError({ error, message });
        return returnValue(`Something went wrong when trying to generate the image. The OpenAI API may be unavailable:
Error Message: ${error.message}`);
      } finally {
        if (abortHandler && derivedSignal) {
          derivedSignal.removeEventListener('abort', abortHandler);
        }
      }

      if (!resp) {
@@ -409,10 +427,17 @@ Error Message: ${error.message}`);
        headers['Authorization'] = `Bearer ${apiKey}`;
      }

      /** @type {AbortSignal} */
      let derivedSignal = null;
      /** @type {() => void} */
      let abortHandler = null;

      try {
        const derivedSignal = runnableConfig?.signal
          ? AbortSignal.any([runnableConfig.signal])
          : undefined;
        if (runnableConfig?.signal) {
          derivedSignal = AbortSignal.any([runnableConfig.signal]);
          abortHandler = createAbortHandler();
          derivedSignal.addEventListener('abort', abortHandler, { once: true });
        }

        /** @type {import('axios').AxiosRequestConfig} */
        const axiosConfig = {
@@ -467,6 +492,10 @@ Error Message: ${error.message}`);
        logAxiosError({ error, message });
        return returnValue(`Something went wrong when trying to edit the image. The OpenAI API may be unavailable:
Error Message: ${error.message || 'Unknown error'}`);
      } finally {
        if (abortHandler && derivedSignal) {
          derivedSignal.removeEventListener('abort', abortHandler);
        }
      }
    },
    {

@@ -1,8 +1,11 @@
const mongoose = require('mongoose');
const { MeiliSearch } = require('meilisearch');
const { logger } = require('@librechat/data-schemas');
const { FlowStateManager } = require('@librechat/api');
const { CacheKeys } = require('librechat-data-provider');

const { isEnabled } = require('~/server/utils');
const { getLogStores } = require('~/cache');

const Conversation = mongoose.models.Conversation;
const Message = mongoose.models.Message;
@@ -28,43 +31,123 @@ class MeiliSearchClient {
  }
}

/**
 * Performs the actual sync operations for messages and conversations
 */
async function performSync() {
  const client = MeiliSearchClient.getInstance();

  const { status } = await client.health();
  if (status !== 'available') {
    throw new Error('Meilisearch not available');
  }

  if (indexingDisabled === true) {
    logger.info('[indexSync] Indexing is disabled, skipping...');
    return { messagesSync: false, convosSync: false };
  }

  let messagesSync = false;
  let convosSync = false;

  // Check if we need to sync messages
  const messageProgress = await Message.getSyncProgress();
  if (!messageProgress.isComplete) {
    logger.info(
      `[indexSync] Messages need syncing: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments} indexed`,
    );

    // Check if we should do a full sync or incremental
    const messageCount = await Message.countDocuments();
    const messagesIndexed = messageProgress.totalProcessed;
    const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);

    if (messageCount - messagesIndexed > syncThreshold) {
      logger.info('[indexSync] Starting full message sync due to large difference');
      await Message.syncWithMeili();
      messagesSync = true;
    } else if (messageCount !== messagesIndexed) {
      logger.warn('[indexSync] Messages out of sync, performing incremental sync');
      await Message.syncWithMeili();
      messagesSync = true;
    }
  } else {
    logger.info(
      `[indexSync] Messages are fully synced: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments}`,
    );
  }

  // Check if we need to sync conversations
  const convoProgress = await Conversation.getSyncProgress();
  if (!convoProgress.isComplete) {
    logger.info(
      `[indexSync] Conversations need syncing: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments} indexed`,
    );

    const convoCount = await Conversation.countDocuments();
    const convosIndexed = convoProgress.totalProcessed;
    const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);

    if (convoCount - convosIndexed > syncThreshold) {
      logger.info('[indexSync] Starting full conversation sync due to large difference');
      await Conversation.syncWithMeili();
      convosSync = true;
    } else if (convoCount !== convosIndexed) {
      logger.warn('[indexSync] Convos out of sync, performing incremental sync');
      await Conversation.syncWithMeili();
      convosSync = true;
    }
  } else {
    logger.info(
      `[indexSync] Conversations are fully synced: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments}`,
    );
  }

  return { messagesSync, convosSync };
}

/**
 * Main index sync function that uses FlowStateManager to prevent concurrent execution
 */
async function indexSync() {
  if (!searchEnabled) {
    return;
  }
  try {
    const client = MeiliSearchClient.getInstance();

    const { status } = await client.health();
    if (status !== 'available') {
      throw new Error('Meilisearch not available');
  logger.info('[indexSync] Starting index synchronization check...');

  try {
    // Get or create FlowStateManager instance
    const flowsCache = getLogStores(CacheKeys.FLOWS);
    if (!flowsCache) {
      logger.warn('[indexSync] Flows cache not available, falling back to direct sync');
      return await performSync();
    }

    if (indexingDisabled === true) {
      logger.info('[indexSync] Indexing is disabled, skipping...');
    const flowManager = new FlowStateManager(flowsCache, {
      ttl: 60000 * 10, // 10 minutes TTL for sync operations
    });

    // Use a unique flow ID for the sync operation
    const flowId = 'meili-index-sync';
    const flowType = 'MEILI_SYNC';

    // This will only execute the handler if no other instance is running the sync
    const result = await flowManager.createFlowWithHandler(flowId, flowType, performSync);

    if (result.messagesSync || result.convosSync) {
      logger.info('[indexSync] Sync completed successfully');
    } else {
      logger.debug('[indexSync] No sync was needed');
    }

    return result;
  } catch (err) {
    if (err.message.includes('flow already exists')) {
      logger.info('[indexSync] Sync already running on another instance');
      return;
    }

    const messageCount = await Message.countDocuments();
    const convoCount = await Conversation.countDocuments();
    const messages = await client.index('messages').getStats();
    const convos = await client.index('convos').getStats();
    const messagesIndexed = messages.numberOfDocuments;
    const convosIndexed = convos.numberOfDocuments;

    logger.debug(`[indexSync] There are ${messageCount} messages and ${messagesIndexed} indexed`);
    logger.debug(`[indexSync] There are ${convoCount} convos and ${convosIndexed} indexed`);

    if (messageCount !== messagesIndexed) {
      logger.debug('[indexSync] Messages out of sync, indexing');
      Message.syncWithMeili();
    }

    if (convoCount !== convosIndexed) {
      logger.debug('[indexSync] Convos out of sync, indexing');
      Conversation.syncWithMeili();
    }
  } catch (err) {
    if (err.message.includes('not found')) {
      logger.debug('[indexSync] Creating indices...');
      currentTimeout = setTimeout(async () => {

@@ -70,6 +70,9 @@ const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _
  if (ephemeralAgent?.execute_code === true) {
    tools.push(Tools.execute_code);
  }
  if (ephemeralAgent?.file_search === true) {
    tools.push(Tools.file_search);
  }
  if (ephemeralAgent?.web_search === true) {
    tools.push(Tools.web_search);
  }

@@ -43,7 +43,7 @@ describe('models/Agent', () => {
    const mongoUri = mongoServer.getUri();
    Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
    await mongoose.connect(mongoUri);
  });
  }, 20000);

  afterAll(async () => {
    await mongoose.disconnect();
@@ -413,7 +413,7 @@ describe('models/Agent', () => {
    const mongoUri = mongoServer.getUri();
    Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
    await mongoose.connect(mongoUri);
  });
  }, 20000);

  afterAll(async () => {
    await mongoose.disconnect();
@@ -670,7 +670,7 @@ describe('models/Agent', () => {
    const mongoUri = mongoServer.getUri();
    Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
    await mongoose.connect(mongoUri);
  });
  }, 20000);

  afterAll(async () => {
    await mongoose.disconnect();
@@ -1332,7 +1332,7 @@ describe('models/Agent', () => {
    const mongoUri = mongoServer.getUri();
    Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
    await mongoose.connect(mongoUri);
  });
  }, 20000);

  afterAll(async () => {
    await mongoose.disconnect();
@@ -1514,7 +1514,7 @@ describe('models/Agent', () => {
    const mongoUri = mongoServer.getUri();
    Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
    await mongoose.connect(mongoUri);
  });
  }, 20000);

  afterAll(async () => {
    await mongoose.disconnect();
@@ -1798,7 +1798,7 @@ describe('models/Agent', () => {
    const mongoUri = mongoServer.getUri();
    Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
    await mongoose.connect(mongoUri);
  });
  }, 20000);

  afterAll(async () => {
    await mongoose.disconnect();
@@ -2350,7 +2350,7 @@ describe('models/Agent', () => {
    const mongoUri = mongoServer.getUri();
    Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
    await mongoose.connect(mongoUri);
  });
  }, 20000);

  afterAll(async () => {
    await mongoose.disconnect();

@@ -1,5 +1,5 @@
const { logger } = require('@librechat/data-schemas');
const { EToolResources } = require('librechat-data-provider');
const { EToolResources, FileContext } = require('librechat-data-provider');
const { File } = require('~/db/models');

/**
@@ -32,19 +32,19 @@ const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => {
 * @returns {Promise<Array<MongoFile>>} Files that match the criteria
 */
const getToolFilesByIds = async (fileIds, toolResourceSet) => {
  if (!fileIds || !fileIds.length) {
  if (!fileIds || !fileIds.length || !toolResourceSet?.size) {
    return [];
  }

  try {
    const filter = {
      file_id: { $in: fileIds },
      $or: [],
    };

    if (toolResourceSet.size) {
      filter.$or = [];
    if (toolResourceSet.has(EToolResources.ocr)) {
      filter.$or.push({ text: { $exists: true, $ne: null }, context: FileContext.agents });
    }

    if (toolResourceSet.has(EToolResources.file_search)) {
      filter.$or.push({ embedded: true });
    }

@@ -48,14 +48,13 @@
    "@langchain/google-genai": "^0.2.13",
    "@langchain/google-vertexai": "^0.2.13",
    "@langchain/textsplitters": "^0.1.0",
    "@librechat/agents": "^2.4.41",
    "@librechat/agents": "^2.4.42",
    "@librechat/api": "*",
    "@librechat/data-schemas": "*",
    "@node-saml/passport-saml": "^5.0.0",
    "@waylaidwanderer/fetch-event-source": "^3.0.1",
    "axios": "^1.8.2",
    "bcryptjs": "^2.4.3",
    "cohere-ai": "^7.9.1",
    "compression": "^1.7.4",
    "connect-redis": "^7.1.0",
    "cookie": "^0.7.2",

@@ -169,9 +169,6 @@ function disposeClient(client) {
    client.isGenerativeModel = null;
  }
  // Properties specific to OpenAIClient
  if (client.ChatGPTClient) {
    client.ChatGPTClient = null;
  }
  if (client.completionsUrl) {
    client.completionsUrl = null;
  }

@@ -1,282 +0,0 @@
const { getResponseSender, Constants } = require('librechat-data-provider');
const {
  handleAbortError,
  createAbortController,
  cleanupAbortController,
} = require('~/server/middleware');
const {
  disposeClient,
  processReqData,
  clientRegistry,
  requestDataMap,
} = require('~/server/cleanup');
const { sendMessage, createOnProgress } = require('~/server/utils');
const { saveMessage } = require('~/models');
const { logger } = require('~/config');

const AskController = async (req, res, next, initializeClient, addTitle) => {
  let {
    text,
    endpointOption,
    conversationId,
    modelDisplayLabel,
    parentMessageId = null,
    overrideParentMessageId = null,
  } = req.body;

  let client = null;
  let abortKey = null;
  let cleanupHandlers = [];
  let clientRef = null;

  logger.debug('[AskController]', {
    text,
    conversationId,
    ...endpointOption,
    modelsConfig: endpointOption?.modelsConfig ? 'exists' : '',
  });

  let userMessage = null;
  let userMessagePromise = null;
  let promptTokens = null;
  let userMessageId = null;
  let responseMessageId = null;
  let getAbortData = null;

  const sender = getResponseSender({
    ...endpointOption,
    model: endpointOption.modelOptions.model,
    modelDisplayLabel,
  });
  const initialConversationId = conversationId;
  const newConvo = !initialConversationId;
  const userId = req.user.id;

  let reqDataContext = {
    userMessage,
    userMessagePromise,
    responseMessageId,
    promptTokens,
    conversationId,
    userMessageId,
  };

  const updateReqData = (data = {}) => {
    reqDataContext = processReqData(data, reqDataContext);
    abortKey = reqDataContext.abortKey;
    userMessage = reqDataContext.userMessage;
    userMessagePromise = reqDataContext.userMessagePromise;
    responseMessageId = reqDataContext.responseMessageId;
    promptTokens = reqDataContext.promptTokens;
    conversationId = reqDataContext.conversationId;
    userMessageId = reqDataContext.userMessageId;
  };

  let { onProgress: progressCallback, getPartialText } = createOnProgress();

  const performCleanup = () => {
    logger.debug('[AskController] Performing cleanup');
    if (Array.isArray(cleanupHandlers)) {
      for (const handler of cleanupHandlers) {
        try {
          if (typeof handler === 'function') {
            handler();
          }
        } catch (e) {
          // Ignore
        }
      }
    }

    if (abortKey) {
      logger.debug('[AskController] Cleaning up abort controller');
      cleanupAbortController(abortKey);
      abortKey = null;
    }

    if (client) {
      disposeClient(client);
      client = null;
    }

    reqDataContext = null;
    userMessage = null;
    userMessagePromise = null;
    promptTokens = null;
    getAbortData = null;
    progressCallback = null;
    endpointOption = null;
    cleanupHandlers = null;
    addTitle = null;

    if (requestDataMap.has(req)) {
      requestDataMap.delete(req);
    }
    logger.debug('[AskController] Cleanup completed');
  };

  try {
    ({ client } = await initializeClient({ req, res, endpointOption }));
    if (clientRegistry && client) {
      clientRegistry.register(client, { userId }, client);
    }

    if (client) {
      requestDataMap.set(req, { client });
    }

    clientRef = new WeakRef(client);

    getAbortData = () => {
      const currentClient = clientRef?.deref();
      const currentText =
        currentClient?.getStreamText != null ? currentClient.getStreamText() : getPartialText();

      return {
        sender,
        conversationId,
        messageId: reqDataContext.responseMessageId,
        parentMessageId: overrideParentMessageId ?? userMessageId,
        text: currentText,
        userMessage: userMessage,
        userMessagePromise: userMessagePromise,
        promptTokens: reqDataContext.promptTokens,
      };
    };

    const { onStart, abortController } = createAbortController(
      req,
      res,
      getAbortData,
      updateReqData,
    );

    const closeHandler = () => {
      logger.debug('[AskController] Request closed');
      if (!abortController || abortController.signal.aborted || abortController.requestCompleted) {
        return;
      }
      abortController.abort();
      logger.debug('[AskController] Request aborted on close');
    };

    res.on('close', closeHandler);
    cleanupHandlers.push(() => {
      try {
        res.removeListener('close', closeHandler);
      } catch (e) {
        // Ignore
      }
    });

    const messageOptions = {
      user: userId,
      parentMessageId,
      conversationId: reqDataContext.conversationId,
      overrideParentMessageId,
      getReqData: updateReqData,
      onStart,
      abortController,
      progressCallback,
      progressOptions: {
        res,
      },
    };

    /** @type {TMessage} */
    let response = await client.sendMessage(text, messageOptions);
    response.endpoint = endpointOption.endpoint;

    const databasePromise = response.databasePromise;
    delete response.databasePromise;

    const { conversation: convoData = {} } = await databasePromise;
    const conversation = { ...convoData };
    conversation.title =
      conversation && !conversation.title ? null : conversation?.title || 'New Chat';

    const latestUserMessage = reqDataContext.userMessage;

    if (client?.options?.attachments && latestUserMessage) {
      latestUserMessage.files = client.options.attachments;
      if (endpointOption?.modelOptions?.model) {
        conversation.model = endpointOption.modelOptions.model;
      }
      delete latestUserMessage.image_urls;
    }

    if (!abortController.signal.aborted) {
      const finalResponseMessage = { ...response };

      sendMessage(res, {
        final: true,
        conversation,
        title: conversation.title,
        requestMessage: latestUserMessage,
        responseMessage: finalResponseMessage,
      });
      res.end();

      if (client?.savedMessageIds && !client.savedMessageIds.has(response.messageId)) {
        await saveMessage(
          req,
          { ...finalResponseMessage, user: userId },
          { context: 'api/server/controllers/AskController.js - response end' },
        );
      }
    }

    if (!client?.skipSaveUserMessage && latestUserMessage) {
      await saveMessage(req, latestUserMessage, {
        context: "api/server/controllers/AskController.js - don't skip saving user message",
      });
    }

    if (typeof addTitle === 'function' && parentMessageId === Constants.NO_PARENT && newConvo) {
      addTitle(req, {
        text,
        response: { ...response },
        client,
      })
        .then(() => {
          logger.debug('[AskController] Title generation started');
        })
        .catch((err) => {
          logger.error('[AskController] Error in title generation', err);
        })
        .finally(() => {
          logger.debug('[AskController] Title generation completed');
          performCleanup();
        });
    } else {
      performCleanup();
    }
  } catch (error) {
    logger.error('[AskController] Error handling request', error);
    let partialText = '';
    try {
      const currentClient = clientRef?.deref();
      partialText =
        currentClient?.getStreamText != null ? currentClient.getStreamText() : getPartialText();
    } catch (getTextError) {
      logger.error('[AskController] Error calling getText() during error handling', getTextError);
    }

    handleAbortError(res, req, error, {
      sender,
      partialText,
      conversationId: reqDataContext.conversationId,
      messageId: reqDataContext.responseMessageId,
      parentMessageId: overrideParentMessageId ?? reqDataContext.userMessageId ?? parentMessageId,
      userMessageId: reqDataContext.userMessageId,
    })
      .catch((err) => {
        logger.error('[AskController] Error in `handleAbortError` during catch block', err);
      })
      .finally(() => {
        performCleanup();
      });
  }
};

module.exports = AskController;
@@ -84,7 +84,7 @@ const EditController = async (req, res, next, initializeClient) => {
    }

    if (abortKey) {
      logger.debug('[AskController] Cleaning up abort controller');
      logger.debug('[EditController] Cleaning up abort controller');
      cleanupAbortController(abortKey);
      abortKey = null;
    }

@@ -9,6 +9,7 @@ const {
} = require('@librechat/api');
const {
  Callback,
  Providers,
  GraphEvents,
  formatMessage,
  formatAgentMessages,
@@ -31,17 +32,13 @@ const {
} = require('librechat-data-provider');
const { DynamicStructuredTool } = require('@langchain/core/tools');
const { getBufferString, HumanMessage } = require('@langchain/core/messages');
const {
  getCustomEndpointConfig,
  createGetMCPAuthMap,
  checkCapability,
} = require('~/server/services/Config');
const { createGetMCPAuthMap, checkCapability } = require('~/server/services/Config');
const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
const { getFormattedMemories, deleteMemory, setMemory } = require('~/models');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const initOpenAI = require('~/server/services/Endpoints/openAI/initialize');
const { getProviderConfig } = require('~/server/services/Endpoints');
const { checkAccess } = require('~/server/middleware/roles/access');
const BaseClient = require('~/app/clients/BaseClient');
const { loadAgent } = require('~/models/Agent');
@@ -677,7 +674,7 @@ class AgentClient extends BaseClient {
        hide_sequential_outputs: this.options.agent.hide_sequential_outputs,
        user: this.options.req.user,
      },
      recursionLimit: agentsEConfig?.recursionLimit,
      recursionLimit: agentsEConfig?.recursionLimit ?? 25,
      signal: abortController.signal,
      streamMode: 'values',
      version: 'v2',
@@ -983,23 +980,26 @@ class AgentClient extends BaseClient {
      throw new Error('Run not initialized');
    }
    const { handleLLMEnd, collected: collectedMetadata } = createMetadataAggregator();
    const endpoint = this.options.agent.endpoint;
    const { req, res } = this.options;
    const { req, res, agent } = this.options;
    const endpoint = agent.endpoint;

    /** @type {import('@librechat/agents').ClientOptions} */
    let clientOptions = {
      maxTokens: 75,
      model: agent.model_parameters.model,
    };
    let endpointConfig = req.app.locals[endpoint];

    const { getOptions, overrideProvider, customEndpointConfig } =
      await getProviderConfig(endpoint);

    /** @type {TEndpoint | undefined} */
    const endpointConfig = req.app.locals[endpoint] ?? customEndpointConfig;
    if (!endpointConfig) {
      try {
        endpointConfig = await getCustomEndpointConfig(endpoint);
      } catch (err) {
        logger.error(
          '[api/server/controllers/agents/client.js #titleConvo] Error getting custom endpoint config',
          err,
        );
      }
      logger.warn(
        '[api/server/controllers/agents/client.js #titleConvo] Error getting endpoint config',
      );
    }

    if (
      endpointConfig &&
      endpointConfig.titleModel &&
@@ -1007,30 +1007,40 @@ class AgentClient extends BaseClient {
    ) {
      clientOptions.model = endpointConfig.titleModel;
    }

    const options = await getOptions({
      req,
      res,
      optionsOnly: true,
      overrideEndpoint: endpoint,
      overrideModel: clientOptions.model,
      endpointOption: { model_parameters: clientOptions },
    });

    let provider = options.provider ?? overrideProvider ?? agent.provider;
    if (
      endpoint === EModelEndpoint.azureOpenAI &&
      clientOptions.model &&
      this.options.agent.model_parameters.model !== clientOptions.model
      options.llmConfig?.azureOpenAIApiInstanceName == null
    ) {
      clientOptions =
        (
          await initOpenAI({
            req,
            res,
            optionsOnly: true,
            overrideModel: clientOptions.model,
            overrideEndpoint: endpoint,
            endpointOption: {
              model_parameters: clientOptions,
            },
          })
        )?.llmConfig ?? clientOptions;
      provider = Providers.OPENAI;
    }
    if (/\b(o\d)\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {

    /** @type {import('@librechat/agents').ClientOptions} */
    clientOptions = { ...options.llmConfig };
    if (options.configOptions) {
      clientOptions.configuration = options.configOptions;
    }

    // Ensure maxTokens is set for non-o1 models
    if (!/\b(o\d)\b/i.test(clientOptions.model) && !clientOptions.maxTokens) {
      clientOptions.maxTokens = 75;
    } else if (/\b(o\d)\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
      delete clientOptions.maxTokens;
    }

    try {
      const titleResult = await this.run.generateTitle({
        provider,
        inputText: text,
        contentParts: this.contentParts,
        clientOptions,
@@ -1048,8 +1058,10 @@ class AgentClient extends BaseClient {
        let input_tokens, output_tokens;

        if (item.usage) {
          input_tokens = item.usage.input_tokens || item.usage.inputTokens;
          output_tokens = item.usage.output_tokens || item.usage.outputTokens;
          input_tokens =
            item.usage.prompt_tokens || item.usage.input_tokens || item.usage.inputTokens;
          output_tokens =
            item.usage.completion_tokens || item.usage.output_tokens || item.usage.outputTokens;
        } else if (item.tokenUsage) {
          input_tokens = item.tokenUsage.promptTokens;
          output_tokens = item.tokenUsage.completionTokens;

@@ -1,106 +0,0 @@
const { HttpsProxyAgent } = require('https-proxy-agent');
const { resolveHeaders } = require('librechat-data-provider');
const { createLLM } = require('~/app/clients/llm');

/**
 * Initializes and returns a Language Learning Model (LLM) instance.
 *
 * @param {Object} options - Configuration options for the LLM.
 * @param {string} options.model - The model identifier.
 * @param {string} options.modelName - The specific name of the model.
 * @param {number} options.temperature - The temperature setting for the model.
 * @param {number} options.presence_penalty - The presence penalty for the model.
 * @param {number} options.frequency_penalty - The frequency penalty for the model.
 * @param {number} options.max_tokens - The maximum number of tokens for the model output.
 * @param {boolean} options.streaming - Whether to use streaming for the model output.
 * @param {Object} options.context - The context for the conversation.
 * @param {number} options.tokenBuffer - The token buffer size.
 * @param {number} options.initialMessageCount - The initial message count.
 * @param {string} options.conversationId - The ID of the conversation.
 * @param {string} options.user - The user identifier.
 * @param {string} options.langchainProxy - The langchain proxy URL.
 * @param {boolean} options.useOpenRouter - Whether to use OpenRouter.
 * @param {Object} options.options - Additional options.
 * @param {Object} options.options.headers - Custom headers for the request.
 * @param {string} options.options.proxy - Proxy URL.
 * @param {Object} options.options.req - The request object.
 * @param {Object} options.options.res - The response object.
 * @param {boolean} options.options.debug - Whether to enable debug mode.
 * @param {string} options.apiKey - The API key for authentication.
 * @param {Object} options.azure - Azure-specific configuration.
 * @param {Object} options.abortController - The AbortController instance.
 * @returns {Object} The initialized LLM instance.
 */
function initializeLLM(options) {
  const {
    model,
    modelName,
    temperature,
    presence_penalty,
    frequency_penalty,
    max_tokens,
    streaming,
    user,
    langchainProxy,
    useOpenRouter,
    options: { headers, proxy },
    apiKey,
    azure,
  } = options;

  const modelOptions = {
    modelName: modelName || model,
    temperature,
    presence_penalty,
    frequency_penalty,
    user,
  };

  if (max_tokens) {
    modelOptions.max_tokens = max_tokens;
  }

  const configOptions = {};

  if (langchainProxy) {
    configOptions.basePath = langchainProxy;
  }

  if (useOpenRouter) {
    configOptions.basePath = 'https://openrouter.ai/api/v1';
    configOptions.baseOptions = {
      headers: {
        'HTTP-Referer': 'https://librechat.ai',
        'X-Title': 'LibreChat',
      },
    };
  }

  if (headers && typeof headers === 'object' && !Array.isArray(headers)) {
    configOptions.baseOptions = {
      headers: resolveHeaders({
        ...headers,
        ...configOptions?.baseOptions?.headers,
      }),
    };
  }

  if (proxy) {
    configOptions.httpAgent = new HttpsProxyAgent(proxy);
    configOptions.httpsAgent = new HttpsProxyAgent(proxy);
  }

  const llm = createLLM({
    modelOptions,
    configOptions,
    openAIApiKey: apiKey,
    azure,
    streaming,
  });

  return llm;
}

module.exports = {
  initializeLLM,
};
@@ -39,7 +39,9 @@ const startServer = async () => {
  await connectDb();

  logger.info('Connected to MongoDB');
  await indexSync();
  indexSync().catch((err) => {
    logger.error('[indexSync] Background sync failed:', err);
  });

  app.disable('x-powered-by');
  app.set('trust proxy', trusted_proxy);
@@ -95,7 +97,6 @@ const startServer = async () => {
  app.use('/api/actions', routes.actions);
  app.use('/api/keys', routes.keys);
  app.use('/api/user', routes.user);
  app.use('/api/ask', routes.ask);
  app.use('/api/search', routes.search);
  app.use('/api/edit', routes.edit);
  app.use('/api/messages', routes.messages);
@@ -116,7 +117,6 @@ const startServer = async () => {
  app.use('/api/roles', routes.roles);
  app.use('/api/agents', routes.agents);
  app.use('/api/banner', routes.banner);
  app.use('/api/bedrock', routes.bedrock);
  app.use('/api/memories', routes.memories);
  app.use('/api/tags', routes.tags);
  app.use('/api/mcp', routes.mcp);

@@ -1,13 +1,12 @@
const { logger } = require('@librechat/data-schemas');
const {
  parseCompactConvo,
  EndpointURLs,
  EModelEndpoint,
  isAgentsEndpoint,
  EndpointURLs,
  parseCompactConvo,
} = require('librechat-data-provider');
const azureAssistants = require('~/server/services/Endpoints/azureAssistants');
const { getModelsConfig } = require('~/server/controllers/ModelController');
const assistants = require('~/server/services/Endpoints/assistants');
const gptPlugins = require('~/server/services/Endpoints/gptPlugins');
const { processFiles } = require('~/server/services/Files/process');
const anthropic = require('~/server/services/Endpoints/anthropic');
const bedrock = require('~/server/services/Endpoints/bedrock');
@@ -25,7 +24,6 @@ const buildFunction = {
  [EModelEndpoint.bedrock]: bedrock.buildOptions,
  [EModelEndpoint.azureOpenAI]: openAI.buildOptions,
  [EModelEndpoint.anthropic]: anthropic.buildOptions,
  [EModelEndpoint.gptPlugins]: gptPlugins.buildOptions,
  [EModelEndpoint.assistants]: assistants.buildOptions,
  [EModelEndpoint.azureAssistants]: azureAssistants.buildOptions,
};
@@ -36,6 +34,9 @@ async function buildEndpointOption(req, res, next) {
  try {
    parsedBody = parseCompactConvo({ endpoint, endpointType, conversation: req.body });
  } catch (error) {
    logger.warn(
      `Error parsing conversation for endpoint ${endpoint}${error?.message ? `: ${error.message}` : ''}`,
    );
    return handleError(res, { text: 'Error parsing conversation' });
  }

@@ -57,15 +58,6 @@ async function buildEndpointOption(req, res, next) {
      return handleError(res, { text: 'Model spec mismatch' });
    }

    if (
      currentModelSpec.preset.endpoint !== EModelEndpoint.gptPlugins &&
      currentModelSpec.preset.tools
    ) {
      return handleError(res, {
        text: `Only the "${EModelEndpoint.gptPlugins}" endpoint can have tools defined in the preset`,
      });
    }

    try {
      currentModelSpec.preset.spec = spec;
      if (currentModelSpec.iconURL != null && currentModelSpec.iconURL !== '') {
@@ -77,6 +69,7 @@ async function buildEndpointOption(req, res, next) {
        conversation: currentModelSpec.preset,
      });
    } catch (error) {
      logger.error(`Error parsing model spec for endpoint ${endpoint}`, error);
      return handleError(res, { text: 'Error parsing model spec' });
    }
  }
@@ -84,20 +77,23 @@ async function buildEndpointOption(req, res, next) {
  try {
    const isAgents =
      isAgentsEndpoint(endpoint) || req.baseUrl.startsWith(EndpointURLs[EModelEndpoint.agents]);
    const endpointFn = buildFunction[isAgents ? EModelEndpoint.agents : (endpointType ?? endpoint)];
    const builder = isAgents ? (...args) => endpointFn(req, ...args) : endpointFn;
    const builder = isAgents
      ? (...args) => buildFunction[EModelEndpoint.agents](req, ...args)
      : buildFunction[endpointType ?? endpoint];

    // TODO: use object params
    req.body.endpointOption = await builder(endpoint, parsedBody, endpointType);

    // TODO: use `getModelsConfig` only when necessary
    const modelsConfig = await getModelsConfig(req);
    req.body.endpointOption.modelsConfig = modelsConfig;
    if (req.body.files && !isAgents) {
      req.body.endpointOption.attachments = processFiles(req.body.files);
    }

    next();
  } catch (error) {
    logger.error(
      `Error building endpoint option for endpoint ${endpoint} with type ${endpointType}`,
      error,
    );
    return handleError(res, { text: 'Error building endpoint option' });
  }
}

@@ -1,63 +0,0 @@
const { Keyv } = require('keyv');
const { KeyvFile } = require('keyv-file');
const { logger } = require('~/config');

const addToCache = async ({ endpoint, endpointOption, userMessage, responseMessage }) => {
  try {
    const conversationsCache = new Keyv({
      store: new KeyvFile({ filename: './data/cache.json' }),
      namespace: 'chatgpt', // should be 'bing' for bing/sydney
    });

    const {
      conversationId,
      messageId: userMessageId,
      parentMessageId: userParentMessageId,
      text: userText,
    } = userMessage;
    const {
      messageId: responseMessageId,
      parentMessageId: responseParentMessageId,
      text: responseText,
    } = responseMessage;

    let conversation = await conversationsCache.get(conversationId);
    // used to generate a title for the conversation if none exists
    // let isNewConversation = false;
    if (!conversation) {
      conversation = {
        messages: [],
        createdAt: Date.now(),
      };
      // isNewConversation = true;
    }

    const roles = (options) => {
      if (endpoint === 'openAI') {
        return options?.chatGptLabel || 'ChatGPT';
      }
    };

    let _userMessage = {
      id: userMessageId,
      parentMessageId: userParentMessageId,
      role: 'User',
      message: userText,
    };

    let _responseMessage = {
      id: responseMessageId,
      parentMessageId: responseParentMessageId,
      role: roles(endpointOption),
      message: responseText,
    };

    conversation.messages.push(_userMessage, _responseMessage);

    await conversationsCache.set(conversationId, conversation);
  } catch (error) {
    logger.error('[addToCache] Error adding conversation to cache', error);
  }
};

module.exports = addToCache;
@@ -1,25 +0,0 @@
const express = require('express');
const AskController = require('~/server/controllers/AskController');
const { addTitle, initializeClient } = require('~/server/services/Endpoints/anthropic');
const {
  setHeaders,
  handleAbort,
  validateModel,
  validateEndpoint,
  buildEndpointOption,
} = require('~/server/middleware');

const router = express.Router();

router.post(
  '/',
  validateEndpoint,
  validateModel,
  buildEndpointOption,
  setHeaders,
  async (req, res, next) => {
    await AskController(req, res, next, initializeClient, addTitle);
  },
);

module.exports = router;
@@ -1,25 +0,0 @@
const express = require('express');
const AskController = require('~/server/controllers/AskController');
const { initializeClient } = require('~/server/services/Endpoints/custom');
const { addTitle } = require('~/server/services/Endpoints/openAI');
const {
  setHeaders,
  validateModel,
  validateEndpoint,
  buildEndpointOption,
} = require('~/server/middleware');

const router = express.Router();

router.post(
  '/',
  validateEndpoint,
  validateModel,
  buildEndpointOption,
  setHeaders,
  async (req, res, next) => {
    await AskController(req, res, next, initializeClient, addTitle);
  },
);

module.exports = router;
@@ -1,24 +0,0 @@
const express = require('express');
const AskController = require('~/server/controllers/AskController');
const { initializeClient, addTitle } = require('~/server/services/Endpoints/google');
const {
  setHeaders,
  validateModel,
  validateEndpoint,
  buildEndpointOption,
} = require('~/server/middleware');

const router = express.Router();

router.post(
  '/',
  validateEndpoint,
  validateModel,
  buildEndpointOption,
  setHeaders,
  async (req, res, next) => {
    await AskController(req, res, next, initializeClient, addTitle);
  },
);

module.exports = router;
@@ -1,241 +0,0 @@
const express = require('express');
const { getResponseSender, Constants } = require('librechat-data-provider');
const { initializeClient } = require('~/server/services/Endpoints/gptPlugins');
const { sendMessage, createOnProgress } = require('~/server/utils');
const { addTitle } = require('~/server/services/Endpoints/openAI');
const { saveMessage, updateMessage } = require('~/models');
const {
  handleAbort,
  createAbortController,
  handleAbortError,
  setHeaders,
  validateModel,
  validateEndpoint,
  buildEndpointOption,
  moderateText,
} = require('~/server/middleware');
const { validateTools } = require('~/app');
const { logger } = require('~/config');

const router = express.Router();

router.use(moderateText);

router.post(
  '/',
  validateEndpoint,
  validateModel,
  buildEndpointOption,
  setHeaders,
  async (req, res) => {
    let {
      text,
      endpointOption,
      conversationId,
      parentMessageId = null,
      overrideParentMessageId = null,
    } = req.body;

    logger.debug('[/ask/gptPlugins]', { text, conversationId, ...endpointOption });

    let userMessage;
    let userMessagePromise;
    let promptTokens;
    let userMessageId;
    let responseMessageId;
    const sender = getResponseSender({
      ...endpointOption,
      model: endpointOption.modelOptions.model,
    });
    const newConvo = !conversationId;
    const user = req.user.id;

    const plugins = [];

    const getReqData = (data = {}) => {
      for (let key in data) {
        if (key === 'userMessage') {
          userMessage = data[key];
          userMessageId = data[key].messageId;
        } else if (key === 'userMessagePromise') {
          userMessagePromise = data[key];
        } else if (key === 'responseMessageId') {
          responseMessageId = data[key];
        } else if (key === 'promptTokens') {
          promptTokens = data[key];
        } else if (!conversationId && key === 'conversationId') {
          conversationId = data[key];
        }
      }
    };

    let streaming = null;
    let timer = null;

    const {
      onProgress: progressCallback,
      sendIntermediateMessage,
      getPartialText,
    } = createOnProgress({
      onProgress: () => {
        if (timer) {
          clearTimeout(timer);
        }

        streaming = new Promise((resolve) => {
          timer = setTimeout(() => {
            resolve();
          }, 250);
        });
      },
    });

    const pluginMap = new Map();
    const onAgentAction = async (action, runId) => {
      pluginMap.set(runId, action.tool);
      sendIntermediateMessage(res, {
        plugins,
        parentMessageId: userMessage.messageId,
        messageId: responseMessageId,
      });
    };

    const onToolStart = async (tool, input, runId, parentRunId) => {
      const pluginName = pluginMap.get(parentRunId);
      const latestPlugin = {
        runId,
        loading: true,
        inputs: [input],
        latest: pluginName,
        outputs: null,
      };

      if (streaming) {
        await streaming;
      }
      const extraTokens = ':::plugin:::\n';
      plugins.push(latestPlugin);
      sendIntermediateMessage(
        res,
        { plugins, parentMessageId: userMessage.messageId, messageId: responseMessageId },
        extraTokens,
      );
    };

    const onToolEnd = async (output, runId) => {
      if (streaming) {
        await streaming;
      }

      const pluginIndex = plugins.findIndex((plugin) => plugin.runId === runId);

      if (pluginIndex !== -1) {
        plugins[pluginIndex].loading = false;
        plugins[pluginIndex].outputs = output;
      }
    };

    const getAbortData = () => ({
      sender,
      conversationId,
      userMessagePromise,
      messageId: responseMessageId,
      parentMessageId: overrideParentMessageId ?? userMessageId,
      text: getPartialText(),
      plugins: plugins.map((p) => ({ ...p, loading: false })),
      userMessage,
      promptTokens,
    });
    const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData);

    try {
      endpointOption.tools = await validateTools(user, endpointOption.tools);
      const { client } = await initializeClient({ req, res, endpointOption });

      const onChainEnd = () => {
        if (!client.skipSaveUserMessage) {
          saveMessage(
            req,
            { ...userMessage, user },
            { context: 'api/server/routes/ask/gptPlugins.js - onChainEnd' },
          );
        }
        sendIntermediateMessage(res, {
          plugins,
          parentMessageId: userMessage.messageId,
          messageId: responseMessageId,
        });
      };

      let response = await client.sendMessage(text, {
        user,
        conversationId,
        parentMessageId,
        overrideParentMessageId,
        getReqData,
        onAgentAction,
        onChainEnd,
        onToolStart,
        onToolEnd,
        onStart,
        getPartialText,
        ...endpointOption,
        progressCallback,
        progressOptions: {
          res,
          // parentMessageId: overrideParentMessageId || userMessageId,
          plugins,
        },
        abortController,
      });

      if (overrideParentMessageId) {
        response.parentMessageId = overrideParentMessageId;
      }

      logger.debug('[/ask/gptPlugins]', response);

      const { conversation = {} } = await response.databasePromise;
      delete response.databasePromise;
      conversation.title =
        conversation && !conversation.title ? null : conversation?.title || 'New Chat';

      sendMessage(res, {
        title: conversation.title,
        final: true,
        conversation,
        requestMessage: userMessage,
        responseMessage: response,
      });
      res.end();

      if (parentMessageId === Constants.NO_PARENT && newConvo) {
        addTitle(req, {
          text,
          response,
          client,
        });
      }

      response.plugins = plugins.map((p) => ({ ...p, loading: false }));
      if (response.plugins?.length > 0) {
        await updateMessage(
          req,
          { ...response, user },
          { context: 'api/server/routes/ask/gptPlugins.js - save plugins used' },
        );
      }
    } catch (error) {
      const partialText = getPartialText();
      handleAbortError(res, req, error, {
        partialText,
        conversationId,
        sender,
        messageId: responseMessageId,
        parentMessageId: userMessageId ?? parentMessageId,
      });
    }
  },
);

module.exports = router;
@@ -1,47 +0,0 @@
const express = require('express');
const { EModelEndpoint } = require('librechat-data-provider');
const {
  uaParser,
  checkBan,
  requireJwtAuth,
  messageIpLimiter,
  concurrentLimiter,
  messageUserLimiter,
  validateConvoAccess,
} = require('~/server/middleware');
const { isEnabled } = require('~/server/utils');
const gptPlugins = require('./gptPlugins');
const anthropic = require('./anthropic');
const custom = require('./custom');
const google = require('./google');
const openAI = require('./openAI');

const { LIMIT_CONCURRENT_MESSAGES, LIMIT_MESSAGE_IP, LIMIT_MESSAGE_USER } = process.env ?? {};

const router = express.Router();

router.use(requireJwtAuth);
router.use(checkBan);
router.use(uaParser);

if (isEnabled(LIMIT_CONCURRENT_MESSAGES)) {
  router.use(concurrentLimiter);
}

if (isEnabled(LIMIT_MESSAGE_IP)) {
  router.use(messageIpLimiter);
}

if (isEnabled(LIMIT_MESSAGE_USER)) {
  router.use(messageUserLimiter);
}

router.use(validateConvoAccess);

router.use([`/${EModelEndpoint.azureOpenAI}`, `/${EModelEndpoint.openAI}`], openAI);
router.use(`/${EModelEndpoint.gptPlugins}`, gptPlugins);
router.use(`/${EModelEndpoint.anthropic}`, anthropic);
router.use(`/${EModelEndpoint.google}`, google);
router.use(`/${EModelEndpoint.custom}`, custom);

module.exports = router;
@@ -1,27 +0,0 @@
const express = require('express');
const AskController = require('~/server/controllers/AskController');
const { addTitle, initializeClient } = require('~/server/services/Endpoints/openAI');
const {
  handleAbort,
  setHeaders,
  validateModel,
  validateEndpoint,
  buildEndpointOption,
  moderateText,
} = require('~/server/middleware');

const router = express.Router();
router.use(moderateText);

router.post(
  '/',
  validateEndpoint,
  validateModel,
  buildEndpointOption,
  setHeaders,
  async (req, res, next) => {
    await AskController(req, res, next, initializeClient, addTitle);
  },
);

module.exports = router;
@@ -1,37 +0,0 @@
const express = require('express');

const router = express.Router();
const {
  setHeaders,
  handleAbort,
  moderateText,
  // validateModel,
  // validateEndpoint,
  buildEndpointOption,
} = require('~/server/middleware');
const { initializeClient } = require('~/server/services/Endpoints/bedrock');
const AgentController = require('~/server/controllers/agents/request');
const addTitle = require('~/server/services/Endpoints/agents/title');

router.use(moderateText);

/**
 * @route POST /
 * @desc Chat with an assistant
 * @access Public
 * @param {express.Request} req - The request object, containing the request data.
 * @param {express.Response} res - The response object, used to send back a response.
 * @returns {void}
 */
router.post(
  '/',
  // validateModel,
  // validateEndpoint,
  buildEndpointOption,
  setHeaders,
  async (req, res, next) => {
    await AgentController(req, res, next, initializeClient, addTitle);
  },
);

module.exports = router;
@@ -1,35 +0,0 @@
const express = require('express');
const {
  uaParser,
  checkBan,
  requireJwtAuth,
  messageIpLimiter,
  concurrentLimiter,
  messageUserLimiter,
} = require('~/server/middleware');
const { isEnabled } = require('~/server/utils');
const chat = require('./chat');

const { LIMIT_CONCURRENT_MESSAGES, LIMIT_MESSAGE_IP, LIMIT_MESSAGE_USER } = process.env ?? {};

const router = express.Router();

router.use(requireJwtAuth);
router.use(checkBan);
router.use(uaParser);

if (isEnabled(LIMIT_CONCURRENT_MESSAGES)) {
  router.use(concurrentLimiter);
}

if (isEnabled(LIMIT_MESSAGE_IP)) {
  router.use(messageIpLimiter);
}

if (isEnabled(LIMIT_MESSAGE_USER)) {
  router.use(messageUserLimiter);
}

router.use('/chat', chat);

module.exports = router;
@@ -1,207 +0,0 @@
const express = require('express');
const { getResponseSender } = require('librechat-data-provider');
const {
  setHeaders,
  moderateText,
  validateModel,
  handleAbortError,
  validateEndpoint,
  buildEndpointOption,
  createAbortController,
} = require('~/server/middleware');
const { sendMessage, createOnProgress, formatSteps, formatAction } = require('~/server/utils');
const { initializeClient } = require('~/server/services/Endpoints/gptPlugins');
const { saveMessage, updateMessage } = require('~/models');
const { validateTools } = require('~/app');
const { logger } = require('~/config');

const router = express.Router();

router.use(moderateText);

router.post(
  '/',
  validateEndpoint,
  validateModel,
  buildEndpointOption,
  setHeaders,
  async (req, res) => {
    let {
      text,
      generation,
      endpointOption,
      conversationId,
      responseMessageId,
      isContinued = false,
      parentMessageId = null,
      overrideParentMessageId = null,
    } = req.body;

    logger.debug('[/edit/gptPlugins]', {
      text,
      generation,
      isContinued,
      conversationId,
      ...endpointOption,
    });

    let userMessage;
    let userMessagePromise;
    let promptTokens;
    const sender = getResponseSender({
      ...endpointOption,
      model: endpointOption.modelOptions.model,
    });
    const userMessageId = parentMessageId;
    const user = req.user.id;

    const plugin = {
      loading: true,
      inputs: [],
      latest: null,
      outputs: null,
    };

    const getReqData = (data = {}) => {
      for (let key in data) {
        if (key === 'userMessage') {
          userMessage = data[key];
        } else if (key === 'userMessagePromise') {
          userMessagePromise = data[key];
        } else if (key === 'responseMessageId') {
          responseMessageId = data[key];
        } else if (key === 'promptTokens') {
          promptTokens = data[key];
        }
      }
    };

    const {
      onProgress: progressCallback,
      sendIntermediateMessage,
      getPartialText,
    } = createOnProgress({
      generation,
      onProgress: () => {
        if (plugin.loading === true) {
          plugin.loading = false;
        }
      },
    });

    const onChainEnd = (data) => {
      let { intermediateSteps: steps } = data;
      plugin.outputs = steps && steps[0].action ? formatSteps(steps) : 'An error occurred.';
      plugin.loading = false;
      saveMessage(
        req,
        { ...userMessage, user },
        { context: 'api/server/routes/ask/gptPlugins.js - onChainEnd' },
      );
      sendIntermediateMessage(res, {
        plugin,
        parentMessageId: userMessage.messageId,
        messageId: responseMessageId,
      });
      // logger.debug('CHAIN END', plugin.outputs);
    };

    const getAbortData = () => ({
      sender,
      conversationId,
      userMessagePromise,
      messageId: responseMessageId,
      parentMessageId: overrideParentMessageId ?? userMessageId,
      text: getPartialText(),
      plugin: { ...plugin, loading: false },
      userMessage,
      promptTokens,
    });
    const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData);

    try {
      endpointOption.tools = await validateTools(user, endpointOption.tools);
      const { client } = await initializeClient({ req, res, endpointOption });

      const onAgentAction = (action, start = false) => {
        const formattedAction = formatAction(action);
        plugin.inputs.push(formattedAction);
        plugin.latest = formattedAction.plugin;
        if (!start && !client.skipSaveUserMessage) {
          saveMessage(
            req,
            { ...userMessage, user },
            { context: 'api/server/routes/ask/gptPlugins.js - onAgentAction' },
          );
        }
        sendIntermediateMessage(res, {
          plugin,
          parentMessageId: userMessage.messageId,
          messageId: responseMessageId,
        });
        // logger.debug('PLUGIN ACTION', formattedAction);
      };

      let response = await client.sendMessage(text, {
        user,
        generation,
        isContinued,
        isEdited: true,
        conversationId,
        parentMessageId,
        responseMessageId,
        overrideParentMessageId,
        getReqData,
        onAgentAction,
        onChainEnd,
        onStart,
        ...endpointOption,
        progressCallback,
        progressOptions: {
          res,
          plugin,
          // parentMessageId: overrideParentMessageId || userMessageId,
        },
        abortController,
      });

      if (overrideParentMessageId) {
        response.parentMessageId = overrideParentMessageId;
      }

      logger.debug('[/edit/gptPlugins] CLIENT RESPONSE', response);

      const { conversation = {} } = await response.databasePromise;
      delete response.databasePromise;
      conversation.title =
        conversation && !conversation.title ? null : conversation?.title || 'New Chat';

      sendMessage(res, {
        title: conversation.title,
        final: true,
        conversation,
        requestMessage: userMessage,
        responseMessage: response,
      });
      res.end();

      response.plugin = { ...plugin, loading: false };
      await updateMessage(
        req,
        { ...response, user },
        { context: 'api/server/routes/edit/gptPlugins.js' },
      );
    } catch (error) {
      const partialText = getPartialText();
      handleAbortError(res, req, error, {
        partialText,
        conversationId,
        sender,
        messageId: responseMessageId,
        parentMessageId: userMessageId ?? parentMessageId,
      });
    }
  },
);

module.exports = router;
@@ -3,7 +3,6 @@ const openAI = require('./openAI');
const custom = require('./custom');
const google = require('./google');
const anthropic = require('./anthropic');
const gptPlugins = require('./gptPlugins');
const { isEnabled } = require('~/server/utils');
const { EModelEndpoint } = require('librechat-data-provider');
const {
@@ -39,7 +38,6 @@ if (isEnabled(LIMIT_MESSAGE_USER)) {
router.use(validateConvoAccess);

router.use([`/${EModelEndpoint.azureOpenAI}`, `/${EModelEndpoint.openAI}`], openAI);
router.use(`/${EModelEndpoint.gptPlugins}`, gptPlugins);
router.use(`/${EModelEndpoint.anthropic}`, anthropic);
router.use(`/${EModelEndpoint.google}`, google);
router.use(`/${EModelEndpoint.custom}`, custom);
@@ -283,7 +283,10 @@ router.post('/', async (req, res) => {
      message += ': ' + error.message;
    }

    if (error.message?.includes('Invalid file format')) {
    if (
      error.message?.includes('Invalid file format') ||
      error.message?.includes('No OCR result')
    ) {
      message = error.message;
    }
@@ -9,7 +9,6 @@ const presets = require('./presets');
const prompts = require('./prompts');
const balance = require('./balance');
const plugins = require('./plugins');
const bedrock = require('./bedrock');
const actions = require('./actions');
const banner = require('./banner');
const search = require('./search');
@@ -26,11 +25,9 @@ const auth = require('./auth');
const edit = require('./edit');
const keys = require('./keys');
const user = require('./user');
const ask = require('./ask');
const mcp = require('./mcp');

module.exports = {
  ask,
  edit,
  auth,
  keys,
@@ -46,7 +43,6 @@ module.exports = {
  search,
  config,
  models,
  bedrock,
  prompts,
  plugins,
  actions,
@@ -40,6 +40,7 @@ async function getBalanceConfig() {
/**
 *
 * @param {string | EModelEndpoint} endpoint
 * @returns {Promise<TEndpoint | undefined>}
 */
const getCustomEndpointConfig = async (endpoint) => {
  const customConfig = await getCustomConfig();
@@ -1,5 +1,9 @@
const { Providers } = require('@librechat/agents');
const { primeResources, optionalChainWithEmptyCheck } = require('@librechat/api');
const {
  primeResources,
  extractLibreChatParams,
  optionalChainWithEmptyCheck,
} = require('@librechat/api');
const {
  ErrorTypes,
  EModelEndpoint,
@@ -7,30 +11,12 @@ const {
  replaceSpecialVars,
  providerEndpointMap,
} = require('librechat-data-provider');
const initAnthropic = require('~/server/services/Endpoints/anthropic/initialize');
const getBedrockOptions = require('~/server/services/Endpoints/bedrock/options');
const initOpenAI = require('~/server/services/Endpoints/openAI/initialize');
const initCustom = require('~/server/services/Endpoints/custom/initialize');
const initGoogle = require('~/server/services/Endpoints/google/initialize');
const { getProviderConfig } = require('~/server/services/Endpoints');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { processFiles } = require('~/server/services/Files/process');
const { getFiles, getToolFilesByIds } = require('~/models/File');
const { getConvoFiles } = require('~/models/Conversation');
const { getToolFilesByIds } = require('~/models/File');
const { getModelMaxTokens } = require('~/utils');
const { getFiles } = require('~/models/File');

const providerConfigMap = {
  [Providers.XAI]: initCustom,
  [Providers.OLLAMA]: initCustom,
  [Providers.DEEPSEEK]: initCustom,
  [Providers.OPENROUTER]: initCustom,
  [EModelEndpoint.openAI]: initOpenAI,
  [EModelEndpoint.google]: initGoogle,
  [EModelEndpoint.azureOpenAI]: initOpenAI,
  [EModelEndpoint.anthropic]: initAnthropic,
  [EModelEndpoint.bedrock]: getBedrockOptions,
};

/**
 * @param {object} params
@@ -71,7 +57,7 @@ const initializeAgent = async ({
    ),
  );

  const { resendFiles = true, ...modelOptions } = _modelOptions;
  const { resendFiles, maxContextTokens, modelOptions } = extractLibreChatParams(_modelOptions);

  if (isInitialAgent && conversationId != null && resendFiles) {
    const fileIds = (await getConvoFiles(conversationId)) ?? [];
@@ -111,17 +97,9 @@ const initializeAgent = async ({
  })) ?? {};

  agent.endpoint = provider;
  let getOptions = providerConfigMap[provider];
  if (!getOptions && providerConfigMap[provider.toLowerCase()] != null) {
    agent.provider = provider.toLowerCase();
    getOptions = providerConfigMap[agent.provider];
  } else if (!getOptions) {
    const customEndpointConfig = await getCustomEndpointConfig(provider);
    if (!customEndpointConfig) {
      throw new Error(`Provider ${provider} not supported`);
    }
    getOptions = initCustom;
    agent.provider = Providers.OPENAI;
  const { getOptions, overrideProvider } = await getProviderConfig(provider);
  if (overrideProvider) {
    agent.provider = overrideProvider;
  }

  const _endpointOption =
@@ -145,9 +123,8 @@ const initializeAgent = async ({
    modelOptions.maxTokens,
    0,
  );
  const maxContextTokens = optionalChainWithEmptyCheck(
    modelOptions.maxContextTokens,
    modelOptions.max_context_tokens,
  const agentMaxContextTokens = optionalChainWithEmptyCheck(
    maxContextTokens,
    getModelMaxTokens(tokensModel, providerEndpointMap[provider]),
    4096,
  );
@@ -189,7 +166,7 @@ const initializeAgent = async ({
    attachments,
    resendFiles,
    toolContextMap,
    maxContextTokens: (maxContextTokens - maxTokens) * 0.9,
    maxContextTokens: (agentMaxContextTokens - maxTokens) * 0.9,
  };
};
@@ -1,10 +1,9 @@
const { isAgentsEndpoint, Constants } = require('librechat-data-provider');
const { isAgentsEndpoint, removeNullishValues, Constants } = require('librechat-data-provider');
const { loadAgent } = require('~/models/Agent');
const { logger } = require('~/config');

const buildOptions = (req, endpoint, parsedBody, endpointType) => {
  const { spec, iconURL, agent_id, instructions, maxContextTokens, ...model_parameters } =
    parsedBody;
  const { spec, iconURL, agent_id, instructions, ...model_parameters } = parsedBody;
  const agentPromise = loadAgent({
    req,
    agent_id: isAgentsEndpoint(endpoint) ? agent_id : Constants.EPHEMERAL_AGENT_ID,
@@ -15,19 +14,16 @@ const buildOptions = (req, endpoint, parsedBody, endpointType) => {
    return undefined;
  });

  const endpointOption = {
  return removeNullishValues({
    spec,
    iconURL,
    endpoint,
    agent_id,
    endpointType,
    instructions,
    maxContextTokens,
    model_parameters,
    agent: agentPromise,
  };

  return endpointOption;
  });
};

module.exports = { buildOptions };
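Note on the buildOptions change above: removeNullishValues drops null/undefined entries before the option object is returned, so callers no longer receive keys for unset fields. A minimal illustrative sketch of the effect follows (values are invented; the behavior matches the removeNullishValues mock in the llm.spec.js hunks further down this compare):

// Illustrative only, not part of the diff: nullish fields are stripped.
const { removeNullishValues } = require('librechat-data-provider');
const cleaned = removeNullishValues({ endpoint: 'agents', spec: undefined, iconURL: null, agent_id: 'agent_abc' });
// cleaned => { endpoint: 'agents', agent_id: 'agent_abc' }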
@@ -1,11 +1,17 @@
const { logger } = require('@librechat/data-schemas');
const { createContentAggregator } = require('@librechat/agents');
const { Constants, EModelEndpoint, getResponseSender } = require('librechat-data-provider');
const {
  getDefaultHandlers,
  Constants,
  EModelEndpoint,
  isAgentsEndpoint,
  getResponseSender,
} = require('librechat-data-provider');
const {
  createToolEndCallback,
  getDefaultHandlers,
} = require('~/server/controllers/agents/callbacks');
const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { loadAgentTools } = require('~/server/services/ToolService');
const AgentClient = require('~/server/controllers/agents/client');
const { getAgent } = require('~/models/Agent');
@@ -61,6 +67,7 @@ const initializeClient = async ({ req, res, endpointOption }) => {
  }

  const primaryAgent = await endpointOption.agent;
  delete endpointOption.agent;
  if (!primaryAgent) {
    throw new Error('Agent not found');
  }
@@ -108,11 +115,25 @@ const initializeClient = async ({ req, res, endpointOption }) => {
    }
  }

  let endpointConfig = req.app.locals[primaryConfig.endpoint];
  if (!isAgentsEndpoint(primaryConfig.endpoint) && !endpointConfig) {
    try {
      endpointConfig = await getCustomEndpointConfig(primaryConfig.endpoint);
    } catch (err) {
      logger.error(
        '[api/server/controllers/agents/client.js #titleConvo] Error getting custom endpoint config',
        err,
      );
    }
  }

  const sender =
    primaryAgent.name ??
    getResponseSender({
      ...endpointOption,
      model: endpointOption.model_parameters.model,
      modelDisplayLabel: endpointConfig?.modelDisplayLabel,
      modelLabel: endpointOption.model_parameters.modelLabel,
    });

  const client = new AgentClient({
@@ -23,7 +23,7 @@ const addTitle = async (req, { text, response, client }) => {
  let timeoutId;
  try {
    const timeoutPromise = new Promise((_, reject) => {
      timeoutId = setTimeout(() => reject(new Error('Title generation timeout')), 25000);
      timeoutId = setTimeout(() => reject(new Error('Title generation timeout')), 45000);
    }).catch((error) => {
      logger.error('Title error:', error);
    });
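The hunk above only raises the title-generation timeout from 25s to 45s; the rest of addTitle is outside this hunk. Presumably the timeout promise is raced against the actual title call, roughly along these lines (a sketch under that assumption; titlePromise is a hypothetical name, and only the 45000 ms value comes from this diff):

// Hypothetical sketch: race the title call against the timeout promise,
// then clear the timer so the rejection never fires after success.
const title = await Promise.race([timeoutPromise, titlePromise]);
clearTimeout(timeoutId);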
@@ -41,7 +41,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
    {
      reverseProxyUrl: ANTHROPIC_REVERSE_PROXY ?? null,
      proxy: PROXY ?? null,
      modelOptions: endpointOption.model_parameters,
      modelOptions: endpointOption?.model_parameters ?? {},
    },
    clientOptions,
  );

@@ -75,6 +75,7 @@ function getLLMConfig(apiKey, options = {}) {

  if (options.reverseProxyUrl) {
    requestOptions.clientOptions.baseURL = options.reverseProxyUrl;
    requestOptions.anthropicApiUrl = options.reverseProxyUrl;
  }

  return {
@@ -1,11 +1,45 @@
const { anthropicSettings } = require('librechat-data-provider');
const { anthropicSettings, removeNullishValues } = require('librechat-data-provider');
const { getLLMConfig } = require('~/server/services/Endpoints/anthropic/llm');
const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = require('./helpers');

jest.mock('https-proxy-agent', () => ({
  HttpsProxyAgent: jest.fn().mockImplementation((proxy) => ({ proxy })),
}));

jest.mock('./helpers', () => ({
  checkPromptCacheSupport: jest.fn(),
  getClaudeHeaders: jest.fn(),
  configureReasoning: jest.fn((requestOptions) => requestOptions),
}));

jest.mock('librechat-data-provider', () => ({
  anthropicSettings: {
    model: { default: 'claude-3-opus-20240229' },
    maxOutputTokens: { default: 4096, reset: jest.fn(() => 4096) },
    thinking: { default: false },
    promptCache: { default: false },
    thinkingBudget: { default: null },
  },
  removeNullishValues: jest.fn((obj) => {
    const result = {};
    for (const key in obj) {
      if (obj[key] !== null && obj[key] !== undefined) {
        result[key] = obj[key];
      }
    }
    return result;
  }),
}));

describe('getLLMConfig', () => {
  beforeEach(() => {
    jest.clearAllMocks();
    checkPromptCacheSupport.mockReturnValue(false);
    getClaudeHeaders.mockReturnValue(undefined);
    configureReasoning.mockImplementation((requestOptions) => requestOptions);
    anthropicSettings.maxOutputTokens.reset.mockReturnValue(4096);
  });

  it('should create a basic configuration with default values', () => {
    const result = getLLMConfig('test-api-key', { modelOptions: {} });

@@ -36,6 +70,7 @@ describe('getLLMConfig', () => {
    });

    expect(result.llmConfig.clientOptions).toHaveProperty('baseURL', 'http://reverse-proxy');
    expect(result.llmConfig).toHaveProperty('anthropicApiUrl', 'http://reverse-proxy');
  });

  it('should include topK and topP for non-Claude-3.7 models', () => {
@@ -65,6 +100,11 @@ describe('getLLMConfig', () => {
  });

  it('should NOT include topK and topP for Claude-3-7 models (hyphen notation)', () => {
    configureReasoning.mockImplementation((requestOptions) => {
      requestOptions.thinking = { type: 'enabled' };
      return requestOptions;
    });

    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-7-sonnet',
@@ -78,6 +118,11 @@ describe('getLLMConfig', () => {
  });

  it('should NOT include topK and topP for Claude-3.7 models (decimal notation)', () => {
    configureReasoning.mockImplementation((requestOptions) => {
      requestOptions.thinking = { type: 'enabled' };
      return requestOptions;
    });

    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3.7-sonnet',
@@ -154,4 +199,160 @@ describe('getLLMConfig', () => {
    expect(result3.llmConfig).toHaveProperty('topK', 10);
    expect(result3.llmConfig).toHaveProperty('topP', 0.9);
  });

  describe('Edge cases', () => {
    it('should handle missing apiKey', () => {
      const result = getLLMConfig(undefined, { modelOptions: {} });
      expect(result.llmConfig).not.toHaveProperty('apiKey');
    });

    it('should handle empty modelOptions', () => {
      expect(() => {
        getLLMConfig('test-api-key', {});
      }).toThrow("Cannot read properties of undefined (reading 'thinking')");
    });

    it('should handle no options parameter', () => {
      expect(() => {
        getLLMConfig('test-api-key');
      }).toThrow("Cannot read properties of undefined (reading 'thinking')");
    });

    it('should handle temperature, stop sequences, and stream settings', () => {
      const result = getLLMConfig('test-api-key', {
        modelOptions: {
          temperature: 0.7,
          stop: ['\n\n', 'END'],
          stream: false,
        },
      });

      expect(result.llmConfig).toHaveProperty('temperature', 0.7);
      expect(result.llmConfig).toHaveProperty('stopSequences', ['\n\n', 'END']);
      expect(result.llmConfig).toHaveProperty('stream', false);
    });

    it('should handle maxOutputTokens when explicitly set to falsy value', () => {
      anthropicSettings.maxOutputTokens.reset.mockReturnValue(8192);
      const result = getLLMConfig('test-api-key', {
        modelOptions: {
          model: 'claude-3-opus',
          maxOutputTokens: null,
        },
      });

      expect(anthropicSettings.maxOutputTokens.reset).toHaveBeenCalledWith('claude-3-opus');
      expect(result.llmConfig).toHaveProperty('maxTokens', 8192);
    });

    it('should handle both proxy and reverseProxyUrl', () => {
      const result = getLLMConfig('test-api-key', {
        modelOptions: {},
        proxy: 'http://proxy:8080',
        reverseProxyUrl: 'https://reverse-proxy.com',
      });

      expect(result.llmConfig.clientOptions).toHaveProperty('fetchOptions');
      expect(result.llmConfig.clientOptions.fetchOptions).toHaveProperty('dispatcher');
      expect(result.llmConfig.clientOptions.fetchOptions.dispatcher).toBeDefined();
      expect(result.llmConfig.clientOptions.fetchOptions.dispatcher.constructor.name).toBe(
        'ProxyAgent',
      );
      expect(result.llmConfig.clientOptions).toHaveProperty('baseURL', 'https://reverse-proxy.com');
      expect(result.llmConfig).toHaveProperty('anthropicApiUrl', 'https://reverse-proxy.com');
    });

    it('should handle prompt cache with supported model', () => {
      checkPromptCacheSupport.mockReturnValue(true);
      getClaudeHeaders.mockReturnValue({ 'anthropic-beta': 'prompt-caching-2024-07-31' });

      const result = getLLMConfig('test-api-key', {
        modelOptions: {
          model: 'claude-3-5-sonnet',
          promptCache: true,
        },
      });

      expect(checkPromptCacheSupport).toHaveBeenCalledWith('claude-3-5-sonnet');
      expect(getClaudeHeaders).toHaveBeenCalledWith('claude-3-5-sonnet', true);
      expect(result.llmConfig.clientOptions.defaultHeaders).toEqual({
        'anthropic-beta': 'prompt-caching-2024-07-31',
      });
    });

    it('should handle thinking and thinkingBudget options', () => {
      configureReasoning.mockImplementation((requestOptions, systemOptions) => {
        if (systemOptions.thinking) {
          requestOptions.thinking = { type: 'enabled' };
        }
        if (systemOptions.thinkingBudget) {
          requestOptions.thinking = {
            ...requestOptions.thinking,
            budget_tokens: systemOptions.thinkingBudget,
          };
        }
        return requestOptions;
      });

      getLLMConfig('test-api-key', {
        modelOptions: {
          model: 'claude-3-7-sonnet',
          thinking: true,
          thinkingBudget: 5000,
        },
      });

      expect(configureReasoning).toHaveBeenCalledWith(
        expect.any(Object),
        expect.objectContaining({
          thinking: true,
          promptCache: false,
          thinkingBudget: 5000,
        }),
      );
    });

    it('should remove system options from modelOptions', () => {
      const modelOptions = {
        model: 'claude-3-opus',
        thinking: true,
        promptCache: true,
        thinkingBudget: 1000,
        temperature: 0.5,
      };

      getLLMConfig('test-api-key', { modelOptions });

      expect(modelOptions).not.toHaveProperty('thinking');
      expect(modelOptions).not.toHaveProperty('promptCache');
      expect(modelOptions).not.toHaveProperty('thinkingBudget');
      expect(modelOptions).toHaveProperty('temperature', 0.5);
    });

    it('should handle all nullish values removal', () => {
      removeNullishValues.mockImplementation((obj) => {
        const cleaned = {};
        Object.entries(obj).forEach(([key, value]) => {
          if (value !== null && value !== undefined) {
            cleaned[key] = value;
          }
        });
        return cleaned;
      });

      const result = getLLMConfig('test-api-key', {
        modelOptions: {
          temperature: null,
          topP: undefined,
          topK: 0,
          stop: [],
        },
      });

      expect(result.llmConfig).not.toHaveProperty('temperature');
      expect(result.llmConfig).not.toHaveProperty('topP');
      expect(result.llmConfig).toHaveProperty('topK', 0);
      expect(result.llmConfig).toHaveProperty('stopSequences', []);
    });
  });
});
@@ -1,12 +1,7 @@
const OpenAI = require('openai');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { constructAzureURL, isUserProvided } = require('@librechat/api');
const {
  ErrorTypes,
  EModelEndpoint,
  resolveHeaders,
  mapModelToAzureConfig,
} = require('librechat-data-provider');
const { constructAzureURL, isUserProvided, resolveHeaders } = require('@librechat/api');
const { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } = require('librechat-data-provider');
const {
  getUserKeyValues,
  getUserKeyExpiry,
@@ -114,11 +109,14 @@ const initializeClient = async ({ req, res, version, endpointOption, initAppClie

    apiKey = azureOptions.azureOpenAIApiKey;
    opts.defaultQuery = { 'api-version': azureOptions.azureOpenAIApiVersion };
    opts.defaultHeaders = resolveHeaders({
      ...headers,
      'api-key': apiKey,
      'OpenAI-Beta': `assistants=${version}`,
    });
    opts.defaultHeaders = resolveHeaders(
      {
        ...headers,
        'api-key': apiKey,
        'OpenAI-Beta': `assistants=${version}`,
      },
      req.user,
    );
    opts.model = azureOptions.azureOpenAIApiDeploymentName;

    if (initAppClient) {
@@ -64,7 +64,7 @@ const getOptions = async ({ req, overrideModel, endpointOption }) => {

  /** @type {BedrockClientOptions} */
  const requestOptions = {
    model: overrideModel ?? endpointOption.model,
    model: overrideModel ?? endpointOption?.model,
    region: BEDROCK_AWS_DEFAULT_REGION,
  };

@@ -76,7 +76,7 @@ const getOptions = async ({ req, overrideModel, endpointOption }) => {

  const llmConfig = bedrockOutputParser(
    bedrockInputParser.parse(
      removeNullishValues(Object.assign(requestOptions, endpointOption.model_parameters)),
      removeNullishValues(Object.assign(requestOptions, endpointOption?.model_parameters ?? {})),
    ),
  );
@@ -6,7 +6,7 @@ const {
  extractEnvVariable,
} = require('librechat-data-provider');
const { Providers } = require('@librechat/agents');
const { getOpenAIConfig, createHandleLLMNewToken } = require('@librechat/api');
const { getOpenAIConfig, createHandleLLMNewToken, resolveHeaders } = require('@librechat/api');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { fetchModels } = require('~/server/services/ModelService');
@@ -28,12 +28,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
  const CUSTOM_API_KEY = extractEnvVariable(endpointConfig.apiKey);
  const CUSTOM_BASE_URL = extractEnvVariable(endpointConfig.baseURL);

  let resolvedHeaders = {};
  if (endpointConfig.headers && typeof endpointConfig.headers === 'object') {
    Object.keys(endpointConfig.headers).forEach((key) => {
      resolvedHeaders[key] = extractEnvVariable(endpointConfig.headers[key]);
    });
  }
  let resolvedHeaders = resolveHeaders(endpointConfig.headers, req.user);

  if (CUSTOM_API_KEY.match(envVarRegex)) {
    throw new Error(`Missing API Key for ${endpoint}.`);
@@ -134,7 +129,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
  };

  if (optionsOnly) {
    const modelOptions = endpointOption.model_parameters;
    const modelOptions = endpointOption?.model_parameters ?? {};
    if (endpoint !== Providers.OLLAMA) {
      clientOptions = Object.assign(
        {
@@ -18,7 +18,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
  let serviceKey = {};
  try {
    serviceKey = require('~/data/auth.json');
  } catch (e) {
  } catch (_e) {
    // Do nothing
  }

@@ -58,7 +58,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
  if (optionsOnly) {
    clientOptions = Object.assign(
      {
        modelOptions: endpointOption.model_parameters,
        modelOptions: endpointOption?.model_parameters ?? {},
      },
      clientOptions,
    );
@@ -1,41 +0,0 @@
const { removeNullishValues } = require('librechat-data-provider');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');

const buildOptions = (endpoint, parsedBody) => {
  const {
    modelLabel,
    chatGptLabel,
    promptPrefix,
    agentOptions,
    tools = [],
    iconURL,
    greeting,
    spec,
    maxContextTokens,
    artifacts,
    ...modelOptions
  } = parsedBody;
  const endpointOption = removeNullishValues({
    endpoint,
    tools: tools
      .map((tool) => tool?.pluginKey ?? tool)
      .filter((toolName) => typeof toolName === 'string'),
    modelLabel,
    chatGptLabel,
    promptPrefix,
    agentOptions,
    iconURL,
    greeting,
    spec,
    maxContextTokens,
    modelOptions,
  });

  if (typeof artifacts === 'string') {
    endpointOption.artifactsPrompt = generateArtifactsPrompt({ endpoint, artifacts });
  }

  return endpointOption;
};

module.exports = buildOptions;
@@ -1,7 +0,0 @@
const buildOptions = require('./build');
const initializeClient = require('./initialize');

module.exports = {
  buildOptions,
  initializeClient,
};
@@ -1,134 +0,0 @@
const {
  EModelEndpoint,
  resolveHeaders,
  mapModelToAzureConfig,
} = require('librechat-data-provider');
const { isEnabled, isUserProvided, getAzureCredentials } = require('@librechat/api');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const { PluginsClient } = require('~/app');

const initializeClient = async ({ req, res, endpointOption }) => {
  const {
    PROXY,
    OPENAI_API_KEY,
    AZURE_API_KEY,
    PLUGINS_USE_AZURE,
    OPENAI_REVERSE_PROXY,
    AZURE_OPENAI_BASEURL,
    OPENAI_SUMMARIZE,
    DEBUG_PLUGINS,
  } = process.env;

  const { key: expiresAt, model: modelName } = req.body;
  const contextStrategy = isEnabled(OPENAI_SUMMARIZE) ? 'summarize' : null;

  let useAzure = isEnabled(PLUGINS_USE_AZURE);
  let endpoint = useAzure ? EModelEndpoint.azureOpenAI : EModelEndpoint.openAI;

  /** @type {false | TAzureConfig} */
  const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI];
  useAzure = useAzure || azureConfig?.plugins;

  if (useAzure && endpoint !== EModelEndpoint.azureOpenAI) {
    endpoint = EModelEndpoint.azureOpenAI;
  }

  const credentials = {
    [EModelEndpoint.openAI]: OPENAI_API_KEY,
    [EModelEndpoint.azureOpenAI]: AZURE_API_KEY,
  };

  const baseURLOptions = {
    [EModelEndpoint.openAI]: OPENAI_REVERSE_PROXY,
    [EModelEndpoint.azureOpenAI]: AZURE_OPENAI_BASEURL,
  };

  const userProvidesKey = isUserProvided(credentials[endpoint]);
  const userProvidesURL = isUserProvided(baseURLOptions[endpoint]);

  let userValues = null;
  if (expiresAt && (userProvidesKey || userProvidesURL)) {
    checkUserKeyExpiry(expiresAt, endpoint);
    userValues = await getUserKeyValues({ userId: req.user.id, name: endpoint });
  }

  let apiKey = userProvidesKey ? userValues?.apiKey : credentials[endpoint];
  let baseURL = userProvidesURL ? userValues?.baseURL : baseURLOptions[endpoint];

  const clientOptions = {
    contextStrategy,
    debug: isEnabled(DEBUG_PLUGINS),
    reverseProxyUrl: baseURL ? baseURL : null,
    proxy: PROXY ?? null,
    req,
    res,
    ...endpointOption,
  };

  if (useAzure && azureConfig) {
    const { modelGroupMap, groupMap } = azureConfig;
    const {
      azureOptions,
      baseURL,
      headers = {},
      serverless,
    } = mapModelToAzureConfig({
      modelName,
      modelGroupMap,
      groupMap,
    });

    clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl;
    clientOptions.headers = resolveHeaders({ ...headers, ...(clientOptions.headers ?? {}) });

    clientOptions.titleConvo = azureConfig.titleConvo;
    clientOptions.titleModel = azureConfig.titleModel;
    clientOptions.titleMethod = azureConfig.titleMethod ?? 'completion';

    const azureRate = modelName.includes('gpt-4') ? 30 : 17;
    clientOptions.streamRate = azureConfig.streamRate ?? azureRate;

    const groupName = modelGroupMap[modelName].group;
    clientOptions.addParams = azureConfig.groupMap[groupName].addParams;
    clientOptions.dropParams = azureConfig.groupMap[groupName].dropParams;
    clientOptions.forcePrompt = azureConfig.groupMap[groupName].forcePrompt;

    apiKey = azureOptions.azureOpenAIApiKey;
    clientOptions.azure = !serverless && azureOptions;
    if (serverless === true) {
      clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion
        ? { 'api-version': azureOptions.azureOpenAIApiVersion }
        : undefined;
      clientOptions.headers['api-key'] = apiKey;
    }
  } else if (useAzure || (apiKey && apiKey.includes('{"azure') && !clientOptions.azure)) {
    clientOptions.azure = userProvidesKey ? JSON.parse(userValues.apiKey) : getAzureCredentials();
    apiKey = clientOptions.azure.azureOpenAIApiKey;
  }

  /** @type {undefined | TBaseEndpoint} */
  const pluginsConfig = req.app.locals[EModelEndpoint.gptPlugins];

  if (!useAzure && pluginsConfig) {
    clientOptions.streamRate = pluginsConfig.streamRate;
  }

  /** @type {undefined | TBaseEndpoint} */
  const allConfig = req.app.locals.all;
  if (allConfig) {
    clientOptions.streamRate = allConfig.streamRate;
  }

  if (!apiKey) {
    throw new Error(`${endpoint} API key not provided. Please provide it again.`);
  }

  const client = new PluginsClient(apiKey, clientOptions);
  return {
    client,
    azure: clientOptions.azure,
    openAIApiKey: apiKey,
  };
};

module.exports = initializeClient;
@@ -1,410 +0,0 @@
// gptPlugins/initializeClient.spec.js
jest.mock('~/cache/getLogStores');
const { EModelEndpoint, ErrorTypes, validateAzureGroups } = require('librechat-data-provider');
const { getUserKey, getUserKeyValues } = require('~/server/services/UserService');
const initializeClient = require('./initialize');
const { PluginsClient } = require('~/app');

// Mock getUserKey since it's the only function we want to mock
jest.mock('~/server/services/UserService', () => ({
  getUserKey: jest.fn(),
  getUserKeyValues: jest.fn(),
  checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
}));

describe('gptPlugins/initializeClient', () => {
  // Set up environment variables
  const originalEnvironment = process.env;
  const app = {
    locals: {},
  };

  const validAzureConfigs = [
    {
      group: 'librechat-westus',
      apiKey: 'WESTUS_API_KEY',
      instanceName: 'librechat-westus',
      version: '2023-12-01-preview',
      models: {
        'gpt-4-vision-preview': {
          deploymentName: 'gpt-4-vision-preview',
          version: '2024-02-15-preview',
        },
        'gpt-3.5-turbo': {
          deploymentName: 'gpt-35-turbo',
        },
        'gpt-3.5-turbo-1106': {
          deploymentName: 'gpt-35-turbo-1106',
        },
        'gpt-4': {
          deploymentName: 'gpt-4',
        },
        'gpt-4-1106-preview': {
          deploymentName: 'gpt-4-1106-preview',
        },
      },
    },
    {
      group: 'librechat-eastus',
      apiKey: 'EASTUS_API_KEY',
      instanceName: 'librechat-eastus',
      deploymentName: 'gpt-4-turbo',
      version: '2024-02-15-preview',
      models: {
        'gpt-4-turbo': true,
      },
      baseURL: 'https://eastus.example.com',
      additionalHeaders: {
        'x-api-key': 'x-api-key-value',
      },
    },
    {
      group: 'mistral-inference',
      apiKey: 'AZURE_MISTRAL_API_KEY',
      baseURL:
        'https://Mistral-large-vnpet-serverless.region.inference.ai.azure.com/v1/chat/completions',
      serverless: true,
      models: {
        'mistral-large': true,
      },
    },
    {
      group: 'llama-70b-chat',
      apiKey: 'AZURE_LLAMA2_70B_API_KEY',
      baseURL:
        'https://Llama-2-70b-chat-qmvyb-serverless.region.inference.ai.azure.com/v1/chat/completions',
      serverless: true,
      models: {
        'llama-70b-chat': true,
      },
    },
  ];

  const { modelNames, modelGroupMap, groupMap } = validateAzureGroups(validAzureConfigs);

  beforeEach(() => {
    jest.resetModules(); // Clears the cache
    process.env = { ...originalEnvironment }; // Make a copy
  });

  afterAll(() => {
    process.env = originalEnvironment; // Restore original env vars
  });

  test('should initialize PluginsClient with OpenAI API key and default options', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.PLUGINS_USE_AZURE = 'false';
    process.env.DEBUG_PLUGINS = 'false';
    process.env.OPENAI_SUMMARIZE = 'false';

    const req = {
      body: { key: null },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    const { client, openAIApiKey } = await initializeClient({ req, res, endpointOption });

    expect(openAIApiKey).toBe('test-openai-api-key');
    expect(client).toBeInstanceOf(PluginsClient);
  });

  test('should initialize PluginsClient with Azure credentials when PLUGINS_USE_AZURE is true', async () => {
    process.env.AZURE_API_KEY = 'test-azure-api-key';
    (process.env.AZURE_OPENAI_API_INSTANCE_NAME = 'some-value'),
      (process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME = 'some-value'),
      (process.env.AZURE_OPENAI_API_VERSION = 'some-value'),
      (process.env.AZURE_OPENAI_API_COMPLETIONS_DEPLOYMENT_NAME = 'some-value'),
      (process.env.AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME = 'some-value'),
      (process.env.PLUGINS_USE_AZURE = 'true');
    process.env.DEBUG_PLUGINS = 'false';
    process.env.OPENAI_SUMMARIZE = 'false';

    const req = {
      body: { key: null },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'test-model' } };

    const { client, azure } = await initializeClient({ req, res, endpointOption });

    expect(azure.azureOpenAIApiKey).toBe('test-azure-api-key');
    expect(client).toBeInstanceOf(PluginsClient);
  });

  test('should use the debug option when DEBUG_PLUGINS is enabled', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.DEBUG_PLUGINS = 'true';

    const req = {
      body: { key: null },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    const { client } = await initializeClient({ req, res, endpointOption });

    expect(client.options.debug).toBe(true);
  });

  test('should set contextStrategy to summarize when OPENAI_SUMMARIZE is enabled', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.OPENAI_SUMMARIZE = 'true';

    const req = {
      body: { key: null },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    const { client } = await initializeClient({ req, res, endpointOption });

    expect(client.options.contextStrategy).toBe('summarize');
  });

  // ... additional tests for reverseProxyUrl, proxy, user-provided keys, etc.

  test('should throw an error if no API keys are provided in the environment', async () => {
    // Clear the environment variables for API keys
    delete process.env.OPENAI_API_KEY;
    delete process.env.AZURE_API_KEY;

    const req = {
      body: { key: null },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      `${EModelEndpoint.openAI} API key not provided.`,
    );
  });

  // Additional tests for gptPlugins/initializeClient.spec.js

  // ... (previous test setup code)

  test('should handle user-provided OpenAI keys and check expiry', async () => {
    process.env.OPENAI_API_KEY = 'user_provided';
    process.env.PLUGINS_USE_AZURE = 'false';

    const futureDate = new Date(Date.now() + 10000).toISOString();
    const req = {
      body: { key: futureDate },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    getUserKeyValues.mockResolvedValue({ apiKey: 'test-user-provided-openai-api-key' });

    const { openAIApiKey } = await initializeClient({ req, res, endpointOption });

    expect(openAIApiKey).toBe('test-user-provided-openai-api-key');
  });

  test('should handle user-provided Azure keys and check expiry', async () => {
    process.env.AZURE_API_KEY = 'user_provided';
    process.env.PLUGINS_USE_AZURE = 'true';

    const futureDate = new Date(Date.now() + 10000).toISOString();
    const req = {
      body: { key: futureDate },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'test-model' } };

    getUserKeyValues.mockResolvedValue({
      apiKey: JSON.stringify({
        azureOpenAIApiKey: 'test-user-provided-azure-api-key',
        azureOpenAIApiDeploymentName: 'test-deployment',
      }),
    });

    const { azure } = await initializeClient({ req, res, endpointOption });

    expect(azure.azureOpenAIApiKey).toBe('test-user-provided-azure-api-key');
  });

  test('should throw an error if the user-provided key has expired', async () => {
    process.env.OPENAI_API_KEY = 'user_provided';
    process.env.PLUGINS_USE_AZURE = 'FALSE';
    const expiresAt = new Date(Date.now() - 10000).toISOString(); // Expired
    const req = {
      body: { key: expiresAt },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /expired_user_key/,
    );
  });

  test('should throw an error if the user-provided Azure key is invalid JSON', async () => {
    process.env.AZURE_API_KEY = 'user_provided';
    process.env.PLUGINS_USE_AZURE = 'true';

    const req = {
      body: { key: new Date(Date.now() + 10000).toISOString() },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    // Simulate an invalid JSON string returned from getUserKey
    getUserKey.mockResolvedValue('invalid-json');
    getUserKeyValues.mockImplementation(() => {
      let userValues = getUserKey();
      try {
        userValues = JSON.parse(userValues);
      } catch (e) {
        throw new Error(
          JSON.stringify({
            type: ErrorTypes.INVALID_USER_KEY,
          }),
        );
      }
      return userValues;
    });

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /invalid_user_key/,
    );
  });

  test('should correctly handle the presence of a reverse proxy', async () => {
    process.env.OPENAI_REVERSE_PROXY = 'http://reverse.proxy';
    process.env.PROXY = 'http://proxy';
    process.env.OPENAI_API_KEY = 'test-openai-api-key';

    const req = {
      body: { key: null },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    const { client } = await initializeClient({ req, res, endpointOption });

    expect(client.options.reverseProxyUrl).toBe('http://reverse.proxy');
    expect(client.options.proxy).toBe('http://proxy');
  });

  test('should throw an error when user-provided values are not valid JSON', async () => {
    process.env.OPENAI_API_KEY = 'user_provided';
    const req = {
      body: { key: new Date(Date.now() + 10000).toISOString(), endpoint: 'openAI' },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = {};

    // Mock getUserKey to return a non-JSON string
    getUserKey.mockResolvedValue('not-a-json');
    getUserKeyValues.mockImplementation(() => {
      let userValues = getUserKey();
      try {
        userValues = JSON.parse(userValues);
      } catch (e) {
        throw new Error(
          JSON.stringify({
            type: ErrorTypes.INVALID_USER_KEY,
          }),
        );
      }
      return userValues;
    });

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /invalid_user_key/,
    );
  });

  test('should initialize client correctly for Azure OpenAI with valid configuration', async () => {
    const req = {
      body: {
        key: null,
        endpoint: EModelEndpoint.gptPlugins,
        model: modelNames[0],
      },
      user: { id: '123' },
      app: {
        locals: {
          [EModelEndpoint.azureOpenAI]: {
            plugins: true,
            modelNames,
            modelGroupMap,
            groupMap,
          },
        },
      },
    };
    const res = {};
    const endpointOption = {};

    const client = await initializeClient({ req, res, endpointOption });
    expect(client.client.options.azure).toBeDefined();
  });

  test('should initialize client with default options when certain env vars are not set', async () => {
    delete process.env.OPENAI_SUMMARIZE;
    process.env.OPENAI_API_KEY = 'some-api-key';

    const req = {
      body: { key: null, endpoint: EModelEndpoint.gptPlugins },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = {};

    const client = await initializeClient({ req, res, endpointOption });
    expect(client.client.options.contextStrategy).toBe(null);
  });

  test('should correctly use user-provided apiKey and baseURL when provided', async () => {
    process.env.OPENAI_API_KEY = 'user_provided';
    process.env.OPENAI_REVERSE_PROXY = 'user_provided';
    const req = {
      body: {
        key: new Date(Date.now() + 10000).toISOString(),
        endpoint: 'openAI',
      },
      user: {
        id: '123',
      },
      app,
    };
    const res = {};
    const endpointOption = {};

    getUserKeyValues.mockResolvedValue({
      apiKey: 'test',
      baseURL: 'https://user-provided-url.com',
    });

    const result = await initializeClient({ req, res, endpointOption });

    expect(result.openAIApiKey).toBe('test');
    expect(result.client.options.reverseProxyUrl).toBe('https://user-provided-url.com');
  });
});
58 api/server/services/Endpoints/index.js Normal file
@@ -0,0 +1,58 @@
const { Providers } = require('@librechat/agents');
const { EModelEndpoint } = require('librechat-data-provider');
const initAnthropic = require('~/server/services/Endpoints/anthropic/initialize');
const getBedrockOptions = require('~/server/services/Endpoints/bedrock/options');
const initOpenAI = require('~/server/services/Endpoints/openAI/initialize');
const initCustom = require('~/server/services/Endpoints/custom/initialize');
const initGoogle = require('~/server/services/Endpoints/google/initialize');
const { getCustomEndpointConfig } = require('~/server/services/Config');

const providerConfigMap = {
  [Providers.XAI]: initCustom,
  [Providers.OLLAMA]: initCustom,
  [Providers.DEEPSEEK]: initCustom,
  [Providers.OPENROUTER]: initCustom,
  [EModelEndpoint.openAI]: initOpenAI,
  [EModelEndpoint.google]: initGoogle,
  [EModelEndpoint.azureOpenAI]: initOpenAI,
  [EModelEndpoint.anthropic]: initAnthropic,
  [EModelEndpoint.bedrock]: getBedrockOptions,
};

/**
 * Get the provider configuration and override endpoint based on the provider string
 * @param {string} provider - The provider string
 * @returns {Promise<{
 *   getOptions: Function,
 *   overrideProvider?: string,
 *   customEndpointConfig?: TEndpoint
 * }>}
 */
async function getProviderConfig(provider) {
  let getOptions = providerConfigMap[provider];
  let overrideProvider;
  /** @type {TEndpoint | undefined} */
  let customEndpointConfig;

  if (!getOptions && providerConfigMap[provider.toLowerCase()] != null) {
    overrideProvider = provider.toLowerCase();
    getOptions = providerConfigMap[overrideProvider];
  } else if (!getOptions) {
    customEndpointConfig = await getCustomEndpointConfig(provider);
    if (!customEndpointConfig) {
      throw new Error(`Provider ${provider} not supported`);
    }
    getOptions = initCustom;
    overrideProvider = Providers.OPENAI;
  }

  return {
    getOptions,
    overrideProvider,
    customEndpointConfig,
  };
}

module.exports = {
  getProviderConfig,
};
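For reference, a caller would consume this roughly as follows; a minimal sketch, where the 'openrouter' string and the destructured option names are illustrative only (the actual initializer signatures live in each Endpoints service):

  // Inside an async request handler: resolve an initializer for any
  // known provider string, falling back to the custom-endpoint path.
  const { getProviderConfig } = require('~/server/services/Endpoints');

  const { getOptions, overrideProvider } = await getProviderConfig('openrouter');
  // For an unrecognized provider, overrideProvider is set to Providers.OPENAI
  // and getOptions is the custom initializer; known providers map directly.
  const options = await getOptions({ req, res, endpointOption });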
@@ -1,11 +1,7 @@
const {
  ErrorTypes,
  EModelEndpoint,
  resolveHeaders,
  mapModelToAzureConfig,
} = require('librechat-data-provider');
const { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } = require('librechat-data-provider');
const {
  isEnabled,
  resolveHeaders,
  isUserProvided,
  getOpenAIConfig,
  getAzureCredentials,
@@ -84,7 +80,10 @@ const initializeClient = async ({
  });

  clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl;
  clientOptions.headers = resolveHeaders({ ...headers, ...(clientOptions.headers ?? {}) });
  clientOptions.headers = resolveHeaders(
    { ...headers, ...(clientOptions.headers ?? {}) },
    req.user,
  );

  clientOptions.titleConvo = azureConfig.titleConvo;
  clientOptions.titleModel = azureConfig.titleModel;
@@ -139,7 +138,7 @@ const initializeClient = async ({
  }

  if (optionsOnly) {
    const modelOptions = endpointOption.model_parameters;
    const modelOptions = endpointOption?.model_parameters ?? {};
    modelOptions.model = modelName;
    clientOptions = Object.assign({ modelOptions }, clientOptions);
    clientOptions.modelOptions.user = req.user.id;
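Passing `req.user` lets configured header templates resolve per user. A minimal sketch of the idea; the placeholder name and implementation here are assumptions for illustration, not the library's actual code:

  // Substitute user-scoped placeholders in configured endpoint headers.
  function resolveHeadersSketch(headers: Record<string, string>, user?: { id: string }) {
    const out: Record<string, string> = {};
    for (const [key, value] of Object.entries(headers ?? {})) {
      // e.g. a configured header value of '{{LIBRECHAT_USER_ID}}'
      out[key] = user ? value.replace(/{{LIBRECHAT_USER_ID}}/g, user.id) : value;
    }
    return out;
  }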
@@ -1,9 +1,9 @@
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, processMCPEnv } = require('librechat-data-provider');
const { CacheKeys } = require('librechat-data-provider');
const { findToken, updateToken, createToken, deleteTokens } = require('~/models');
const { getMCPManager, getFlowStateManager } = require('~/config');
const { getCachedTools, setCachedTools } = require('./Config');
const { getLogStores } = require('~/cache');
const { findToken, updateToken, createToken, deleteTokens } = require('~/models');

/**
 * Initialize MCP servers
@@ -30,7 +30,6 @@ async function initializeMCP(app) {
      createToken,
      deleteTokens,
    },
    processMCPEnv,
  });

  delete app.locals.mcpConfig;
@@ -41,6 +41,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
    sidePanel: interfaceConfig?.sidePanel ?? defaults.sidePanel,
    privacyPolicy: interfaceConfig?.privacyPolicy ?? defaults.privacyPolicy,
    termsOfService: interfaceConfig?.termsOfService ?? defaults.termsOfService,
    mcpServers: interfaceConfig?.mcpServers ?? defaults.mcpServers,
    bookmarks: interfaceConfig?.bookmarks ?? defaults.bookmarks,
    memories: shouldDisableMemories ? false : (interfaceConfig?.memories ?? defaults.memories),
    prompts: interfaceConfig?.prompts ?? defaults.prompts,
@@ -1503,7 +1503,6 @@
 * @property {boolean|{userProvide: boolean}} [anthropic] - Flag to indicate if Anthropic endpoint is user provided, or its configuration.
 * @property {boolean|{userProvide: boolean}} [google] - Flag to indicate if Google endpoint is user provided, or its configuration.
 * @property {boolean|{userProvide: boolean, userProvideURL: boolean, name: string}} [custom] - Custom Endpoint configuration.
 * @property {boolean|GptPlugins} [gptPlugins] - Configuration for GPT plugins.
 * @memberof typedefs
 */
@@ -109,6 +109,9 @@
    "tailwindcss-radix": "^2.8.0",
    "zod": "^3.22.4"
  },
  "peerDependencies": {
    "jotai": "^2.12.5"
  },
  "devDependencies": {
    "@babel/plugin-transform-runtime": "^7.22.15",
    "@babel/preset-env": "^7.22.15",
89 client/src/Providers/BadgeRowContext.tsx Normal file
@@ -0,0 +1,89 @@
import React, { createContext, useContext } from 'react';
import { Tools, LocalStorageKeys } from 'librechat-data-provider';
import { useMCPSelect, useToolToggle, useCodeApiKeyForm, useSearchApiKeyForm } from '~/hooks';
import { useGetStartupConfig } from '~/data-provider';

interface BadgeRowContextType {
  conversationId?: string | null;
  mcpSelect: ReturnType<typeof useMCPSelect>;
  webSearch: ReturnType<typeof useToolToggle>;
  codeInterpreter: ReturnType<typeof useToolToggle>;
  fileSearch: ReturnType<typeof useToolToggle>;
  codeApiKeyForm: ReturnType<typeof useCodeApiKeyForm>;
  searchApiKeyForm: ReturnType<typeof useSearchApiKeyForm>;
  startupConfig: ReturnType<typeof useGetStartupConfig>['data'];
}

const BadgeRowContext = createContext<BadgeRowContextType | undefined>(undefined);

export function useBadgeRowContext() {
  const context = useContext(BadgeRowContext);
  if (context === undefined) {
    throw new Error('useBadgeRowContext must be used within a BadgeRowProvider');
  }
  return context;
}

interface BadgeRowProviderProps {
  children: React.ReactNode;
  conversationId?: string | null;
}

export default function BadgeRowProvider({ children, conversationId }: BadgeRowProviderProps) {
  /** Startup config */
  const { data: startupConfig } = useGetStartupConfig();

  /** MCPSelect hook */
  const mcpSelect = useMCPSelect({ conversationId });

  /** CodeInterpreter hooks */
  const codeApiKeyForm = useCodeApiKeyForm({});
  const { setIsDialogOpen: setCodeDialogOpen } = codeApiKeyForm;

  const codeInterpreter = useToolToggle({
    conversationId,
    setIsDialogOpen: setCodeDialogOpen,
    toolKey: Tools.execute_code,
    localStorageKey: LocalStorageKeys.LAST_CODE_TOGGLE_,
    authConfig: {
      toolId: Tools.execute_code,
      queryOptions: { retry: 1 },
    },
  });

  /** WebSearch hooks */
  const searchApiKeyForm = useSearchApiKeyForm({});
  const { setIsDialogOpen: setWebSearchDialogOpen } = searchApiKeyForm;

  const webSearch = useToolToggle({
    conversationId,
    toolKey: Tools.web_search,
    localStorageKey: LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_,
    setIsDialogOpen: setWebSearchDialogOpen,
    authConfig: {
      toolId: Tools.web_search,
      queryOptions: { retry: 1 },
    },
  });

  /** FileSearch hook */
  const fileSearch = useToolToggle({
    conversationId,
    toolKey: Tools.file_search,
    localStorageKey: LocalStorageKeys.LAST_FILE_SEARCH_TOGGLE_,
    isAuthenticated: true,
  });

  const value: BadgeRowContextType = {
    mcpSelect,
    webSearch,
    fileSearch,
    startupConfig,
    conversationId,
    codeApiKeyForm,
    codeInterpreter,
    searchApiKeyForm,
  };

  return <BadgeRowContext.Provider value={value}>{children}</BadgeRowContext.Provider>;
}
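Any badge component rendered below this provider can read its slice of tool state without prop drilling; a minimal consumer sketch (the component name is illustrative, not part of this diff):

  import { useBadgeRowContext } from '~/Providers';

  function MyBadge() {
    // Throws if rendered outside <BadgeRowProvider>, surfacing wiring mistakes early.
    const { webSearch } = useBadgeRowContext();
    const { toggleState, isPinned } = webSearch;
    return toggleState || isPinned ? <span>web search on</span> : null;
  }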
@@ -22,3 +22,5 @@ export * from './CodeBlockContext';
export * from './ToolCallsMapContext';
export * from './SetConvoContext';
export * from './SearchContext';
export * from './BadgeRowContext';
export { default as BadgeRowProvider } from './BadgeRowContext';
@@ -1,4 +1,3 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
export type RenderProp<
  P = React.HTMLAttributes<any> & {
    ref?: React.Ref<any>;
@@ -68,6 +68,7 @@ export default function ExportAndShareMenu({
  return (
    <>
      <DropdownPopup
        portal={true}
        menuId={menuId}
        focusLoop={true}
        unmountOnHide={true}
@@ -1,19 +1,23 @@
import React, {
  memo,
  useState,
  useRef,
  useEffect,
  useCallback,
  useMemo,
  useState,
  useEffect,
  forwardRef,
  useReducer,
  useCallback,
} from 'react';
import { useRecoilValue, useRecoilCallback } from 'recoil';
import type { LucideIcon } from 'lucide-react';
import CodeInterpreter from './CodeInterpreter';
import { BadgeRowProvider } from '~/Providers';
import ToolsDropdown from './ToolsDropdown';
import type { BadgeItem } from '~/common';
import { useChatBadges } from '~/hooks';
import { Badge } from '~/components/ui';
import ToolDialogs from './ToolDialogs';
import FileSearch from './FileSearch';
import MCPSelect from './MCPSelect';
import WebSearch from './WebSearch';
import store from '~/store';
@@ -313,78 +317,83 @@ function BadgeRow({
  }, [dragState.draggedBadge, handleMouseMove, handleMouseUp]);

  return (
    <div ref={containerRef} className="relative flex flex-wrap items-center gap-2">
      {tempBadges.map((badge, index) => (
        <React.Fragment key={badge.id}>
          {dragState.draggedBadge && dragState.insertIndex === index && ghostBadge && (
            <div className="badge-icon h-full">
              <Badge
                id={ghostBadge.id}
                icon={ghostBadge.icon as LucideIcon}
                label={ghostBadge.label}
                isActive={dragState.draggedBadgeActive}
                isEditing={isEditing}
                isAvailable={ghostBadge.isAvailable}
                isInChat={isInChat}
              />
            </div>
          )}
          <BadgeWrapper
            badge={badge}
            isEditing={isEditing}
            isInChat={isInChat}
            onToggle={handleBadgeToggle}
            onDelete={handleDelete}
            onMouseDown={handleMouseDown}
            badgeRefs={badgeRefs}
          />
        </React.Fragment>
      ))}
      {dragState.draggedBadge && dragState.insertIndex === tempBadges.length && ghostBadge && (
        <div className="badge-icon h-full">
          <Badge
            id={ghostBadge.id}
            icon={ghostBadge.icon as LucideIcon}
            label={ghostBadge.label}
            isActive={dragState.draggedBadgeActive}
            isEditing={isEditing}
            isAvailable={ghostBadge.isAvailable}
            isInChat={isInChat}
          />
        </div>
      )}
      {showEphemeralBadges === true && (
        <>
          <WebSearch conversationId={conversationId} />
          <CodeInterpreter conversationId={conversationId} />
          <MCPSelect conversationId={conversationId} />
        </>
      )}
      {ghostBadge && (
        <div
          className="ghost-badge h-full"
          style={{
            position: 'absolute',
            top: 0,
            left: 0,
            transform: `translateX(${dragState.mouseX - dragState.offsetX - (containerRectRef.current?.left || 0)}px)`,
            zIndex: 10,
            pointerEvents: 'none',
          }}
        >
          <Badge
            id={ghostBadge.id}
            icon={ghostBadge.icon as LucideIcon}
            label={ghostBadge.label}
            isActive={dragState.draggedBadgeActive}
            isAvailable={ghostBadge.isAvailable}
            isInChat={isInChat}
            isEditing
            isDragging
          />
        </div>
      )}
    </div>
    <BadgeRowProvider conversationId={conversationId}>
      <div ref={containerRef} className="relative flex flex-wrap items-center gap-2">
        {showEphemeralBadges === true && <ToolsDropdown />}
        {tempBadges.map((badge, index) => (
          <React.Fragment key={badge.id}>
            {dragState.draggedBadge && dragState.insertIndex === index && ghostBadge && (
              <div className="badge-icon h-full">
                <Badge
                  id={ghostBadge.id}
                  icon={ghostBadge.icon as LucideIcon}
                  label={ghostBadge.label}
                  isActive={dragState.draggedBadgeActive}
                  isEditing={isEditing}
                  isAvailable={ghostBadge.isAvailable}
                  isInChat={isInChat}
                />
              </div>
            )}
            <BadgeWrapper
              badge={badge}
              isEditing={isEditing}
              isInChat={isInChat}
              onToggle={handleBadgeToggle}
              onDelete={handleDelete}
              onMouseDown={handleMouseDown}
              badgeRefs={badgeRefs}
            />
          </React.Fragment>
        ))}
        {dragState.draggedBadge && dragState.insertIndex === tempBadges.length && ghostBadge && (
          <div className="badge-icon h-full">
            <Badge
              id={ghostBadge.id}
              icon={ghostBadge.icon as LucideIcon}
              label={ghostBadge.label}
              isActive={dragState.draggedBadgeActive}
              isEditing={isEditing}
              isAvailable={ghostBadge.isAvailable}
              isInChat={isInChat}
            />
          </div>
        )}
        {showEphemeralBadges === true && (
          <>
            <WebSearch />
            <CodeInterpreter />
            <FileSearch />
            <MCPSelect />
          </>
        )}
        {ghostBadge && (
          <div
            className="ghost-badge h-full"
            style={{
              position: 'absolute',
              top: 0,
              left: 0,
              transform: `translateX(${dragState.mouseX - dragState.offsetX - (containerRectRef.current?.left || 0)}px)`,
              zIndex: 10,
              pointerEvents: 'none',
            }}
          >
            <Badge
              id={ghostBadge.id}
              icon={ghostBadge.icon as LucideIcon}
              label={ghostBadge.label}
              isActive={dragState.draggedBadgeActive}
              isAvailable={ghostBadge.isAvailable}
              isInChat={isInChat}
              isEditing
              isDragging
            />
          </div>
        )}
      </div>
      <ToolDialogs />
    </BadgeRowProvider>
  );
}
@@ -1,122 +1,37 @@
import debounce from 'lodash/debounce';
import React, { memo, useMemo, useCallback, useRef } from 'react';
import { useRecoilState } from 'recoil';
import React, { memo } from 'react';
import { TerminalSquareIcon } from 'lucide-react';
import {
  Tools,
  AuthType,
  Constants,
  LocalStorageKeys,
  PermissionTypes,
  Permissions,
} from 'librechat-data-provider';
import ApiKeyDialog from '~/components/SidePanel/Agents/Code/ApiKeyDialog';
import { useLocalize, useHasAccess, useCodeApiKeyForm } from '~/hooks';
import { PermissionTypes, Permissions } from 'librechat-data-provider';
import CheckboxButton from '~/components/ui/CheckboxButton';
import useLocalStorage from '~/hooks/useLocalStorageAlt';
import { useVerifyAgentToolAuth } from '~/data-provider';
import { ephemeralAgentByConvoId } from '~/store';
import { useLocalize, useHasAccess } from '~/hooks';
import { useBadgeRowContext } from '~/Providers';

const storageCondition = (value: unknown, rawCurrentValue?: string | null) => {
  if (rawCurrentValue) {
    try {
      const currentValue = rawCurrentValue?.trim() ?? '';
      if (currentValue === 'true' && value === false) {
        return true;
      }
    } catch (e) {
      console.error(e);
    }
  }
  return value !== undefined && value !== null && value !== '' && value !== false;
};

function CodeInterpreter({ conversationId }: { conversationId?: string | null }) {
  const triggerRef = useRef<HTMLInputElement>(null);
function CodeInterpreter() {
  const localize = useLocalize();
  const key = conversationId ?? Constants.NEW_CONVO;
  const { codeInterpreter, codeApiKeyForm } = useBadgeRowContext();
  const { toggleState: runCode, debouncedChange, isPinned } = codeInterpreter;
  const { badgeTriggerRef } = codeApiKeyForm;

  const canRunCode = useHasAccess({
    permissionType: PermissionTypes.RUN_CODE,
    permission: Permissions.USE,
  });
  const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key));
  const isCodeToggleEnabled = useMemo(() => {
    return ephemeralAgent?.execute_code ?? false;
  }, [ephemeralAgent?.execute_code]);

  const { data } = useVerifyAgentToolAuth(
    { toolId: Tools.execute_code },
    {
      retry: 1,
    },
  );
  const authType = useMemo(() => data?.message ?? false, [data?.message]);
  const isAuthenticated = useMemo(() => data?.authenticated ?? false, [data?.authenticated]);
  const { methods, onSubmit, isDialogOpen, setIsDialogOpen, handleRevokeApiKey } =
    useCodeApiKeyForm({});

  const setValue = useCallback(
    (isChecked: boolean) => {
      setEphemeralAgent((prev) => ({
        ...prev,
        execute_code: isChecked,
      }));
    },
    [setEphemeralAgent],
  );

  const [runCode, setRunCode] = useLocalStorage<boolean>(
    `${LocalStorageKeys.LAST_CODE_TOGGLE_}${key}`,
    isCodeToggleEnabled,
    setValue,
    storageCondition,
  );

  const handleChange = useCallback(
    (e: React.ChangeEvent<HTMLInputElement>, isChecked: boolean) => {
      if (!isAuthenticated) {
        setIsDialogOpen(true);
        e.preventDefault();
        return;
      }
      setRunCode(isChecked);
    },
    [setRunCode, setIsDialogOpen, isAuthenticated],
  );

  const debouncedChange = useMemo(
    () => debounce(handleChange, 50, { leading: true }),
    [handleChange],
  );

  if (!canRunCode) {
    return null;
  }

  return (
    <>
      {(runCode || isPinned) && (
        <CheckboxButton
          ref={triggerRef}
          ref={badgeTriggerRef}
          className="max-w-fit"
          defaultChecked={runCode}
          checked={runCode}
          setValue={debouncedChange}
          label={localize('com_assistants_code_interpreter')}
          isCheckedClassName="border-purple-600/40 bg-purple-500/10 hover:bg-purple-700/10"
          icon={<TerminalSquareIcon className="icon-md" />}
        />
      <ApiKeyDialog
        onSubmit={onSubmit}
        isOpen={isDialogOpen}
        triggerRef={triggerRef}
        register={methods.register}
        onRevoke={handleRevokeApiKey}
        onOpenChange={setIsDialogOpen}
        handleSubmit={methods.handleSubmit}
        isToolAuthenticated={isAuthenticated}
        isUserProvided={authType === AuthType.USER_PROVIDED}
      />
    </>
      )}
  );
}
28 client/src/components/Chat/Input/FileSearch.tsx Normal file
@@ -0,0 +1,28 @@
import React, { memo } from 'react';
import CheckboxButton from '~/components/ui/CheckboxButton';
import { useBadgeRowContext } from '~/Providers';
import { VectorIcon } from '~/components/svg';
import { useLocalize } from '~/hooks';

function FileSearch() {
  const localize = useLocalize();
  const { fileSearch } = useBadgeRowContext();
  const { toggleState: fileSearchEnabled, debouncedChange, isPinned } = fileSearch;

  return (
    <>
      {(fileSearchEnabled || isPinned) && (
        <CheckboxButton
          className="max-w-fit"
          checked={fileSearchEnabled}
          setValue={debouncedChange}
          label={localize('com_assistants_file_search')}
          isCheckedClassName="border-green-600/40 bg-green-500/10 hover:bg-green-700/10"
          icon={<VectorIcon className="icon-md" />}
        />
      )}
    </>
  );
}

export default memo(FileSearch);
@@ -1,31 +1,21 @@
import { memo, useMemo } from 'react';
import { useRecoilValue } from 'recoil';
import {
  Constants,
  supportsFiles,
  mergeFileConfig,
  isAgentsEndpoint,
  isEphemeralAgent,
  EndpointFileConfig,
  fileConfig as defaultFileConfig,
} from 'librechat-data-provider';
import { useChatContext } from '~/Providers';
import { useGetFileConfig } from '~/data-provider';
import { ephemeralAgentByConvoId } from '~/store';
import AttachFileMenu from './AttachFileMenu';
import AttachFile from './AttachFile';
import { useChatContext } from '~/Providers';

function AttachFileChat({ disableInputs }: { disableInputs: boolean }) {
  const { conversation } = useChatContext();

  const conversationId = conversation?.conversationId ?? Constants.NEW_CONVO;
  const { endpoint: _endpoint, endpointType } = conversation ?? { endpoint: null };

  const key = conversation?.conversationId ?? Constants.NEW_CONVO;
  const ephemeralAgent = useRecoilValue(ephemeralAgentByConvoId(key));
  const isAgents = useMemo(
    () => isAgentsEndpoint(_endpoint) || isEphemeralAgent(_endpoint, ephemeralAgent),
    [_endpoint, ephemeralAgent],
  );
  const isAgents = useMemo(() => isAgentsEndpoint(_endpoint), [_endpoint]);

  const { data: fileConfig = defaultFileConfig } = useGetFileConfig({
    select: (data) => mergeFileConfig(data),
@@ -38,11 +28,8 @@ function AttachFileChat({ disableInputs }: { disableInputs: boolean }) {
  const endpointSupportsFiles: boolean = supportsFiles[endpointType ?? _endpoint ?? ''] ?? false;
  const isUploadDisabled = (disableInputs || endpointFileConfig?.disabled) ?? false;

  if (isAgents) {
    return <AttachFileMenu disabled={disableInputs} />;
  }
  if (endpointSupportsFiles && !isUploadDisabled) {
    return <AttachFile disabled={disableInputs} />;
  if (isAgents || (endpointSupportsFiles && !isUploadDisabled)) {
    return <AttachFileMenu disabled={disableInputs} conversationId={conversationId} />;
  }

  return null;
@@ -1,21 +1,25 @@
import { useSetRecoilState } from 'recoil';
import * as Ariakit from '@ariakit/react';
import React, { useRef, useState, useMemo } from 'react';
import { FileSearch, ImageUpIcon, TerminalSquareIcon, FileType2Icon } from 'lucide-react';
import { EToolResources, EModelEndpoint, defaultAgentCapabilities } from 'librechat-data-provider';
import { FileUpload, TooltipAnchor, DropdownPopup, AttachmentIcon } from '~/components';
import { EToolResources, EModelEndpoint } from 'librechat-data-provider';
import { useGetEndpointsQuery } from '~/data-provider';
import { useLocalize, useFileHandling } from '~/hooks';
import { ephemeralAgentByConvoId } from '~/store';
import { cn } from '~/utils';

interface AttachFileProps {
interface AttachFileMenuProps {
  conversationId: string;
  disabled?: boolean | null;
}

const AttachFile = ({ disabled }: AttachFileProps) => {
const AttachFileMenu = ({ disabled, conversationId }: AttachFileMenuProps) => {
  const localize = useLocalize();
  const isUploadDisabled = disabled ?? false;
  const inputRef = useRef<HTMLInputElement>(null);
  const [isPopoverActive, setIsPopoverActive] = useState(false);
  const setEphemeralAgent = useSetRecoilState(ephemeralAgentByConvoId(conversationId));
  const [toolResource, setToolResource] = useState<EToolResources | undefined>();
  const { data: endpointsConfig } = useGetEndpointsQuery();
  const { handleFileChange } = useFileHandling({
@@ -69,6 +73,7 @@ const AttachFile = ({ disabled }: AttachFileProps) => {
        label: localize('com_ui_upload_file_search'),
        onClick: () => {
          setToolResource(EToolResources.file_search);
          /** File search is not automatically enabled to simulate legacy behavior */
          handleUploadClick();
        },
        icon: <FileSearch className="icon-md" />,
@@ -80,6 +85,10 @@ const AttachFile = ({ disabled }: AttachFileProps) => {
        label: localize('com_ui_upload_code_files'),
        onClick: () => {
          setToolResource(EToolResources.execute_code);
          setEphemeralAgent((prev) => ({
            ...prev,
            [EToolResources.execute_code]: true,
          }));
          handleUploadClick();
        },
        icon: <TerminalSquareIcon className="icon-md" />,
@@ -87,7 +96,7 @@ const AttachFile = ({ disabled }: AttachFileProps) => {
    }

    return items;
  }, [capabilities, localize, setToolResource]);
  }, [capabilities, localize, setToolResource, setEphemeralAgent]);

  const menuTrigger = (
    <TooltipAnchor
@@ -132,4 +141,4 @@ const AttachFile = ({ disabled }: AttachFileProps) => {
  );
};

export default React.memo(AttachFile);
export default React.memo(AttachFileMenu);
@@ -7,7 +7,7 @@ import useLocalize from '~/hooks/useLocalize';
import { OGDialog } from '~/components/ui';

interface DragDropModalProps {
  onOptionSelect: (option: string | undefined) => void;
  onOptionSelect: (option: EToolResources | undefined) => void;
  files: File[];
  isVisible: boolean;
  setShowModal: (showModal: boolean) => void;
@@ -1,7 +1,6 @@
import React, { useEffect } from 'react';
import { useForm, Controller } from 'react-hook-form';
import { Input, Label, OGDialog, Button } from '~/components/ui';
import OGDialogTemplate from '~/components/ui/OGDialogTemplate';
import { Button, Input, Label, OGDialog, OGDialogTemplate } from '~/components';
import { useLocalize } from '~/hooks';

export interface ConfigFieldDetail {
@@ -1,74 +1,27 @@
import React, { memo, useRef, useMemo, useEffect, useCallback, useState } from 'react';
import { useRecoilState } from 'recoil';
import { Settings2 } from 'lucide-react';
import React, { memo, useCallback, useState } from 'react';
import { SettingsIcon } from 'lucide-react';
import { Constants } from 'librechat-data-provider';
import { useUpdateUserPluginsMutation } from 'librechat-data-provider/react-query';
import { Constants, EModelEndpoint, LocalStorageKeys } from 'librechat-data-provider';
import type { TPlugin, TPluginAuthConfig, TUpdateUserPlugins } from 'librechat-data-provider';
import MCPConfigDialog, { type ConfigFieldDetail } from '~/components/ui/MCPConfigDialog';
import { useAvailableToolsQuery } from '~/data-provider';
import useLocalStorage from '~/hooks/useLocalStorageAlt';
import type { TUpdateUserPlugins, TPlugin } from 'librechat-data-provider';
import MCPConfigDialog, { type ConfigFieldDetail } from './MCPConfigDialog';
import { useToastContext, useBadgeRowContext } from '~/Providers';
import MultiSelect from '~/components/ui/MultiSelect';
import { ephemeralAgentByConvoId } from '~/store';
import { useToastContext } from '~/Providers';
import MCPIcon from '~/components/ui/MCPIcon';
import { MCPIcon } from '~/components/svg';
import { useLocalize } from '~/hooks';

interface McpServerInfo {
  name: string;
  pluginKey: string;
  authConfig?: TPluginAuthConfig[];
  authenticated?: boolean;
}

// Helper function to extract mcp_serverName from a full pluginKey like action_mcp_serverName
const getBaseMCPPluginKey = (fullPluginKey: string): string => {
  const parts = fullPluginKey.split(Constants.mcp_delimiter);
  return Constants.mcp_prefix + parts[parts.length - 1];
};

const storageCondition = (value: unknown, rawCurrentValue?: string | null) => {
  if (rawCurrentValue) {
    try {
      const currentValue = rawCurrentValue?.trim() ?? '';
      if (currentValue.length > 2) {
        return true;
      }
    } catch (e) {
      console.error(e);
    }
  }
  return Array.isArray(value) && value.length > 0;
};

function MCPSelect({ conversationId }: { conversationId?: string | null }) {
function MCPSelect() {
  const localize = useLocalize();
  const { showToast } = useToastContext();
  const key = conversationId ?? Constants.NEW_CONVO;
  const hasSetFetched = useRef<string | null>(null);
  const [isConfigModalOpen, setIsConfigModalOpen] = useState(false);
  const [selectedToolForConfig, setSelectedToolForConfig] = useState<McpServerInfo | null>(null);
  const { mcpSelect, startupConfig } = useBadgeRowContext();
  const { mcpValues, setMCPValues, mcpServerNames, mcpToolDetails, isPinned } = mcpSelect;

  const { data: mcpToolDetails, isFetched } = useAvailableToolsQuery(EModelEndpoint.agents, {
    select: (data: TPlugin[]) => {
      const mcpToolsMap = new Map<string, McpServerInfo>();
      data.forEach((tool) => {
        const isMCP = tool.pluginKey.includes(Constants.mcp_delimiter);
        if (isMCP && tool.chatMenu !== false) {
          const parts = tool.pluginKey.split(Constants.mcp_delimiter);
          const serverName = parts[parts.length - 1];
          if (!mcpToolsMap.has(serverName)) {
            mcpToolsMap.set(serverName, {
              name: serverName,
              pluginKey: tool.pluginKey,
              authConfig: tool.authConfig,
              authenticated: tool.authenticated,
            });
          }
        }
      });
      return Array.from(mcpToolsMap.values());
    },
  });
  const [isConfigModalOpen, setIsConfigModalOpen] = useState(false);
  const [selectedToolForConfig, setSelectedToolForConfig] = useState<TPlugin | null>(null);

  const updateUserPluginsMutation = useUpdateUserPluginsMutation({
    onSuccess: () => {
@@ -84,48 +37,6 @@ function MCPSelect({ conversationId }: { conversationId?: string | null }) {
    },
  });

  const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key));
  const mcpState = useMemo(() => {
    return ephemeralAgent?.mcp ?? [];
  }, [ephemeralAgent?.mcp]);

  const setSelectedValues = useCallback(
    (values: string[] | null | undefined) => {
      if (!values) {
        return;
      }
      if (!Array.isArray(values)) {
        return;
      }
      setEphemeralAgent((prev) => ({
        ...prev,
        mcp: values,
      }));
    },
    [setEphemeralAgent],
  );
  const [mcpValues, setMCPValues] = useLocalStorage<string[]>(
    `${LocalStorageKeys.LAST_MCP_}${key}`,
    mcpState,
    setSelectedValues,
    storageCondition,
  );

  useEffect(() => {
    if (hasSetFetched.current === key) {
      return;
    }
    if (!isFetched) {
      return;
    }
    hasSetFetched.current = key;
    if ((mcpToolDetails?.length ?? 0) > 0) {
      setMCPValues(mcpValues.filter((mcp) => mcpToolDetails?.some((tool) => tool.name === mcp)));
      return;
    }
    setMCPValues([]);
  }, [isFetched, setMCPValues, mcpToolDetails, key, mcpValues]);

  const renderSelectedValues = useCallback(
    (values: string[], placeholder?: string) => {
      if (values.length === 0) {
@@ -139,10 +50,6 @@ function MCPSelect({ conversationId }: { conversationId?: string | null }) {
    [localize],
  );

  const mcpServerNames = useMemo(() => {
    return (mcpToolDetails ?? []).map((tool) => tool.name);
  }, [mcpToolDetails]);

  const handleConfigSave = useCallback(
    (targetName: string, authData: Record<string, string>) => {
      if (selectedToolForConfig && selectedToolForConfig.name === targetName) {
@@ -198,10 +105,10 @@ function MCPSelect({ conversationId }: { conversationId?: string | null }) {
          setSelectedToolForConfig(tool);
          setIsConfigModalOpen(true);
        }}
        className="ml-2 flex h-6 w-6 items-center justify-center rounded p-1 hover:bg-black/10 dark:hover:bg-white/10"
        className="ml-2 flex h-6 w-6 items-center justify-center rounded p-1 hover:bg-surface-secondary"
        aria-label={`Configure ${serverName}`}
      >
        <Settings2 className={`h-4 w-4 ${tool.authenticated ? 'text-green-500' : ''}`} />
        <SettingsIcon className={`h-4 w-4 ${tool.authenticated ? 'text-green-500' : ''}`} />
      </button>
    </div>
  );
@@ -212,10 +119,17 @@ function MCPSelect({ conversationId }: { conversationId?: string | null }) {
    [mcpToolDetails, setSelectedToolForConfig, setIsConfigModalOpen],
  );

  // Don't render if no servers are selected and not pinned
  if ((!mcpValues || mcpValues.length === 0) && !isPinned) {
    return null;
  }

  if (!mcpToolDetails || mcpToolDetails.length === 0) {
    return null;
  }

  const placeholderText =
    startupConfig?.interface?.mcpServers?.placeholder || localize('com_ui_mcp_servers');
  return (
    <>
      <MultiSelect
@@ -225,7 +139,7 @@ function MCPSelect({ conversationId }: { conversationId?: string | null }) {
        defaultSelectedValues={mcpValues ?? []}
        renderSelectedValues={renderSelectedValues}
        renderItemContent={renderItemContent}
        placeholder={localize('com_ui_mcp_servers')}
        placeholder={placeholderText}
        popoverClassName="min-w-fit"
        className="badge-icon min-w-fit"
        selectIcon={<MCPIcon className="icon-md text-text-primary" />}

103 client/src/components/Chat/Input/MCPSubMenu.tsx Normal file
@@ -0,0 +1,103 @@
import React from 'react';
import * as Ariakit from '@ariakit/react';
import { ChevronRight } from 'lucide-react';
import { PinIcon, MCPIcon } from '~/components/svg';
import { useLocalize } from '~/hooks';
import { cn } from '~/utils';

interface MCPSubMenuProps {
  isMCPPinned: boolean;
  setIsMCPPinned: (value: boolean) => void;
  mcpValues?: string[];
  mcpServerNames: string[];
  handleMCPToggle: (serverName: string) => void;
  placeholder?: string;
}

const MCPSubMenu = ({
  mcpValues,
  isMCPPinned,
  mcpServerNames,
  setIsMCPPinned,
  handleMCPToggle,
  placeholder,
  ...props
}: MCPSubMenuProps) => {
  const localize = useLocalize();

  const menuStore = Ariakit.useMenuStore({
    focusLoop: true,
    showTimeout: 100,
    placement: 'right',
  });

  return (
    <Ariakit.MenuProvider store={menuStore}>
      <Ariakit.MenuItem
        {...props}
        render={
          <Ariakit.MenuButton
            onClick={(e: React.MouseEvent<HTMLButtonElement>) => {
              e.stopPropagation();
              menuStore.toggle();
            }}
            className="flex w-full cursor-pointer items-center justify-between rounded-lg p-2 hover:bg-surface-hover"
          />
        }
      >
        <div className="flex items-center gap-2">
          <MCPIcon className="icon-md" />
          <span>{placeholder || localize('com_ui_mcp_servers')}</span>
          <ChevronRight className="ml-auto h-3 w-3" />
        </div>
        <button
          type="button"
          onClick={(e) => {
            e.stopPropagation();
            setIsMCPPinned(!isMCPPinned);
          }}
          className={cn(
            'rounded p-1 transition-all duration-200',
            'hover:bg-surface-tertiary hover:shadow-sm',
            !isMCPPinned && 'text-text-secondary hover:text-text-primary',
          )}
          aria-label={isMCPPinned ? 'Unpin' : 'Pin'}
        >
          <div className="h-4 w-4">
            <PinIcon unpin={isMCPPinned} />
          </div>
        </button>
      </Ariakit.MenuItem>
      <Ariakit.Menu
        portal={true}
        unmountOnHide={true}
        className={cn(
          'animate-popover-left z-50 ml-3 flex min-w-[200px] flex-col rounded-xl',
          'border border-border-light bg-surface-secondary p-1 shadow-lg',
        )}
      >
        {mcpServerNames.map((serverName) => (
          <Ariakit.MenuItem
            key={serverName}
            onClick={(event) => {
              event.preventDefault();
              handleMCPToggle(serverName);
            }}
            className={cn(
              'flex items-center gap-2 rounded-lg px-2 py-1.5 text-text-primary hover:cursor-pointer',
              'scroll-m-1 outline-none transition-colors',
              'hover:bg-black/[0.075] dark:hover:bg-white/10',
              'data-[active-item]:bg-black/[0.075] dark:data-[active-item]:bg-white/10',
              'w-full min-w-0 text-sm',
            )}
          >
            <Ariakit.MenuItemCheck checked={mcpValues?.includes(serverName) ?? false} />
            <span>{serverName}</span>
          </Ariakit.MenuItem>
        ))}
      </Ariakit.Menu>
    </Ariakit.MenuProvider>
  );
};

export default React.memo(MCPSubMenu);

66 client/src/components/Chat/Input/ToolDialogs.tsx Normal file
@@ -0,0 +1,66 @@
import React, { useMemo } from 'react';
import { AuthType } from 'librechat-data-provider';
import SearchApiKeyDialog from '~/components/SidePanel/Agents/Search/ApiKeyDialog';
import CodeApiKeyDialog from '~/components/SidePanel/Agents/Code/ApiKeyDialog';
import { useBadgeRowContext } from '~/Providers';

function ToolDialogs() {
  const { webSearch, codeInterpreter, searchApiKeyForm, codeApiKeyForm } = useBadgeRowContext();
  const { authData: webSearchAuthData } = webSearch;
  const { authData: codeAuthData } = codeInterpreter;

  const {
    methods: searchMethods,
    onSubmit: searchOnSubmit,
    isDialogOpen: searchDialogOpen,
    setIsDialogOpen: setSearchDialogOpen,
    handleRevokeApiKey: searchHandleRevoke,
    badgeTriggerRef: searchBadgeTriggerRef,
    menuTriggerRef: searchMenuTriggerRef,
  } = searchApiKeyForm;

  const {
    methods: codeMethods,
    onSubmit: codeOnSubmit,
    isDialogOpen: codeDialogOpen,
    setIsDialogOpen: setCodeDialogOpen,
    handleRevokeApiKey: codeHandleRevoke,
    badgeTriggerRef: codeBadgeTriggerRef,
    menuTriggerRef: codeMenuTriggerRef,
  } = codeApiKeyForm;

  const searchAuthTypes = useMemo(
    () => webSearchAuthData?.authTypes ?? [],
    [webSearchAuthData?.authTypes],
  );
  const codeAuthType = useMemo(() => codeAuthData?.message ?? false, [codeAuthData?.message]);

  return (
    <>
      <SearchApiKeyDialog
        onSubmit={searchOnSubmit}
        authTypes={searchAuthTypes}
        isOpen={searchDialogOpen}
        onRevoke={searchHandleRevoke}
        register={searchMethods.register}
        onOpenChange={setSearchDialogOpen}
        handleSubmit={searchMethods.handleSubmit}
        triggerRefs={[searchMenuTriggerRef, searchBadgeTriggerRef]}
        isToolAuthenticated={webSearchAuthData?.authenticated ?? false}
      />
      <CodeApiKeyDialog
        onSubmit={codeOnSubmit}
        isOpen={codeDialogOpen}
        onRevoke={codeHandleRevoke}
        register={codeMethods.register}
        onOpenChange={setCodeDialogOpen}
        handleSubmit={codeMethods.handleSubmit}
        triggerRefs={[codeMenuTriggerRef, codeBadgeTriggerRef]}
        isUserProvided={codeAuthType === AuthType.USER_PROVIDED}
        isToolAuthenticated={codeAuthData?.authenticated ?? false}
      />
    </>
  );
}

export default ToolDialogs;

323 client/src/components/Chat/Input/ToolsDropdown.tsx Normal file
@@ -0,0 +1,323 @@
import React, { useState, useMemo, useCallback } from 'react';
import * as Ariakit from '@ariakit/react';
import { Globe, Settings, Settings2, TerminalSquareIcon } from 'lucide-react';
import type { MenuItemProps } from '~/common';
import { Permissions, PermissionTypes, AuthType } from 'librechat-data-provider';
import { TooltipAnchor, DropdownPopup } from '~/components';
import MCPSubMenu from '~/components/Chat/Input/MCPSubMenu';
import { PinIcon, VectorIcon } from '~/components/svg';
import { useLocalize, useHasAccess } from '~/hooks';
import { useBadgeRowContext } from '~/Providers';
import { cn } from '~/utils';

interface ToolsDropdownProps {
  disabled?: boolean;
}

const ToolsDropdown = ({ disabled }: ToolsDropdownProps) => {
  const localize = useLocalize();
  const isDisabled = disabled ?? false;
  const [isPopoverActive, setIsPopoverActive] = useState(false);
  const {
    webSearch,
    mcpSelect,
    fileSearch,
    startupConfig,
    codeApiKeyForm,
    codeInterpreter,
    searchApiKeyForm,
  } = useBadgeRowContext();
  const { setIsDialogOpen: setIsCodeDialogOpen, menuTriggerRef: codeMenuTriggerRef } =
    codeApiKeyForm;
  const { setIsDialogOpen: setIsSearchDialogOpen, menuTriggerRef: searchMenuTriggerRef } =
    searchApiKeyForm;
  const {
    isPinned: isSearchPinned,
    setIsPinned: setIsSearchPinned,
    authData: webSearchAuthData,
  } = webSearch;
  const {
    isPinned: isCodePinned,
    setIsPinned: setIsCodePinned,
    authData: codeAuthData,
  } = codeInterpreter;
  const { isPinned: isFileSearchPinned, setIsPinned: setIsFileSearchPinned } = fileSearch;
  const {
    mcpValues,
    mcpServerNames,
    isPinned: isMCPPinned,
    setIsPinned: setIsMCPPinned,
  } = mcpSelect;

  const canUseWebSearch = useHasAccess({
    permissionType: PermissionTypes.WEB_SEARCH,
    permission: Permissions.USE,
  });

  const canRunCode = useHasAccess({
    permissionType: PermissionTypes.RUN_CODE,
    permission: Permissions.USE,
  });

  const showWebSearchSettings = useMemo(() => {
    const authTypes = webSearchAuthData?.authTypes ?? [];
    if (authTypes.length === 0) return true;
    return !authTypes.every(([, authType]) => authType === AuthType.SYSTEM_DEFINED);
  }, [webSearchAuthData?.authTypes]);

  const showCodeSettings = useMemo(
    () => codeAuthData?.message !== AuthType.SYSTEM_DEFINED,
    [codeAuthData?.message],
  );

  const handleWebSearchToggle = useCallback(() => {
    const newValue = !webSearch.toggleState;
    webSearch.debouncedChange({ isChecked: newValue });
  }, [webSearch]);

  const handleCodeInterpreterToggle = useCallback(() => {
    const newValue = !codeInterpreter.toggleState;
    codeInterpreter.debouncedChange({ isChecked: newValue });
  }, [codeInterpreter]);

  const handleFileSearchToggle = useCallback(() => {
    const newValue = !fileSearch.toggleState;
    fileSearch.debouncedChange({ isChecked: newValue });
  }, [fileSearch]);

  const handleMCPToggle = useCallback(
    (serverName: string) => {
      const currentValues = mcpSelect.mcpValues ?? [];
      const newValues = currentValues.includes(serverName)
        ? currentValues.filter((v) => v !== serverName)
        : [...currentValues, serverName];
      mcpSelect.setMCPValues(newValues);
    },
    [mcpSelect],
  );

  const mcpPlaceholder = startupConfig?.interface?.mcpServers?.placeholder;

  const dropdownItems = useMemo(() => {
    const items: MenuItemProps[] = [];
    items.push({
      onClick: handleFileSearchToggle,
      hideOnClick: false,
      render: (props) => (
        <div {...props}>
          <div className="flex items-center gap-2">
            <VectorIcon className="icon-md" />
            <span>{localize('com_assistants_file_search')}</span>
          </div>
          <button
            type="button"
            onClick={(e) => {
              e.stopPropagation();
              setIsFileSearchPinned(!isFileSearchPinned);
            }}
            className={cn(
              'rounded p-1 transition-all duration-200',
              'hover:bg-surface-secondary hover:shadow-sm',
              !isFileSearchPinned && 'text-text-secondary hover:text-text-primary',
            )}
            aria-label={isFileSearchPinned ? 'Unpin' : 'Pin'}
          >
            <div className="h-4 w-4">
              <PinIcon unpin={isFileSearchPinned} />
            </div>
          </button>
        </div>
      ),
    });

    if (canUseWebSearch) {
      items.push({
        onClick: handleWebSearchToggle,
        hideOnClick: false,
        render: (props) => (
          <div {...props}>
            <div className="flex items-center gap-2">
              <Globe className="icon-md" />
              <span>{localize('com_ui_web_search')}</span>
            </div>
            <div className="flex items-center gap-1">
              {showWebSearchSettings && (
                <button
                  type="button"
                  onClick={(e) => {
                    e.stopPropagation();
                    setIsSearchDialogOpen(true);
                  }}
                  className={cn(
                    'rounded p-1 transition-all duration-200',
                    'hover:bg-surface-secondary hover:shadow-sm',
                    'text-text-secondary hover:text-text-primary',
                  )}
                  aria-label="Configure web search"
                  ref={searchMenuTriggerRef}
                >
                  <div className="h-4 w-4">
                    <Settings className="h-4 w-4" />
                  </div>
                </button>
              )}
              <button
                type="button"
                onClick={(e) => {
                  e.stopPropagation();
                  setIsSearchPinned(!isSearchPinned);
                }}
                className={cn(
                  'rounded p-1 transition-all duration-200',
                  'hover:bg-surface-secondary hover:shadow-sm',
                  !isSearchPinned && 'text-text-secondary hover:text-text-primary',
                )}
                aria-label={isSearchPinned ? 'Unpin' : 'Pin'}
              >
                <div className="h-4 w-4">
                  <PinIcon unpin={isSearchPinned} />
                </div>
              </button>
            </div>
          </div>
        ),
      });
    }

    if (canRunCode) {
      items.push({
        onClick: handleCodeInterpreterToggle,
        hideOnClick: false,
        render: (props) => (
          <div {...props}>
            <div className="flex items-center gap-2">
              <TerminalSquareIcon className="icon-md" />
              <span>{localize('com_assistants_code_interpreter')}</span>
            </div>
            <div className="flex items-center gap-1">
              {showCodeSettings && (
                <button
                  type="button"
                  onClick={(e) => {
                    e.stopPropagation();
                    setIsCodeDialogOpen(true);
                  }}
                  ref={codeMenuTriggerRef}
                  className={cn(
                    'rounded p-1 transition-all duration-200',
                    'hover:bg-surface-secondary hover:shadow-sm',
                    'text-text-secondary hover:text-text-primary',
                  )}
                  aria-label="Configure code interpreter"
                >
                  <div className="h-4 w-4">
                    <Settings className="h-4 w-4" />
                  </div>
                </button>
              )}
              <button
                type="button"
                onClick={(e) => {
                  e.stopPropagation();
                  setIsCodePinned(!isCodePinned);
                }}
                className={cn(
                  'rounded p-1 transition-all duration-200',
                  'hover:bg-surface-secondary hover:shadow-sm',
                  !isCodePinned && 'text-text-primary hover:text-text-primary',
                )}
                aria-label={isCodePinned ? 'Unpin' : 'Pin'}
              >
                <div className="h-4 w-4">
                  <PinIcon unpin={isCodePinned} />
                </div>
              </button>
            </div>
          </div>
        ),
      });
    }

    if (mcpServerNames && mcpServerNames.length > 0) {
      items.push({
        hideOnClick: false,
        render: (props) => (
          <MCPSubMenu
            {...props}
            mcpValues={mcpValues}
            isMCPPinned={isMCPPinned}
            placeholder={mcpPlaceholder}
            mcpServerNames={mcpServerNames}
            setIsMCPPinned={setIsMCPPinned}
            handleMCPToggle={handleMCPToggle}
          />
        ),
      });
    }

    return items;
  }, [
    localize,
    mcpValues,
    canRunCode,
    isMCPPinned,
    isCodePinned,
    mcpPlaceholder,
    mcpServerNames,
    isSearchPinned,
    setIsMCPPinned,
    canUseWebSearch,
    setIsCodePinned,
    handleMCPToggle,
    showCodeSettings,
    setIsSearchPinned,
    isFileSearchPinned,
    codeMenuTriggerRef,
    setIsCodeDialogOpen,
    searchMenuTriggerRef,
    showWebSearchSettings,
    setIsFileSearchPinned,
    handleWebSearchToggle,
    setIsSearchDialogOpen,
    handleFileSearchToggle,
    handleCodeInterpreterToggle,
  ]);

  const menuTrigger = (
    <TooltipAnchor
      render={
        <Ariakit.MenuButton
          disabled={isDisabled}
          id="tools-dropdown-button"
          aria-label="Tools Options"
          className={cn(
            'flex size-9 items-center justify-center rounded-full p-1 transition-colors hover:bg-surface-hover focus:outline-none focus:ring-2 focus:ring-primary focus:ring-opacity-50',
          )}
        >
          <div className="flex w-full items-center justify-center gap-2">
            <Settings2 className="icon-md" />
          </div>
        </Ariakit.MenuButton>
      }
      id="tools-dropdown-button"
      description={localize('com_ui_tools')}
      disabled={isDisabled}
    />
  );

  return (
    <DropdownPopup
      itemClassName="flex w-full cursor-pointer items-center justify-between hover:bg-surface-hover gap-5"
      menuId="tools-dropdown-menu"
      isOpen={isPopoverActive}
      setIsOpen={setIsPopoverActive}
      modal={true}
      unmountOnHide={true}
      trigger={menuTrigger}
      items={dropdownItems}
      iconClassName="mr-0"
    />
  );
};

export default React.memo(ToolsDropdown);
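WebSearch, CodeInterpreter, and FileSearch all read the same useToolToggle surface from context; a rough sketch of that contract as used in this diff (field types inferred from the call sites above, not from the hook's source):

  // Inferred shape of what useToolToggle returns, based on usage in this PR.
  interface ToolToggleState {
    toggleState: boolean;                        // whether the tool badge is currently on
    isPinned: boolean;                           // pinned badges render even while off
    setIsPinned: (pinned: boolean) => void;
    authData?: { authenticated?: boolean; message?: string; authTypes?: [string, string][] };
    debouncedChange: (args: { isChecked: boolean }) => void; // persists and syncs the ephemeral agent
  }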
@@ -1,122 +1,37 @@
import React, { memo, useRef, useMemo, useCallback } from 'react';
import React, { memo } from 'react';
import { Globe } from 'lucide-react';
import debounce from 'lodash/debounce';
import { useRecoilState } from 'recoil';
import {
  Tools,
  AuthType,
  Constants,
  Permissions,
  PermissionTypes,
  LocalStorageKeys,
} from 'librechat-data-provider';
import ApiKeyDialog from '~/components/SidePanel/Agents/Search/ApiKeyDialog';
import { useLocalize, useHasAccess, useSearchApiKeyForm } from '~/hooks';
import { Permissions, PermissionTypes } from 'librechat-data-provider';
import CheckboxButton from '~/components/ui/CheckboxButton';
import useLocalStorage from '~/hooks/useLocalStorageAlt';
import { useVerifyAgentToolAuth } from '~/data-provider';
import { ephemeralAgentByConvoId } from '~/store';
import { useLocalize, useHasAccess } from '~/hooks';
import { useBadgeRowContext } from '~/Providers';

const storageCondition = (value: unknown, rawCurrentValue?: string | null) => {
  if (rawCurrentValue) {
    try {
      const currentValue = rawCurrentValue?.trim() ?? '';
      if (currentValue === 'true' && value === false) {
        return true;
      }
    } catch (e) {
      console.error(e);
    }
  }
  return value !== undefined && value !== null && value !== '' && value !== false;
};

function WebSearch({ conversationId }: { conversationId?: string | null }) {
  const triggerRef = useRef<HTMLInputElement>(null);
function WebSearch() {
  const localize = useLocalize();
  const key = conversationId ?? Constants.NEW_CONVO;
  const { webSearch: webSearchData, searchApiKeyForm } = useBadgeRowContext();
  const { toggleState: webSearch, debouncedChange, isPinned } = webSearchData;
  const { badgeTriggerRef } = searchApiKeyForm;

  const canUseWebSearch = useHasAccess({
    permissionType: PermissionTypes.WEB_SEARCH,
    permission: Permissions.USE,
  });
  const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key));
  const isWebSearchToggleEnabled = useMemo(() => {
    return ephemeralAgent?.web_search ?? false;
  }, [ephemeralAgent?.web_search]);

  const { data } = useVerifyAgentToolAuth(
    { toolId: Tools.web_search },
    {
      retry: 1,
    },
  );
  const authTypes = useMemo(() => data?.authTypes ?? [], [data?.authTypes]);
  const isAuthenticated = useMemo(() => data?.authenticated ?? false, [data?.authenticated]);
  const { methods, onSubmit, isDialogOpen, setIsDialogOpen, handleRevokeApiKey } =
    useSearchApiKeyForm({});

  const setValue = useCallback(
    (isChecked: boolean) => {
      setEphemeralAgent((prev) => ({
        ...prev,
        web_search: isChecked,
      }));
    },
    [setEphemeralAgent],
  );

  const [webSearch, setWebSearch] = useLocalStorage<boolean>(
    `${LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_}${key}`,
    isWebSearchToggleEnabled,
    setValue,
    storageCondition,
  );

  const handleChange = useCallback(
    (e: React.ChangeEvent<HTMLInputElement>, isChecked: boolean) => {
      if (!isAuthenticated) {
        setIsDialogOpen(true);
        e.preventDefault();
        return;
      }
      setWebSearch(isChecked);
    },
    [setWebSearch, setIsDialogOpen, isAuthenticated],
  );

  const debouncedChange = useMemo(
    () => debounce(handleChange, 50, { leading: true }),
    [handleChange],
  );

  if (!canUseWebSearch) {
    return null;
  }

  return (
    <>
      {(webSearch || isPinned) && (
        <CheckboxButton
          ref={triggerRef}
          ref={badgeTriggerRef}
          className="max-w-fit"
          defaultChecked={webSearch}
          checked={webSearch}
          setValue={debouncedChange}
          label={localize('com_ui_search')}
          isCheckedClassName="border-blue-600/40 bg-blue-500/10 hover:bg-blue-700/10"
          icon={<Globe className="icon-md" />}
        />
      <ApiKeyDialog
        onSubmit={onSubmit}
        authTypes={authTypes}
        isOpen={isDialogOpen}
        triggerRef={triggerRef}
        register={methods.register}
        onRevoke={handleRevokeApiKey}
        onOpenChange={setIsDialogOpen}
        handleSubmit={methods.handleSubmit}
        isToolAuthenticated={isAuthenticated}
      />
    </>
      )}
  );
}
@@ -83,7 +83,7 @@ export function filterModels(
  let modelName = modelId;

  if (isAgentsEndpoint(endpoint.value) && agentsMap && agentsMap[modelId]) {
    modelName = agentsMap[modelId].name || modelId;
    modelName = agentsMap[modelId]?.name || modelId;
  } else if (
    isAssistantsEndpoint(endpoint.value) &&
    assistantsMap &&
@@ -1,14 +1,33 @@
import { useState, useEffect } from 'react';
import { X, ArrowDownToLine, PanelLeftOpen, PanelLeftClose } from 'lucide-react';
import { useState, useEffect, useCallback, useRef } from 'react';
import { X, ArrowDownToLine, PanelLeftOpen, PanelLeftClose, RotateCcw } from 'lucide-react';
import { Button, OGDialog, OGDialogContent, TooltipAnchor } from '~/components';
import { useLocalize } from '~/hooks';

const getQualityStyles = (quality: string): string => {
  if (quality === 'high') {
    return 'bg-green-100 text-green-800';
  }
  if (quality === 'low') {
    return 'bg-orange-100 text-orange-800';
  }
  return 'bg-gray-100 text-gray-800';
};

export default function DialogImage({ isOpen, onOpenChange, src = '', downloadImage, args }) {
  const localize = useLocalize();
  const [isPromptOpen, setIsPromptOpen] = useState(false);
  const [imageSize, setImageSize] = useState<string | null>(null);

  const getImageSize = async (url: string) => {
  // Zoom and pan state
  const [zoom, setZoom] = useState(1);
  const [panX, setPanX] = useState(0);
  const [panY, setPanY] = useState(0);
  const [isDragging, setIsDragging] = useState(false);
  const [dragStart, setDragStart] = useState({ x: 0, y: 0 });

  const containerRef = useRef<HTMLDivElement>(null);

  const getImageSize = useCallback(async (url: string) => {
    try {
      const response = await fetch(url, { method: 'HEAD' });
      const contentLength = response.headers.get('Content-Length');
@@ -25,7 +44,7 @@ export default function DialogImage({ isOpen, onOpenChange, src = '', downloadIm
      console.error('Error getting image size:', error);
      return null;
    }
  };
  }, []);

  const formatFileSize = (bytes: number): string => {
    if (bytes === 0) return '0 Bytes';
@@ -37,11 +56,129 @@ export default function DialogImage({ isOpen, onOpenChange, src = '', downloadIm
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
  };

  const getImageMaxWidth = () => {
    // On mobile (when panel overlays), use full width minus padding
    // On desktop, account for the side panel width
    if (isPromptOpen) {
      return window.innerWidth >= 640 ? 'calc(100vw - 22rem)' : 'calc(100vw - 2rem)';
    }
    return 'calc(100vw - 2rem)';
  };

  const resetZoom = useCallback(() => {
    setZoom(1);
    setPanX(0);
    setPanY(0);
  }, []);

  const getCursor = () => {
    if (zoom <= 1) return 'default';
    return isDragging ? 'grabbing' : 'grab';
  };

  const handleDoubleClick = useCallback(() => {
    if (zoom > 1) {
      resetZoom();
    } else {
      // Zoom in to 2x on double click when at normal zoom
      setZoom(2);
    }
  }, [zoom, resetZoom]);

  const handleWheel = useCallback(
    (e: React.WheelEvent<HTMLDivElement>) => {
      e.preventDefault();
      if (!containerRef.current) return;

      const rect = containerRef.current.getBoundingClientRect();
      const mouseX = e.clientX - rect.left;
      const mouseY = e.clientY - rect.top;

      // Calculate zoom factor
      const zoomFactor = e.deltaY > 0 ? 0.9 : 1.1;
      const newZoom = Math.min(Math.max(zoom * zoomFactor, 1), 5);

      if (newZoom === zoom) return;

      // If zooming back to 1, reset pan to center the image
      if (newZoom === 1) {
        setZoom(1);
        setPanX(0);
        setPanY(0);
        return;
      }

      // Calculate the zoom center relative to the current viewport
      const containerCenterX = rect.width / 2;
      const containerCenterY = rect.height / 2;

      // Calculate new pan position to zoom towards mouse cursor
      const zoomRatio = newZoom / zoom;
      const deltaX = (mouseX - containerCenterX - panX) * (zoomRatio - 1);
      const deltaY = (mouseY - containerCenterY - panY) * (zoomRatio - 1);

      setZoom(newZoom);
      setPanX(panX - deltaX);
      setPanY(panY - deltaY);
    },
    [zoom, panX, panY],
  );

  const handleMouseDown = useCallback(
    (e: React.MouseEvent<HTMLDivElement>) => {
      e.preventDefault();
      if (zoom <= 1) return;
      setIsDragging(true);
      setDragStart({
        x: e.clientX - panX,
        y: e.clientY - panY,
      });
    },
    [zoom, panX, panY],
  );

  const handleMouseMove = useCallback(
    (e: React.MouseEvent<HTMLDivElement>) => {
      if (!isDragging || zoom <= 1) return;
      const newPanX = e.clientX - dragStart.x;
      const newPanY = e.clientY - dragStart.y;
      setPanX(newPanX);
      setPanY(newPanY);
    },
    [isDragging, dragStart, zoom],
  );
  const handleMouseUp = useCallback(() => {
    setIsDragging(false);
  }, []);

  useEffect(() => {
    const onKey = (e: KeyboardEvent) => e.key === 'Escape' && resetZoom();
    document.addEventListener('keydown', onKey);
    return () => document.removeEventListener('keydown', onKey);
  }, [resetZoom]);

  useEffect(() => {
    if (isOpen && src) {
      getImageSize(src).then(setImageSize);
      resetZoom();
    }
  }, [isOpen, src]);
  }, [isOpen, src, getImageSize, resetZoom]);

  // Ensure image is centered when zoom changes to 1
  useEffect(() => {
    if (zoom === 1) {
      setPanX(0);
      setPanY(0);
    }
  }, [zoom]);

  // Reset pan when panel opens/closes to maintain centering
  useEffect(() => {
    if (zoom === 1) {
      setPanX(0);
      setPanY(0);
    }
  }, [isPromptOpen, zoom]);

  return (
    <OGDialog open={isOpen} onOpenChange={onOpenChange}>
@@ -52,7 +189,7 @@ export default function DialogImage({ isOpen, onOpenChange, src = '', downloadIm
        overlayClassName="bg-surface-primary opacity-95 z-50"
      >
        <div
          className={`absolute left-0 top-0 z-10 flex items-center justify-between p-4 transition-all duration-500 ease-in-out ${isPromptOpen ? 'right-80' : 'right-0'}`}
          className={`ease-[cubic-bezier(0.175,0.885,0.32,1.275)] absolute left-0 top-0 z-10 flex items-center justify-between p-3 transition-all duration-500 sm:p-4 ${isPromptOpen ? 'right-0 sm:right-80' : 'right-0'}`}
        >
          <TooltipAnchor
            description={localize('com_ui_close')}
@@ -62,11 +199,21 @@ export default function DialogImage({ isOpen, onOpenChange, src = '', downloadIm
                variant="ghost"
                className="h-10 w-10 p-0 hover:bg-surface-hover"
              >
                <X className="size-6" />
                <X className="size-7 sm:size-6" />
              </Button>
            }
          />
          <div className="flex items-center gap-2">
          <div className="flex items-center gap-1 sm:gap-2">
            {zoom > 1 && (
              <TooltipAnchor
                description={localize('com_ui_reset_zoom')}
                render={
                  <Button onClick={resetZoom} variant="ghost" className="h-10 w-10 p-0">
                    <RotateCcw className="size-6" />
                  </Button>
                }
              />
            )}
            <TooltipAnchor
              description={localize('com_ui_download')}
              render={
@@ -88,9 +235,9 @@ export default function DialogImage({ isOpen, onOpenChange, src = '', downloadIm
                  className="h-10 w-10 p-0"
                >
                  {isPromptOpen ? (
                    <PanelLeftOpen className="size-6" />
                    <PanelLeftOpen className="size-7 sm:size-6" />
                  ) : (
                    <PanelLeftClose className="size-6" />
                    <PanelLeftClose className="size-7 sm:size-6" />
                  )}
                </Button>
              }
@@ -100,36 +247,81 @@ export default function DialogImage({ isOpen, onOpenChange, src = '', downloadIm

        {/* Main content area with image */}
        <div
          className={`flex h-full transition-all duration-500 ease-in-out ${isPromptOpen ? 'mr-80' : 'mr-0'}`}
          className={`ease-[cubic-bezier(0.175,0.885,0.32,1.275)] flex h-full transition-all duration-500 ${isPromptOpen ? 'mr-0 sm:mr-80' : 'mr-0'}`}
        >
          <div className="flex flex-1 items-center justify-center px-4 pb-4 pt-20">
            <img
              src={src}
              alt="Image"
              className="max-h-full max-w-full object-contain"
          <div
            ref={containerRef}
            className="flex flex-1 items-center justify-center px-2 pb-4 pt-16 sm:px-4 sm:pt-20"
            onWheel={handleWheel}
            onMouseDown={handleMouseDown}
            onMouseMove={handleMouseMove}
            onMouseUp={handleMouseUp}
            onMouseLeave={handleMouseUp}
            onDoubleClick={handleDoubleClick}
            style={{
              cursor: getCursor(),
              overflow: zoom > 1 ? 'hidden' : 'visible',
              minHeight: 0, // Allow flexbox to shrink
            }}
          >
            <div
              className="flex items-center justify-center transition-transform duration-100 ease-out"
              style={{
                maxHeight: 'calc(100vh - 6rem)',
                maxWidth: '100%',
                transform: `translate(${panX}px, ${panY}px) scale(${zoom})`,
                transformOrigin: 'center center',
                width: '100%',
                height: '100%',
                display: 'flex',
                alignItems: 'center',
                justifyContent: 'center',
              }}
            />
            >
              <img
                src={src}
                alt="Image"
                className="block object-contain"
                style={{
                  maxHeight: 'calc(100vh - 8rem)',
                  maxWidth: getImageMaxWidth(),
                  width: 'auto',
                  height: 'auto',
                }}
              />
            </div>
          </div>
        </div>

        {/* Side Panel */}
        <div
          className={`shadow-l-lg fixed right-0 top-0 z-20 h-full w-80 transform rounded-l-2xl border-l border-border-light bg-surface-primary transition-transform duration-500 ease-in-out ${
          className={`sm:shadow-l-lg ease-[cubic-bezier(0.175,0.885,0.32,1.275)] fixed right-0 top-0 z-20 h-full w-full transform border-l border-border-light bg-surface-primary shadow-2xl backdrop-blur-sm transition-transform duration-500 sm:w-80 sm:rounded-l-2xl ${
            isPromptOpen ? 'translate-x-0' : 'translate-x-full'
          }`}
        >
          <div className="h-full overflow-y-auto p-6">
            <div className="mb-4">
          {/* Mobile pull handle - removed for cleaner look */}

          <div className="h-full overflow-y-auto p-4 sm:p-6">
            {/* Mobile close button */}
            <div className="mb-4 flex items-center justify-between sm:hidden">
              <h3 className="text-lg font-semibold text-text-primary">
                {localize('com_ui_image_details')}
              </h3>
              <Button
                onClick={() => setIsPromptOpen(false)}
                variant="ghost"
                className="h-12 w-12 p-0"
              >
                <X className="size-6" />
              </Button>
            </div>

            <div className="mb-4 hidden sm:block">
              <h3 className="mb-2 text-lg font-semibold text-text-primary">
                {localize('com_ui_image_details')}
              </h3>
              <div className="mb-4 h-px bg-border-medium"></div>
            </div>

            <div className="space-y-6">
            <div className="space-y-4 sm:space-y-6">
              {/* Prompt Section */}
              <div>
                <h4 className="mb-2 text-sm font-medium text-text-primary">
@@ -157,13 +349,7 @@ export default function DialogImage({ isOpen, onOpenChange, src = '', downloadIm
                <div className="flex items-center justify-between">
                  <span className="text-sm text-text-primary">{localize('com_ui_quality')}:</span>
                  <span
                    className={`rounded px-2 py-1 text-xs font-medium capitalize ${
                      args?.quality === 'high'
                        ? 'bg-green-100 text-green-800'
                        : args?.quality === 'low'
                          ? 'bg-orange-100 text-orange-800'
                          : 'bg-gray-100 text-gray-800'
                    }`}
                    className={`rounded px-2 py-1 text-xs font-medium capitalize ${getQualityStyles(args?.quality || '')}`}
                  >
                    {args?.quality || 'Standard'}
                  </span>

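The wheel handler above implements zoom-toward-cursor: the cursor's offset from the panned content center grows by `zoomRatio` when the content scales, so the pan is shifted by the excess `(zoomRatio - 1)` to keep the point under the cursor fixed. The same arithmetic as a pure function (a sketch mirroring the handler, outside React):

// Pure version of the zoom-towards-cursor update in handleWheel above.
// mouseX/mouseY are cursor coordinates relative to the container's top-left.
interface ZoomState {
  zoom: number;
  panX: number;
  panY: number;
}

function zoomAt(
  state: ZoomState,
  mouseX: number,
  mouseY: number,
  rect: { width: number; height: number },
  deltaY: number,
): ZoomState {
  const zoomFactor = deltaY > 0 ? 0.9 : 1.1;
  const newZoom = Math.min(Math.max(state.zoom * zoomFactor, 1), 5); // clamp to [1, 5]
  if (newZoom === state.zoom) return state;
  if (newZoom === 1) return { zoom: 1, panX: 0, panY: 0 }; // re-center at base zoom

  const zoomRatio = newZoom / state.zoom;
  // How far the point under the cursor would drift after scaling:
  const dx = (mouseX - rect.width / 2 - state.panX) * (zoomRatio - 1);
  const dy = (mouseY - rect.height / 2 - state.panY) * (zoomRatio - 1);
  // Pan against the drift so that point stays put.
  return { zoom: newZoom, panX: state.panX - dx, panY: state.panY - dy };
}
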
@@ -12,7 +12,6 @@ import {
  InputNumber,
  SelectDropDown,
  HoverCardTrigger,
  MultiSelectDropDown,
} from '~/components/ui';
import {
  removeFocusOutlines,
@@ -25,6 +24,7 @@ import {
} from '~/utils';
import OptionHoverAlt from '~/components/SidePanel/Parameters/OptionHover';
import { useLocalize, useDebouncedInput } from '~/hooks';
import MultiSelectDropDown from '~/components/Input/ModelSelect/MultiSelectDropDown';
import OptionHover from './OptionHover';
import { ESide } from '~/common';
import store from '~/store';
@@ -165,7 +165,7 @@ export default function Settings({
            )}
            className={cn(
              defaultTextProps,
              'flex max-h-[138px] min-h-[100px] w-full resize-none px-3 py-2 ',
              'flex max-h-[138px] min-h-[100px] w-full resize-none px-3 py-2',
            )}
          />
        </div>

@@ -1,4 +1,5 @@
import React, { useState, useRef } from 'react';
import { Wrench, ArrowRight } from 'lucide-react';
import {
  Listbox,
  ListboxButton,
@@ -7,12 +8,11 @@ import {
  ListboxOption,
  Transition,
} from '@headlessui/react';
import { Wrench, ArrowRight } from 'lucide-react';
import { CheckMark } from '~/components/svg';
import useOnClickOutside from '~/hooks/useOnClickOutside';
import { useMultiSearch } from './MultiSearch';
import { cn } from '~/utils/';
import type { TPlugin } from 'librechat-data-provider';
import { useMultiSearch } from '~/components/MultiSearch';
import { useOnClickOutside } from '~/hooks';
import { CheckMark } from '~/svgs';
import { cn } from '~/utils/';

export type TMultiSelectDropDownProps = {
  title?: string;
@@ -142,7 +142,7 @@ function MultiSelectDropDown({
        viewBox="0 0 24 24"
        strokeLinecap="round"
        strokeLinejoin="round"
        className="h-4 w-4 text-gray-400"
        height="1em"
        width="1em"
        xmlns="http://www.w3.org/2000/svg"

@@ -2,7 +2,7 @@ import { Wrench } from 'lucide-react';
import { Root, Trigger, Content, Portal } from '@radix-ui/react-popover';
import type { TPlugin } from 'librechat-data-provider';
import MenuItem from '~/components/Chat/Menus/UI/MenuItem';
import { useMultiSearch } from './MultiSearch';
import { useMultiSearch } from '~/components';
import { cn } from '~/utils/';

type SelectDropDownProps = {
@@ -32,8 +32,6 @@ function MultiSelectPop({
  optionValueKey = 'value',
  searchPlaceholder,
}: SelectDropDownProps) {
  // const localize = useLocalize();

  const title = _title;
  const excludeIds = ['select-plugin', 'plugins-label', 'selected-plugins'];

@@ -54,14 +52,14 @@ function MultiSelectPop({
        <button
          data-testid="select-dropdown-button"
          className={cn(
            'relative flex flex-col rounded-md border border-black/10 bg-white py-2 pl-3 pr-10 text-left focus:outline-none focus:ring-0 focus:ring-offset-0 dark:border-gray-700 dark:bg-gray-800 dark:bg-gray-800 sm:text-sm',
            'relative flex flex-col rounded-md border border-black/10 bg-white py-2 pl-3 pr-10 text-left focus:outline-none focus:ring-0 focus:ring-offset-0 dark:border-gray-700 dark:bg-gray-800 sm:text-sm',
            'pointer-cursor font-normal',
            'hover:bg-gray-50 radix-state-open:bg-gray-50 dark:hover:bg-gray-700 dark:radix-state-open:bg-gray-700',
          )}
        >
          {' '}
          {showLabel && (
            <label className="block text-xs text-gray-700 dark:text-gray-500 ">{title}</label>
            <label className="block text-xs text-gray-700 dark:text-gray-500">{title}</label>
          )}
          <span className="inline-flex" id={excludeIds[2]}>
            <span
@@ -73,7 +71,7 @@ function MultiSelectPop({
          {/* {!showLabel && title.length > 0 && (
            <span className="text-xs text-gray-700 dark:text-gray-500">{title}:</span>
          )} */}
          <span className="flex items-center gap-1 ">
          <span className="flex items-center gap-1">
            <div className="flex gap-1">
              {value.length === 0 && 'None selected'}
              {value.map((v, i) => (
@@ -98,7 +96,7 @@ function MultiSelectPop({
        viewBox="0 0 24 24"
        strokeLinecap="round"
        strokeLinejoin="round"
        className="h-4 w-4 text-gray-400"
        height="1em"
        width="1em"
        xmlns="http://www.w3.org/2000/svg"

@@ -4,15 +4,10 @@ import { useState, useEffect, useMemo } from 'react';
import { useAvailablePluginsQuery } from 'librechat-data-provider/react-query';
import type { TPlugin } from 'librechat-data-provider';
import type { TModelSelectProps } from '~/common';
import {
  Button,
  MultiSelectPop,
  SelectDropDown,
  SelectDropDownPop,
  MultiSelectDropDown,
} from '~/components/ui';
import { Button, SelectDropDown, SelectDropDownPop, MultiSelectDropDown } from '~/components/ui';
import { useSetIndexOptions, useAuthContext, useMediaQuery, useLocalize } from '~/hooks';
import { cn, cardStyle, selectPlugins, processPlugins } from '~/utils';
import MultiSelectPop from './MultiSelectPop';
import store from '~/store';

export default function PluginsByIndex({

@@ -1,10 +1,10 @@
import React from 'react';
import { Root, Trigger, Content, Portal } from '@radix-ui/react-popover';
import MenuItem from '~/components/Chat/Menus/UI/MenuItem';
import { useMultiSearch } from '~/components';
import type { Option } from '~/common';
import { useLocalize } from '~/hooks';
import { cn } from '~/utils/';
import { useMultiSearch } from './MultiSearch';

type SelectDropDownProps = {
  id?: string;

@@ -1,26 +1,28 @@
import { useCallback, useState, useMemo, useEffect } from 'react';
import { Link } from 'react-router-dom';
import debounce from 'lodash/debounce';
import { useRecoilValue } from 'recoil';
import { Link } from 'react-router-dom';
import { TrashIcon, MessageSquare, ArrowUpDown, ArrowUp, ArrowDown } from 'lucide-react';
import type { SharedLinkItem, SharedLinksListParams } from 'librechat-data-provider';
import {
  OGDialog,
  OGDialogTemplate,
  OGDialogTrigger,
  OGDialogContent,
  OGDialogHeader,
  OGDialogTitle,
  TooltipAnchor,
  DataTable,
  Spinner,
  Button,
  Label,
  Spinner,
} from '~/components';
import { useDeleteSharedLinkMutation, useSharedLinksQuery } from '~/data-provider';
import OGDialogTemplate from '~/components/ui/OGDialogTemplate';
import { useLocalize, useMediaQuery } from '~/hooks';
import DataTable from '~/components/ui/DataTable';
import { NotificationSeverity } from '~/common';
import { useToastContext } from '~/Providers';
import { formatDate } from '~/utils';
import store from '~/store';

const PAGE_SIZE = 25;

@@ -36,6 +38,7 @@ export default function SharedLinks() {
  const localize = useLocalize();
  const { showToast } = useToastContext();
  const isSmallScreen = useMediaQuery('(max-width: 768px)');
  const isSearchEnabled = useRecoilValue(store.search);
  const [queryParams, setQueryParams] = useState<SharedLinksListParams>(DEFAULT_PARAMS);
  const [deleteRow, setDeleteRow] = useState<SharedLinkItem | null>(null);
  const [isDeleteOpen, setIsDeleteOpen] = useState(false);
@@ -308,6 +311,7 @@ export default function SharedLinks() {
          onFilterChange={debouncedFilterChange}
          filterValue={queryParams.search}
          isLoading={isLoading}
          enableSearch={isSearchEnabled}
        />
      </OGDialogContent>
    </OGDialog>

@@ -1,5 +1,6 @@
import { useState, useCallback, useMemo, useEffect } from 'react';
import debounce from 'lodash/debounce';
import { useRecoilValue } from 'recoil';
import { TrashIcon, ArchiveRestore, ArrowUp, ArrowDown, ArrowUpDown } from 'lucide-react';
import type { ConversationListParams, TConversation } from 'librechat-data-provider';
import {
@@ -11,6 +12,7 @@ import {
  Label,
  TooltipAnchor,
  Spinner,
  DataTable,
} from '~/components';
import {
  useArchiveConvoMutation,
@@ -19,10 +21,10 @@ import {
} from '~/data-provider';
import { useLocalize, useMediaQuery } from '~/hooks';
import { MinimalIcon } from '~/components/Endpoints';
import DataTable from '~/components/ui/DataTable';
import { NotificationSeverity } from '~/common';
import { useToastContext } from '~/Providers';
import { formatDate } from '~/utils';
import store from '~/store';

const DEFAULT_PARAMS: ConversationListParams = {
  isArchived: true,
@@ -39,7 +41,7 @@ export default function ArchivedChatsTable({
  const localize = useLocalize();
  const isSmallScreen = useMediaQuery('(max-width: 768px)');
  const { showToast } = useToastContext();

  const isSearchEnabled = useRecoilValue(store.search);
  const [isDeleteOpen, setIsDeleteOpen] = useState(false);
  const [queryParams, setQueryParams] = useState<ConversationListParams>(DEFAULT_PARAMS);
  const [deleteConversation, setDeleteConversation] = useState<TConversation | null>(null);
@@ -272,6 +274,7 @@ export default function ArchivedChatsTable({
          isFetchingNextPage={isFetchingNextPage}
          isLoading={isLoading}
          showCheckboxes={false}
          enableSearch={isSearchEnabled}
        />

        <OGDialog open={isDeleteOpen} onOpenChange={onOpenChange}>

@@ -15,6 +15,7 @@ export default function ApiKeyDialog({
  register,
  handleSubmit,
  triggerRef,
  triggerRefs,
}: {
  isOpen: boolean;
  onOpenChange: (open: boolean) => void;
@@ -24,7 +25,8 @@ export default function ApiKeyDialog({
  isToolAuthenticated: boolean;
  register: UseFormRegister<ApiKeyFormData>;
  handleSubmit: UseFormHandleSubmit<ApiKeyFormData>;
  triggerRef?: RefObject<HTMLInputElement>;
  triggerRef?: RefObject<HTMLInputElement | HTMLButtonElement>;
  triggerRefs?: RefObject<HTMLInputElement | HTMLButtonElement>[];
}) {
  const localize = useLocalize();
  const languageIcons = [
@@ -41,7 +43,12 @@ export default function ApiKeyDialog({
  ];

  return (
    <OGDialog open={isOpen} onOpenChange={onOpenChange} triggerRef={triggerRef}>
    <OGDialog
      open={isOpen}
      onOpenChange={onOpenChange}
      triggerRef={triggerRef}
      triggerRefs={triggerRefs}
    >
      <OGDialogTemplate
        className="w-11/12 sm:w-[450px]"
        title=""

@@ -1,6 +1,6 @@
import { useState, useEffect } from 'react';
import { useFormContext, Controller } from 'react-hook-form';
import { MCP } from 'librechat-data-provider/dist/types/types/assistants';
import type { MCP } from 'librechat-data-provider';
import MCPAuth from '~/components/SidePanel/Builder/MCPAuth';
import MCPIcon from '~/components/SidePanel/Agents/MCPIcon';
import { Label, Checkbox } from '~/components/ui';

@@ -21,6 +21,7 @@ export default function ApiKeyDialog({
  register,
  handleSubmit,
  triggerRef,
  triggerRefs,
}: {
  isOpen: boolean;
  onOpenChange: (open: boolean) => void;
@@ -30,7 +31,8 @@ export default function ApiKeyDialog({
  isToolAuthenticated: boolean;
  register: UseFormRegister<SearchApiKeyFormData>;
  handleSubmit: UseFormHandleSubmit<SearchApiKeyFormData>;
  triggerRef?: React.RefObject<HTMLInputElement>;
  triggerRef?: React.RefObject<HTMLInputElement | HTMLButtonElement>;
  triggerRefs?: React.RefObject<HTMLInputElement | HTMLButtonElement>[];
}) {
  const localize = useLocalize();
  const { data: config } = useGetStartupConfig();
@@ -181,7 +183,12 @@ export default function ApiKeyDialog({
  }

  return (
    <OGDialog open={isOpen} onOpenChange={onOpenChange} triggerRef={triggerRef}>
    <OGDialog
      open={isOpen}
      onOpenChange={onOpenChange}
      triggerRef={triggerRef}
      triggerRefs={triggerRefs}
    >
      <OGDialogTemplate
        className="w-11/12 sm:w-[500px]"
        title=""

@@ -1,15 +0,0 @@
export default function MCPIcon() {
  return (
    <svg
      xmlns="http://www.w3.org/2000/svg"
      width="24"
      height="24"
      fill="currentColor"
      viewBox="0 0 24 24"
      className="h-4 w-4"
    >
      <path d="M11.016 2.099a3.998 3.998 0 0 1 5.58.072l.073.074a3.991 3.991 0 0 1 1.058 3.318 3.994 3.994 0 0 1 3.32 1.06l.073.071.048.047.071.075a3.998 3.998 0 0 1 0 5.506l-.071.074-8.183 8.182-.034.042a.267.267 0 0 0 .034.335l1.68 1.68a.8.8 0 0 1-1.131 1.13l-1.68-1.679a1.866 1.866 0 0 1-.034-2.604l8.26-8.261a2.4 2.4 0 0 0-.044-3.349l-.047-.047-.044-.043a2.4 2.4 0 0 0-3.349.043l-6.832 6.832-.03.029a.8.8 0 0 1-1.1-1.16l6.876-6.875a2.4 2.4 0 0 0-.044-3.35l-.179-.161a2.399 2.399 0 0 0-3.169.119l-.045.043-9.047 9.047-.03.028a.8.8 0 0 1-1.1-1.16l9.046-9.046.074-.072Z" />
      <path d="M13.234 4.404a.8.8 0 0 1 1.1 1.16l-6.69 6.691a2.399 2.399 0 1 0 3.393 3.393l6.691-6.692a.8.8 0 0 1 1.131 1.131l-6.691 6.692a4 4 0 0 1-5.581.07l-.073-.07a3.998 3.998 0 0 1 0-5.655l6.69-6.691.03-.029Z" />
    </svg>
  );
}

@@ -1,111 +0,0 @@
import React from 'react';
import { Search } from 'lucide-react';
import { cn } from '~/utils';

const AnimatedSearchInput = ({
  value,
  onChange,
  isSearching: searching,
  placeholder,
}: {
  value?: string;
  onChange: (e: React.ChangeEvent<HTMLInputElement>) => void;
  isSearching?: boolean;
  placeholder: string;
}) => {
  const isSearching = searching === true;
  const hasValue = value != null && value.length > 0;

  return (
    <div className="relative w-full">
      <div className="relative rounded-lg transition-all duration-500 ease-in-out">
        <div className="relative">
          {/* Icon on the left */}
          <div className="absolute left-3 top-1/2 z-50 -translate-y-1/2">
            <Search
              className={cn(
                `
                h-4 w-4 transition-all duration-500 ease-in-out`,
                isSearching && hasValue ? 'text-blue-400' : 'text-gray-400',
              )}
            />
          </div>

          {/* Input field */}
          <input
            type="text"
            value={value}
            onChange={onChange}
            placeholder={placeholder}
            className={`
              peer relative z-20 w-full rounded-lg bg-surface-secondary px-10
              py-2 outline-none ring-0 backdrop-blur-sm transition-all
              duration-500 ease-in-out placeholder:text-gray-400
              focus:outline-none focus:ring-0
            `}
          />

          {/* Gradient overlay */}
          <div
            className={`
              pointer-events-none absolute inset-0 z-20 rounded-lg
              bg-gradient-to-r from-blue-500/20 via-purple-500/20 to-blue-500/20
              transition-all duration-500 ease-in-out
              ${isSearching && hasValue ? 'opacity-100 blur-sm' : 'opacity-0 blur-none'}
            `}
          />

          {/* Animated loading indicator */}
          <div
            className={`
              absolute right-3 top-1/2 z-20 -translate-y-1/2
              transition-all duration-500 ease-in-out
              ${isSearching && hasValue ? 'scale-100 opacity-100' : 'scale-0 opacity-0'}
            `}
          >
            <div className="relative h-2 w-2">
              <div className="absolute inset-0 animate-ping rounded-full bg-blue-500/60" />
              <div className="absolute inset-0 rounded-full bg-blue-500" />
            </div>
          </div>
        </div>
      </div>

      {/* Outer glow effect */}
      <div
        className={`
          absolute -inset-8 -z-10
          transition-all duration-700 ease-in-out
          ${isSearching && hasValue ? 'scale-105 opacity-100' : 'scale-100 opacity-0'}
        `}
      >
        <div className="absolute inset-0">
          <div
            className={`
              bg-gradient-radial absolute inset-0 from-blue-500/10 to-transparent
              transition-opacity duration-700 ease-in-out
              ${isSearching && hasValue ? 'animate-pulse-slow opacity-100' : 'opacity-0'}
            `}
          />
          <div
            className={`
              absolute inset-0 bg-gradient-to-r from-purple-500/5 via-blue-500/5 to-purple-500/5
              blur-xl transition-all duration-700 ease-in-out
              ${isSearching && hasValue ? 'animate-gradient-x opacity-100' : 'opacity-0'}
            `}
          />
        </div>
      </div>
      <div
        className={`
          absolute inset-0 -z-20 scale-100 bg-gradient-to-r from-blue-500/10
          via-purple-500/10 to-blue-500/10 opacity-0 blur-xl
          transition-all duration-500 ease-in-out
          peer-focus:scale-105 peer-focus:opacity-100
        `}
      />
    </div>
  );
};

export default AnimatedSearchInput;

@@ -1,5 +0,0 @@
import { useDelayedRender } from '~/hooks';

const DelayedRender = ({ delay, children }) => useDelayedRender(delay)(() => children);

export default DelayedRender;

@@ -1,26 +0,0 @@
import * as React from 'react';
import * as SliderPrimitive from '@radix-ui/react-slider';
import { cn } from '~/utils';

const Slider = React.forwardRef<
  React.ElementRef<typeof SliderPrimitive.Root>,
  React.ComponentPropsWithoutRef<typeof SliderPrimitive.Root> & { onDoubleClick?: () => void }
>(({ className, onDoubleClick, ...props }, ref) => (
  <SliderPrimitive.Root
    ref={ref}
    className={cn(
      'relative flex w-full cursor-pointer touch-none select-none items-center',
      className,
    )}
    onDoubleClick={onDoubleClick}
    {...props}
  >
    <SliderPrimitive.Track className="relative h-2 w-full grow overflow-hidden rounded-full bg-secondary">
      <SliderPrimitive.Range className="absolute h-full bg-primary" />
    </SliderPrimitive.Track>
    <SliderPrimitive.Thumb className="block h-5 w-5 rounded-full border-2 border-primary bg-background ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50" />
  </SliderPrimitive.Root>
));
Slider.displayName = SliderPrimitive.Root.displayName;

export { Slider };

@@ -4,7 +4,7 @@ import MarkdownLite from '~/components/Chat/Messages/Content/MarkdownLite';
import DialogTemplate from '~/components/ui/DialogTemplate';
import { useAcceptTermsMutation } from '~/data-provider';
import { useToastContext } from '~/Providers';
import { OGDialog } from '~/components/ui';
import { OGDialog } from '~/components';
import { useLocalize } from '~/hooks';

const TermsAndConditionsModal = ({
@@ -73,7 +73,7 @@ const TermsAndConditionsModal = ({
        main={
          <section
            // Motivation: This is a dialog, so its content should be focusable
            // eslint-disable-next-line jsx-a11y/no-noninteractive-tabindex

            tabIndex={0}
            className="max-h-[60vh] overflow-y-auto p-4"
            aria-label={localize('com_ui_terms_and_conditions')}

84  client/src/hooks/Files/useClientResize.ts  Normal file
@@ -0,0 +1,84 @@
import { mergeFileConfig } from 'librechat-data-provider';
import { useCallback } from 'react';
import { useGetFileConfig } from '~/data-provider';
import {
  resizeImage,
  shouldResizeImage,
  supportsClientResize,
  type ResizeOptions,
  type ResizeResult,
} from '~/utils/imageResize';

/**
 * Hook for client-side image resizing functionality
 * Integrates with LibreChat's file configuration system
 */
export const useClientResize = () => {
  const { data: fileConfig = null } = useGetFileConfig({
    select: (data) => mergeFileConfig(data),
  });

  // Safe access to clientImageResize config with fallbacks
  // eslint-disable-next-line react-hooks/exhaustive-deps
  const config = (fileConfig as any)?.clientImageResize ?? {
    enabled: false,
    maxWidth: 1900,
    maxHeight: 1900,
    quality: 0.92,
  };
  const isEnabled = config?.enabled ?? false;

  /**
   * Resizes an image if client-side resizing is enabled and supported
   * @param file - The image file to resize
   * @param options - Optional resize options to override defaults
   * @returns Promise resolving to either the resized file result or original file
   */
  const resizeImageIfNeeded = useCallback(
    async (
      file: File,
      options?: Partial<ResizeOptions>,
    ): Promise<{ file: File; resized: boolean; result?: ResizeResult }> => {
      // Return original file if resizing is disabled
      if (!isEnabled) {
        return { file, resized: false };
      }

      // Return original file if browser doesn't support resizing
      if (!supportsClientResize()) {
        console.warn('Client-side image resizing not supported in this browser');
        return { file, resized: false };
      }

      // Return original file if it doesn't need resizing
      if (!shouldResizeImage(file)) {
        return { file, resized: false };
      }

      try {
        const resizeOptions: Partial<ResizeOptions> = {
          maxWidth: config?.maxWidth,
          maxHeight: config?.maxHeight,
          quality: config?.quality,
          ...options,
        };

        const result = await resizeImage(file, resizeOptions);
        return { file: result.file, resized: true, result };
      } catch (error) {
        console.warn('Client-side image resizing failed:', error);
        return { file, resized: false };
      }
    },
    [isEnabled, config],
  );

  return {
    isEnabled,
    isSupported: supportsClientResize(),
    config,
    resizeImageIfNeeded,
  };
};

export default useClientResize;

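A minimal consumer sketch for the hook above; the component and upload callback are hypothetical, but the returned API (`resizeImageIfNeeded`, `isEnabled`) matches the file:

// Hypothetical consumer: prepare an image before upload.
function AttachImage({ upload }: { upload: (f: File) => Promise<void> }) {
  const { resizeImageIfNeeded, isEnabled } = useClientResize();

  const onFile = async (file: File) => {
    // Falls back to the original file when disabled, unsupported, or on error.
    const { file: prepared, resized, result } = await resizeImageIfNeeded(file);
    if (resized && result) {
      console.debug(`resized ${result.originalSize} -> ${result.newSize} bytes (enabled: ${isEnabled})`);
    }
    await upload(prepared);
  };

  return (
    <input
      type="file"
      accept="image/*"
      onChange={(e) => e.target.files?.[0] && onFile(e.target.files[0])}
    />
  );
}
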
@@ -1,43 +1,46 @@
import { useState, useMemo } from 'react';
import { useDrop } from 'react-dnd';
import { useRecoilValue } from 'recoil';
import { NativeTypes } from 'react-dnd-html5-backend';
import { useQueryClient } from '@tanstack/react-query';
import { useRecoilValue, useSetRecoilState } from 'recoil';
import {
  Constants,
  QueryKeys,
  Constants,
  EModelEndpoint,
  isAgentsEndpoint,
  isEphemeralAgent,
  EToolResources,
  AgentCapabilities,
  isAssistantsEndpoint,
} from 'librechat-data-provider';
import type * as t from 'librechat-data-provider';
import type { DropTargetMonitor } from 'react-dnd';
import useFileHandling from './useFileHandling';
import type * as t from 'librechat-data-provider';
import store, { ephemeralAgentByConvoId } from '~/store';
import useFileHandling from './useFileHandling';

export default function useDragHelpers() {
  const queryClient = useQueryClient();
  const [showModal, setShowModal] = useState(false);
  const [draggedFiles, setDraggedFiles] = useState<File[]>([]);
  const conversation = useRecoilValue(store.conversationByIndex(0)) || undefined;
  const key = useMemo(
    () => conversation?.conversationId ?? Constants.NEW_CONVO,
    [conversation?.conversationId],
  const setEphemeralAgent = useSetRecoilState(
    ephemeralAgentByConvoId(conversation?.conversationId ?? Constants.NEW_CONVO),
  );
  const ephemeralAgent = useRecoilValue(ephemeralAgentByConvoId(key));

  const handleOptionSelect = (toolResource: string | undefined) => {
  const handleOptionSelect = (toolResource: EToolResources | undefined) => {
    /** File search is not automatically enabled to simulate legacy behavior */
    if (toolResource && toolResource !== EToolResources.file_search) {
      setEphemeralAgent((prev) => ({
        ...prev,
        [toolResource]: true,
      }));
    }
    handleFiles(draggedFiles, toolResource);
    setShowModal(false);
    setDraggedFiles([]);
  };

  const isAgents = useMemo(
    () =>
      isAgentsEndpoint(conversation?.endpoint) ||
      isEphemeralAgent(conversation?.endpoint, ephemeralAgent),
    [conversation?.endpoint, ephemeralAgent],
    () => !isAssistantsEndpoint(conversation?.endpoint),
    [conversation?.endpoint],
  );

  const { handleFiles } = useFileHandling({

@@ -18,6 +18,7 @@ import useLocalize, { TranslationKeys } from '~/hooks/useLocalize';
import { useChatContext } from '~/Providers/ChatContext';
import { useToastContext } from '~/Providers/ToastContext';
import { logger, validateFiles } from '~/utils';
import useClientResize from './useClientResize';
import { processFileForUpload } from '~/utils/heicConverter';
import { useDelayedUploadToast } from './useDelayedUploadToast';
import useUpdateFiles from './useUpdateFiles';
@@ -41,6 +42,7 @@ const useFileHandling = (params?: UseFileHandling) => {
  const { addFile, replaceFile, updateFileById, deleteFileById } = useUpdateFiles(
    params?.fileSetter ?? setFiles,
  );
  const { resizeImageIfNeeded } = useClientResize();

  const agent_id = params?.additionalMetadata?.agent_id ?? '';
  const assistant_id = params?.additionalMetadata?.assistant_id ?? '';
@@ -298,7 +300,7 @@ const useFileHandling = (params?: UseFileHandling) => {
      }

      // Process file for HEIC conversion if needed
      const processedFile = await processFileForUpload(
      const heicProcessedFile = await processFileForUpload(
        originalFile,
        0.9,
        (conversionProgress) => {
@@ -311,23 +313,50 @@ const useFileHandling = (params?: UseFileHandling) => {
        },
      );

      // If file was converted, update with new file and preview
      if (processedFile !== originalFile) {
      let finalProcessedFile = heicProcessedFile;

      // Apply client-side resizing if available and appropriate
      if (heicProcessedFile.type.startsWith('image/')) {
        try {
          const resizeResult = await resizeImageIfNeeded(heicProcessedFile);
          finalProcessedFile = resizeResult.file;

          // Show toast notification if image was resized
          if (resizeResult.resized && resizeResult.result) {
            const { originalSize, newSize, compressionRatio } = resizeResult.result;
            const originalSizeMB = (originalSize / (1024 * 1024)).toFixed(1);
            const newSizeMB = (newSize / (1024 * 1024)).toFixed(1);
            const savedPercent = Math.round((1 - compressionRatio) * 100);

            showToast({
              message: `Image resized: ${originalSizeMB}MB → ${newSizeMB}MB (${savedPercent}% smaller)`,
              status: 'success',
              duration: 3000,
            });
          }
        } catch (resizeError) {
          console.warn('Image resize failed, using original:', resizeError);
          // Continue with HEIC processed file if resizing fails
        }
      }

      // If file was processed (HEIC converted or resized), update with new file and preview
      if (finalProcessedFile !== originalFile) {
        URL.revokeObjectURL(initialPreview); // Clean up original preview
        const newPreview = URL.createObjectURL(processedFile);
        const newPreview = URL.createObjectURL(finalProcessedFile);

        const updatedExtendedFile: ExtendedFile = {
          ...initialExtendedFile,
          file: processedFile,
          type: processedFile.type,
          file: finalProcessedFile,
          type: finalProcessedFile.type,
          preview: newPreview,
          progress: 0.5, // Conversion complete, ready for upload
          size: processedFile.size,
          progress: 0.5, // Processing complete, ready for upload
          size: finalProcessedFile.size,
        };

        replaceFile(updatedExtendedFile);

        const isImage = processedFile.type.split('/')[0] === 'image';
        const isImage = finalProcessedFile.type.split('/')[0] === 'image';
        if (isImage) {
          loadImage(updatedExtendedFile, newPreview);
          continue;
@@ -335,7 +364,7 @@ const useFileHandling = (params?: UseFileHandling) => {

        await startUpload(updatedExtendedFile);
      } else {
        // File wasn't converted, proceed with original
        // File wasn't processed, proceed with original
        const isImage = originalFile.type.split('/')[0] === 'image';
        const tool_resource =
          initialExtendedFile.tool_resource ?? params?.additionalMetadata?.tool_resource;

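A quick worked check of the toast arithmetic above, with made-up sizes (this assumes `compressionRatio` is `newSize / originalSize`, which the `savedPercent` formula implies):

const originalSize = 5 * 1024 * 1024; // 5 MB (sample value)
const newSize = 2 * 1024 * 1024; // 2 MB (sample value)
const compressionRatio = newSize / originalSize; // 0.4
const originalSizeMB = (originalSize / (1024 * 1024)).toFixed(1); // "5.0"
const newSizeMB = (newSize / (1024 * 1024)).toFixed(1); // "2.0"
const savedPercent = Math.round((1 - compressionRatio) * 100); // 60
// Toast reads: "Image resized: 5.0MB → 2.0MB (60% smaller)"
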
@@ -15,12 +15,11 @@ import BookmarkPanel from '~/components/SidePanel/Bookmarks/BookmarkPanel';
import MemoryViewer from '~/components/SidePanel/Memories/MemoryViewer';
import PanelSwitch from '~/components/SidePanel/Builder/PanelSwitch';
import PromptsAccordion from '~/components/Prompts/PromptsAccordion';
import { Blocks, MCPIcon, AttachmentIcon } from '~/components/svg';
import Parameters from '~/components/SidePanel/Parameters/Panel';
import FilesPanel from '~/components/SidePanel/Files/Panel';
import MCPPanel from '~/components/SidePanel/MCP/MCPPanel';
import { Blocks, AttachmentIcon } from '~/components/svg';
import { useGetStartupConfig } from '~/data-provider';
import MCPIcon from '~/components/ui/MCPIcon';
import { useHasAccess } from '~/hooks';

export default function useSideNavLinks({

@@ -1,3 +1,5 @@
export * from './useMCPSelect';
export * from './useToolToggle';
export { default as useAuthCodeTool } from './useAuthCodeTool';
export { default as usePluginInstall } from './usePluginInstall';
export { default as useCodeApiKeyForm } from './useCodeApiKeyForm';

@@ -1,5 +1,5 @@
// client/src/hooks/Plugins/useCodeApiKeyForm.ts
import { useState, useCallback } from 'react';
import { useRef, useState, useCallback } from 'react';
import { useForm } from 'react-hook-form';
import type { ApiKeyFormData } from '~/common';
import useAuthCodeTool from '~/hooks/Plugins/useAuthCodeTool';
@@ -12,6 +12,8 @@ export default function useCodeApiKeyForm({
  onRevoke?: () => void;
}) {
  const methods = useForm<ApiKeyFormData>();
  const menuTriggerRef = useRef<HTMLButtonElement>(null);
  const badgeTriggerRef = useRef<HTMLInputElement>(null);
  const [isDialogOpen, setIsDialogOpen] = useState(false);
  const { installTool, removeTool } = useAuthCodeTool({ isEntityTool: true });
  const { reset } = methods;
@@ -39,5 +41,7 @@ export default function useCodeApiKeyForm({
    setIsDialogOpen,
    handleRevokeApiKey,
    onSubmit: onSubmitHandler,
    badgeTriggerRef,
    menuTriggerRef,
  };
}

114  client/src/hooks/Plugins/useMCPSelect.ts  Normal file
@@ -0,0 +1,114 @@
import { useRef, useEffect, useCallback, useMemo } from 'react';
import { useRecoilState } from 'recoil';
import { Constants, LocalStorageKeys, EModelEndpoint } from 'librechat-data-provider';
import type { TPlugin } from 'librechat-data-provider';
import { useAvailableToolsQuery } from '~/data-provider';
import useLocalStorage from '~/hooks/useLocalStorageAlt';
import { ephemeralAgentByConvoId } from '~/store';

const storageCondition = (value: unknown, rawCurrentValue?: string | null) => {
  if (rawCurrentValue) {
    try {
      const currentValue = rawCurrentValue?.trim() ?? '';
      if (currentValue.length > 2) {
        return true;
      }
    } catch (e) {
      console.error(e);
    }
  }
  return Array.isArray(value) && value.length > 0;
};

interface UseMCPSelectOptions {
  conversationId?: string | null;
}

export function useMCPSelect({ conversationId }: UseMCPSelectOptions) {
  const key = conversationId ?? Constants.NEW_CONVO;
  const hasSetFetched = useRef<string | null>(null);
  const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key));
  const { data: mcpToolDetails, isFetched } = useAvailableToolsQuery(EModelEndpoint.agents, {
    select: (data: TPlugin[]) => {
      const mcpToolsMap = new Map<string, TPlugin>();
      data.forEach((tool) => {
        const isMCP = tool.pluginKey.includes(Constants.mcp_delimiter);
        if (isMCP && tool.chatMenu !== false) {
          const parts = tool.pluginKey.split(Constants.mcp_delimiter);
          const serverName = parts[parts.length - 1];
          if (!mcpToolsMap.has(serverName)) {
            mcpToolsMap.set(serverName, {
              name: serverName,
              pluginKey: tool.pluginKey,
              authConfig: tool.authConfig,
              authenticated: tool.authenticated,
            });
          }
        }
      });
      return Array.from(mcpToolsMap.values());
    },
  });

  const mcpState = useMemo(() => {
    return ephemeralAgent?.mcp ?? [];
  }, [ephemeralAgent?.mcp]);

  const setSelectedValues = useCallback(
    (values: string[] | null | undefined) => {
      if (!values) {
        return;
      }
      if (!Array.isArray(values)) {
        return;
      }
      setEphemeralAgent((prev) => ({
        ...prev,
        mcp: values,
      }));
    },
    [setEphemeralAgent],
  );

  const [mcpValues, setMCPValues] = useLocalStorage<string[]>(
    `${LocalStorageKeys.LAST_MCP_}${key}`,
    mcpState,
    setSelectedValues,
    storageCondition,
  );

  const [isPinned, setIsPinned] = useLocalStorage<boolean>(
    `${LocalStorageKeys.PIN_MCP_}${key}`,
    true,
  );

  useEffect(() => {
    if (hasSetFetched.current === key) {
      return;
    }
    if (!isFetched) {
      return;
    }
    hasSetFetched.current = key;
    if ((mcpToolDetails?.length ?? 0) > 0) {
      setMCPValues(mcpValues.filter((mcp) => mcpToolDetails?.some((tool) => tool.name === mcp)));
      return;
    }
    setMCPValues([]);
  }, [isFetched, setMCPValues, mcpToolDetails, key, mcpValues]);

  const mcpServerNames = useMemo(() => {
    return (mcpToolDetails ?? []).map((tool) => tool.name);
  }, [mcpToolDetails]);

  return {
    isPinned,
    mcpValues,
    setIsPinned,
    setMCPValues,
    mcpServerNames,
    ephemeralAgent,
    mcpToolDetails,
    setEphemeralAgent,
  };
}

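A minimal consumer sketch for useMCPSelect; the surrounding component and markup are hypothetical, while the hook's returned names come from the file above:

function MCPServerPicker({ conversationId }: { conversationId?: string | null }) {
  const { mcpValues, setMCPValues, mcpServerNames } = useMCPSelect({ conversationId });

  return (
    <select
      multiple
      value={mcpValues}
      // Selections are mirrored into both localStorage and the ephemeral agent.
      onChange={(e) => setMCPValues(Array.from(e.target.selectedOptions, (o) => o.value))}
    >
      {mcpServerNames.map((name) => (
        <option key={name} value={name}>
          {name}
        </option>
      ))}
    </select>
  );
}
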
@@ -1,4 +1,4 @@
import { useState, useCallback } from 'react';
import { useRef, useState, useCallback } from 'react';
import { useForm } from 'react-hook-form';
import useAuthSearchTool from '~/hooks/Plugins/useAuthSearchTool';
import type { SearchApiKeyFormData } from '~/hooks/Plugins/useAuthSearchTool';
@@ -11,6 +11,8 @@ export default function useSearchApiKeyForm({
  onRevoke?: () => void;
}) {
  const methods = useForm<SearchApiKeyFormData>();
  const menuTriggerRef = useRef<HTMLButtonElement>(null);
  const badgeTriggerRef = useRef<HTMLInputElement>(null);
  const [isDialogOpen, setIsDialogOpen] = useState(false);
  const { installTool, removeTool } = useAuthSearchTool({ isEntityTool: true });
  const { reset } = methods;
@@ -38,5 +40,7 @@ export default function useSearchApiKeyForm({
    setIsDialogOpen,
    handleRevokeApiKey,
    onSubmit: onSubmitHandler,
    badgeTriggerRef,
    menuTriggerRef,
  };
}

119  client/src/hooks/Plugins/useToolToggle.ts  Normal file
@@ -0,0 +1,119 @@
import { useRef, useEffect, useCallback, useMemo } from 'react';
import { useRecoilState } from 'recoil';
import debounce from 'lodash/debounce';
import { Constants, LocalStorageKeys } from 'librechat-data-provider';
import type { VerifyToolAuthResponse } from 'librechat-data-provider';
import type { UseQueryOptions } from '@tanstack/react-query';
import { useVerifyAgentToolAuth } from '~/data-provider';
import useLocalStorage from '~/hooks/useLocalStorageAlt';
import { ephemeralAgentByConvoId } from '~/store';

const storageCondition = (value: unknown, rawCurrentValue?: string | null) => {
  if (rawCurrentValue) {
    try {
      const currentValue = rawCurrentValue?.trim() ?? '';
      if (currentValue === 'true' && value === false) {
        return true;
      }
    } catch (e) {
      console.error(e);
    }
  }
  return value !== undefined && value !== null && value !== '' && value !== false;
};

interface UseToolToggleOptions {
  conversationId?: string | null;
  toolKey: string;
  localStorageKey: LocalStorageKeys;
  isAuthenticated?: boolean;
  setIsDialogOpen?: (open: boolean) => void;
  /** Options for auth verification */
  authConfig?: {
    toolId: string;
    queryOptions?: UseQueryOptions<VerifyToolAuthResponse>;
  };
}

export function useToolToggle({
  conversationId,
  toolKey,
  localStorageKey,
  isAuthenticated: externalIsAuthenticated,
  setIsDialogOpen,
  authConfig,
}: UseToolToggleOptions) {
  const key = conversationId ?? Constants.NEW_CONVO;
  const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key));

  const authQuery = useVerifyAgentToolAuth(
    { toolId: authConfig?.toolId || '' },
    {
      enabled: !!authConfig?.toolId,
      ...authConfig?.queryOptions,
    },
  );

  const isAuthenticated = useMemo(
    () =>
      externalIsAuthenticated ?? (authConfig ? (authQuery?.data?.authenticated ?? false) : false),
    [externalIsAuthenticated, authConfig, authQuery.data?.authenticated],
  );

  const isToolEnabled = useMemo(() => {
    return ephemeralAgent?.[toolKey] ?? false;
  }, [ephemeralAgent, toolKey]);

  /** Track previous value to prevent infinite loops */
  const prevIsToolEnabled = useRef(isToolEnabled);

  const [toggleState, setToggleState] = useLocalStorage<boolean>(
    `${localStorageKey}${key}`,
    isToolEnabled,
    undefined,
    storageCondition,
  );

  const [isPinned, setIsPinned] = useLocalStorage<boolean>(`${localStorageKey}pinned`, false);

  const handleChange = useCallback(
    ({ e, isChecked }: { e?: React.ChangeEvent<HTMLInputElement>; isChecked: boolean }) => {
      if (isAuthenticated !== undefined && !isAuthenticated && setIsDialogOpen) {
        setIsDialogOpen(true);
        e?.preventDefault?.();
        return;
      }
      setToggleState(isChecked);
      setEphemeralAgent((prev) => ({
        ...prev,
        [toolKey]: isChecked,
      }));
    },
    [setToggleState, setIsDialogOpen, isAuthenticated, setEphemeralAgent, toolKey],
  );

  const debouncedChange = useMemo(
    () => debounce(handleChange, 50, { leading: true }),
    [handleChange],
  );

  useEffect(() => {
    if (prevIsToolEnabled.current !== isToolEnabled) {
      setToggleState(isToolEnabled);
    }
    prevIsToolEnabled.current = isToolEnabled;
  }, [isToolEnabled, setToggleState]);

  return {
    toggleState,
    handleChange,
    isToolEnabled,
    setToggleState,
    ephemeralAgent,
    debouncedChange,
    setEphemeralAgent,
    authData: authQuery?.data,
    isPinned,
    setIsPinned,
  };
}

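useToolToggle generalizes the per-tool toggle pattern (the WebSearch badge earlier in this diff is one instance). A hypothetical caller wiring it to a checkbox; the `toolKey` and `localStorageKey` values here are illustrative, not taken from the diff:

function CodeToggle({ conversationId }: { conversationId?: string | null }) {
  const { toggleState, debouncedChange, isPinned } = useToolToggle({
    conversationId,
    toolKey: 'execute_code', // illustrative ephemeral-agent key
    localStorageKey: LocalStorageKeys.LAST_CODE_TOGGLE_, // illustrative storage key
  });

  if (!toggleState && !isPinned) {
    return null;
  }
  return (
    <input
      type="checkbox"
      checked={toggleState}
      onChange={(e) => debouncedChange({ e, isChecked: e.target.checked })}
    />
  );
}
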
Some files were not shown because too many files have changed in this diff.