Compare commits
35 Commits
v0.7.1
...
azure-test
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
093d5bb05f | ||
|
|
b6d6343f54 | ||
|
|
89b1e33be0 | ||
|
|
436f7195b5 | ||
|
|
2aec4a6250 | ||
|
|
b77bd19092 | ||
|
|
446ffe0417 | ||
|
|
b9bcaee656 | ||
|
|
110c0535fb | ||
|
|
25fceb78b7 | ||
|
|
c8baceac76 | ||
|
|
a0288f1c5c | ||
|
|
5d3c90be26 | ||
|
|
ab6fbe48f1 | ||
|
|
3b44741cf9 | ||
|
|
d21a05606e | ||
|
|
0e50c07e3f | ||
|
|
a5cac03fa4 | ||
|
|
ba4fa6150e | ||
|
|
463ca5d613 | ||
|
|
039c7ae880 | ||
|
|
63ef15ab63 | ||
|
|
8a78500fe2 | ||
|
|
144fd5f6aa | ||
|
|
2720327aa1 | ||
|
|
4d0806d3e8 | ||
|
|
5b5f9b950b | ||
|
|
3ccff19821 | ||
|
|
11d5e232b3 | ||
|
|
099aa9dead | ||
|
|
4121818124 | ||
|
|
ca9a0fe629 | ||
|
|
bde6bb0152 | ||
|
|
667f5f91fe | ||
|
|
75da75be08 |
16
.env.example
16
.env.example
@@ -64,13 +64,14 @@ PROXY=
|
||||
#===================================#
|
||||
# https://docs.librechat.ai/install/configuration/ai_endpoints.html
|
||||
|
||||
# GROQ_API_KEY=
|
||||
# SHUTTLEAI_KEY=
|
||||
# OPENROUTER_KEY=
|
||||
# MISTRAL_API_KEY=
|
||||
# ANYSCALE_API_KEY=
|
||||
# APIPIE_API_KEY=
|
||||
# FIREWORKS_API_KEY=
|
||||
# GROQ_API_KEY=
|
||||
# MISTRAL_API_KEY=
|
||||
# OPENROUTER_KEY=
|
||||
# PERPLEXITY_API_KEY=
|
||||
# SHUTTLEAI_API_KEY=
|
||||
# TOGETHERAI_API_KEY=
|
||||
|
||||
#============#
|
||||
@@ -113,9 +114,14 @@ BINGAI_TOKEN=user_provided
|
||||
#============#
|
||||
|
||||
GOOGLE_KEY=user_provided
|
||||
# GOOGLE_MODELS=gemini-pro,gemini-pro-vision,chat-bison,chat-bison-32k,codechat-bison,codechat-bison-32k,text-bison,text-bison-32k,text-unicorn,code-gecko,code-bison,code-bison-32k
|
||||
# GOOGLE_REVERSE_PROXY=
|
||||
|
||||
# Gemini API
|
||||
# GOOGLE_MODELS=gemini-1.0-pro,gemini-1.0-pro-001,gemini-1.0-pro-latest,gemini-1.0-pro-vision-latest,gemini-1.5-pro-latest,gemini-pro,gemini-pro-vision
|
||||
|
||||
# Vertex AI
|
||||
# GOOGLE_MODELS=gemini-1.5-pro-preview-0409,gemini-1.0-pro-vision-001,gemini-pro,gemini-pro-vision,chat-bison,chat-bison-32k,codechat-bison,codechat-bison-32k,text-bison,text-bison-32k,text-unicorn,code-gecko,code-bison,code-bison-32k
|
||||
|
||||
#============#
|
||||
# OpenAI #
|
||||
#============#
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -76,6 +76,7 @@ config.local.ts
|
||||
**/storageState.json
|
||||
junit.xml
|
||||
**/.venv/
|
||||
**/venv/
|
||||
|
||||
# docker override file
|
||||
docker-compose.override.yaml
|
||||
|
||||
16
README.md
16
README.md
@@ -41,6 +41,14 @@
|
||||
# 📃 Features
|
||||
|
||||
- 🖥️ UI matching ChatGPT, including Dark mode, Streaming, and latest updates
|
||||
- 🤖 AI model selection:
|
||||
- OpenAI, Azure OpenAI, BingAI, ChatGPT, Google Vertex AI, Anthropic (Claude), Plugins, Assistants API (including Azure Assistants)
|
||||
- ✅ Compatible across both **[Remote & Local AI services](https://docs.librechat.ai/install/configuration/ai_endpoints.html#intro):**
|
||||
- groq, Ollama, Cohere, Mistral AI, Apple MLX, koboldcpp, OpenRouter, together.ai, Perplexity, ShuttleAI, and more
|
||||
- 💾 Create, Save, & Share Custom Presets
|
||||
- 🔀 Switch between AI Endpoints and Presets, mid-chat
|
||||
- 🔄 Edit, Resubmit, and Continue Messages with Conversation branching
|
||||
- 🌿 Fork Messages & Conversations for Advanced Context control
|
||||
- 💬 Multimodal Chat:
|
||||
- Upload and analyze images with Claude 3, GPT-4, and Gemini Vision 📸
|
||||
- Chat with Files using Custom Endpoints, OpenAI, Azure, Anthropic, & Google. 🗃️
|
||||
@@ -50,14 +58,14 @@
|
||||
- 🌎 Multilingual UI:
|
||||
- English, 中文, Deutsch, Español, Français, Italiano, Polski, Português Brasileiro,
|
||||
- Русский, 日本語, Svenska, 한국어, Tiếng Việt, 繁體中文, العربية, Türkçe, Nederlands, עברית
|
||||
- 🤖 AI model selection: OpenAI, Azure OpenAI, BingAI, ChatGPT, Google Vertex AI, Anthropic (Claude), Plugins, Assistants API (including Azure Assistants)
|
||||
- 💾 Create, Save, & Share Custom Presets
|
||||
- 🔄 Edit, Resubmit, and Continue messages with conversation branching
|
||||
- 🎨 Customizable Dropdown & Interface: Adapts to both power users and newcomers.
|
||||
- 📥 Import Conversations from LibreChat, ChatGPT, Chatbot UI
|
||||
- 📤 Export conversations as screenshots, markdown, text, json.
|
||||
- 🔍 Search all messages/conversations
|
||||
- 🔌 Plugins, including web access, image generation with DALL-E-3 and more
|
||||
- 👥 Multi-User, Secure Authentication with Moderation and Token spend tools
|
||||
- ⚙️ Configure Proxy, Reverse Proxy, Docker, & many Deployment options
|
||||
- ⚙️ Configure Proxy, Reverse Proxy, Docker, & many Deployment options:
|
||||
- Use completely local or deploy on the cloud
|
||||
- 📖 Completely Open-Source & Built in Public
|
||||
- 🧑🤝🧑 Community-driven development, support, and feedback
|
||||
|
||||
|
||||
@@ -655,6 +655,9 @@ class AnthropicClient extends BaseClient {
|
||||
promptPrefix: this.options.promptPrefix,
|
||||
modelLabel: this.options.modelLabel,
|
||||
resendFiles: this.options.resendFiles,
|
||||
iconURL: this.options.iconURL,
|
||||
greeting: this.options.greeting,
|
||||
spec: this.options.spec,
|
||||
...this.modelOptions,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -456,6 +456,8 @@ class BaseClient {
|
||||
sender: this.sender,
|
||||
text: addSpaceIfNeeded(generation) + completion,
|
||||
promptTokens,
|
||||
iconURL: this.options.iconURL,
|
||||
endpoint: this.options.endpoint,
|
||||
...(this.metadata ?? {}),
|
||||
};
|
||||
|
||||
@@ -525,8 +527,19 @@ class BaseClient {
|
||||
return _messages;
|
||||
}
|
||||
|
||||
/**
|
||||
* Save a message to the database.
|
||||
* @param {TMessage} message
|
||||
* @param {Partial<TConversation>} endpointOptions
|
||||
* @param {string | null} user
|
||||
*/
|
||||
async saveMessageToDatabase(message, endpointOptions, user = null) {
|
||||
await saveMessage({ ...message, endpoint: this.options.endpoint, user, unfinished: false });
|
||||
await saveMessage({
|
||||
...message,
|
||||
endpoint: this.options.endpoint,
|
||||
unfinished: false,
|
||||
user,
|
||||
});
|
||||
await saveConvo(user, {
|
||||
conversationId: message.conversationId,
|
||||
endpoint: this.options.endpoint,
|
||||
@@ -556,11 +569,11 @@ class BaseClient {
|
||||
* the message is considered a root message.
|
||||
*
|
||||
* @param {Object} options - The options for the function.
|
||||
* @param {Array} options.messages - An array of message objects. Each object should have either an 'id' or 'messageId' property, and may have a 'parentMessageId' property.
|
||||
* @param {TMessage[]} options.messages - An array of message objects. Each object should have either an 'id' or 'messageId' property, and may have a 'parentMessageId' property.
|
||||
* @param {string} options.parentMessageId - The ID of the parent message to start the traversal from.
|
||||
* @param {Function} [options.mapMethod] - An optional function to map over the ordered messages. If provided, it will be applied to each message in the resulting array.
|
||||
* @param {boolean} [options.summary=false] - If set to true, the traversal modifies messages with 'summary' and 'summaryTokenCount' properties and stops at the message with a 'summary' property.
|
||||
* @returns {Array} An array containing the messages in the order they should be displayed, starting with the most recent message with a 'summary' property if the 'summary' option is true, and ending with the message identified by 'parentMessageId'.
|
||||
* @returns {TMessage[]} An array containing the messages in the order they should be displayed, starting with the most recent message with a 'summary' property if the 'summary' option is true, and ending with the message identified by 'parentMessageId'.
|
||||
*/
|
||||
static getMessagesForConversation({
|
||||
messages,
|
||||
|
||||
@@ -708,6 +708,9 @@ class GoogleClient extends BaseClient {
|
||||
return {
|
||||
promptPrefix: this.options.promptPrefix,
|
||||
modelLabel: this.options.modelLabel,
|
||||
iconURL: this.options.iconURL,
|
||||
greeting: this.options.greeting,
|
||||
spec: this.options.spec,
|
||||
...this.modelOptions,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const OpenAI = require('openai');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const {
|
||||
Constants,
|
||||
ImageDetail,
|
||||
EModelEndpoint,
|
||||
resolveHeaders,
|
||||
@@ -20,16 +21,16 @@ const {
|
||||
const {
|
||||
truncateText,
|
||||
formatMessage,
|
||||
createContextHandlers,
|
||||
CUT_OFF_PROMPT,
|
||||
titleInstruction,
|
||||
createContextHandlers,
|
||||
} = require('./prompts');
|
||||
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
|
||||
const { handleOpenAIErrors } = require('./tools/util');
|
||||
const { isEnabled, sleep } = require('~/server/utils');
|
||||
const spendTokens = require('~/models/spendTokens');
|
||||
const { createLLM, RunManager } = require('./llm');
|
||||
const ChatGPTClient = require('./ChatGPTClient');
|
||||
const { isEnabled } = require('~/server/utils');
|
||||
const { summaryBuffer } = require('./memory');
|
||||
const { runTitleChain } = require('./chains');
|
||||
const { tokenSplit } = require('./document');
|
||||
@@ -200,16 +201,6 @@ class OpenAIClient extends BaseClient {
|
||||
|
||||
this.setupTokens();
|
||||
|
||||
if (!this.modelOptions.stop && !this.isVisionModel) {
|
||||
const stopTokens = [this.startToken];
|
||||
if (this.endToken && this.endToken !== this.startToken) {
|
||||
stopTokens.push(this.endToken);
|
||||
}
|
||||
stopTokens.push(`\n${this.userLabel}:`);
|
||||
stopTokens.push('<|diff_marker|>');
|
||||
this.modelOptions.stop = stopTokens;
|
||||
}
|
||||
|
||||
if (reverseProxy) {
|
||||
this.completionsUrl = reverseProxy;
|
||||
this.langchainProxy = extractBaseURL(reverseProxy);
|
||||
@@ -390,6 +381,9 @@ class OpenAIClient extends BaseClient {
|
||||
promptPrefix: this.options.promptPrefix,
|
||||
resendFiles: this.options.resendFiles,
|
||||
imageDetail: this.options.imageDetail,
|
||||
iconURL: this.options.iconURL,
|
||||
greeting: this.options.greeting,
|
||||
spec: this.options.spec,
|
||||
...this.modelOptions,
|
||||
};
|
||||
}
|
||||
@@ -729,7 +723,10 @@ class OpenAIClient extends BaseClient {
|
||||
|
||||
const { OPENAI_TITLE_MODEL } = process.env ?? {};
|
||||
|
||||
const model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo';
|
||||
let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo';
|
||||
if (model === Constants.CURRENT_MODEL) {
|
||||
model = this.modelOptions.model;
|
||||
}
|
||||
|
||||
const modelOptions = {
|
||||
// TODO: remove the gpt fallback and make it specific to endpoint
|
||||
@@ -851,7 +848,11 @@ ${convo}
|
||||
|
||||
// TODO: remove the gpt fallback and make it specific to endpoint
|
||||
const { OPENAI_SUMMARY_MODEL = 'gpt-3.5-turbo' } = process.env ?? {};
|
||||
const model = this.options.summaryModel ?? OPENAI_SUMMARY_MODEL;
|
||||
let model = this.options.summaryModel ?? OPENAI_SUMMARY_MODEL;
|
||||
if (model === Constants.CURRENT_MODEL) {
|
||||
model = this.modelOptions.model;
|
||||
}
|
||||
|
||||
const maxContextTokens =
|
||||
getModelMaxTokens(
|
||||
model,
|
||||
@@ -1158,6 +1159,8 @@ ${convo}
|
||||
stream.controller.abort();
|
||||
break;
|
||||
}
|
||||
|
||||
await sleep(25);
|
||||
}
|
||||
|
||||
if (!UnexpectedRoleError) {
|
||||
|
||||
@@ -42,8 +42,12 @@ class PluginsClient extends OpenAIClient {
|
||||
return {
|
||||
chatGptLabel: this.options.chatGptLabel,
|
||||
promptPrefix: this.options.promptPrefix,
|
||||
tools: this.options.tools,
|
||||
...this.modelOptions,
|
||||
agentOptions: this.agentOptions,
|
||||
iconURL: this.options.iconURL,
|
||||
greeting: this.options.greeting,
|
||||
spec: this.options.spec,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -144,9 +148,11 @@ class PluginsClient extends OpenAIClient {
|
||||
signal,
|
||||
pastMessages,
|
||||
tools: this.tools,
|
||||
currentDateString: this.currentDateString,
|
||||
verbose: this.options.debug,
|
||||
returnIntermediateSteps: true,
|
||||
customName: this.options.chatGptLabel,
|
||||
currentDateString: this.currentDateString,
|
||||
customInstructions: this.options.promptPrefix,
|
||||
callbackManager: CallbackManager.fromHandlers({
|
||||
async handleAgentAction(action, runId) {
|
||||
handleAction(action, runId, onAgentAction);
|
||||
@@ -304,6 +310,8 @@ class PluginsClient extends OpenAIClient {
|
||||
}
|
||||
|
||||
const responseMessage = {
|
||||
endpoint: EModelEndpoint.gptPlugins,
|
||||
iconURL: this.options.iconURL,
|
||||
messageId: responseMessageId,
|
||||
conversationId,
|
||||
parentMessageId: userMessage.messageId,
|
||||
|
||||
@@ -13,10 +13,18 @@ const initializeCustomAgent = async ({
|
||||
tools,
|
||||
model,
|
||||
pastMessages,
|
||||
customName,
|
||||
customInstructions,
|
||||
currentDateString,
|
||||
...rest
|
||||
}) => {
|
||||
let prompt = CustomAgent.createPrompt(tools, { currentDateString, model: model.modelName });
|
||||
if (customName) {
|
||||
prompt = `You are "${customName}".\n${prompt}`;
|
||||
}
|
||||
if (customInstructions) {
|
||||
prompt = `${prompt}\n${customInstructions}`;
|
||||
}
|
||||
|
||||
const chatPrompt = ChatPromptTemplate.fromMessages([
|
||||
new SystemMessagePromptTemplate(prompt),
|
||||
|
||||
@@ -10,6 +10,8 @@ const initializeFunctionsAgent = async ({
|
||||
tools,
|
||||
model,
|
||||
pastMessages,
|
||||
customName,
|
||||
customInstructions,
|
||||
currentDateString,
|
||||
...rest
|
||||
}) => {
|
||||
@@ -24,7 +26,13 @@ const initializeFunctionsAgent = async ({
|
||||
returnMessages: true,
|
||||
});
|
||||
|
||||
const prefix = addToolDescriptions(`Current Date: ${currentDateString}\n${PREFIX}`, tools);
|
||||
let prefix = addToolDescriptions(`Current Date: ${currentDateString}\n${PREFIX}`, tools);
|
||||
if (customName) {
|
||||
prefix = `You are "${customName}".\n${prefix}`;
|
||||
}
|
||||
if (customInstructions) {
|
||||
prefix = `${prefix}\n${customInstructions}`;
|
||||
}
|
||||
|
||||
return await initializeAgentExecutorWithOptions(tools, model, {
|
||||
agentType: 'openai-functions',
|
||||
|
||||
@@ -1,11 +1,28 @@
|
||||
const { MeiliSearch } = require('meilisearch');
|
||||
const Message = require('~/models/schema/messageSchema');
|
||||
const Conversation = require('~/models/schema/convoSchema');
|
||||
const Message = require('~/models/schema/messageSchema');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const searchEnabled = process.env?.SEARCH?.toLowerCase() === 'true';
|
||||
let currentTimeout = null;
|
||||
|
||||
class MeiliSearchClient {
|
||||
static instance = null;
|
||||
|
||||
static getInstance() {
|
||||
if (!MeiliSearchClient.instance) {
|
||||
if (!process.env.MEILI_HOST || !process.env.MEILI_MASTER_KEY) {
|
||||
throw new Error('Meilisearch configuration is missing.');
|
||||
}
|
||||
MeiliSearchClient.instance = new MeiliSearch({
|
||||
host: process.env.MEILI_HOST,
|
||||
apiKey: process.env.MEILI_MASTER_KEY,
|
||||
});
|
||||
}
|
||||
return MeiliSearchClient.instance;
|
||||
}
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
async function indexSync(req, res, next) {
|
||||
if (!searchEnabled) {
|
||||
@@ -13,20 +30,10 @@ async function indexSync(req, res, next) {
|
||||
}
|
||||
|
||||
try {
|
||||
if (!process.env.MEILI_HOST || !process.env.MEILI_MASTER_KEY || !searchEnabled) {
|
||||
throw new Error('Meilisearch not configured, search will be disabled.');
|
||||
}
|
||||
|
||||
const client = new MeiliSearch({
|
||||
host: process.env.MEILI_HOST,
|
||||
apiKey: process.env.MEILI_MASTER_KEY,
|
||||
});
|
||||
const client = MeiliSearchClient.getInstance();
|
||||
|
||||
const { status } = await client.health();
|
||||
// logger.debug(`[indexSync] Meilisearch: ${status}`);
|
||||
const result = status === 'available' && !!process.env.SEARCH;
|
||||
|
||||
if (!result) {
|
||||
if (status !== 'available' || !process.env.SEARCH) {
|
||||
throw new Error('Meilisearch not available');
|
||||
}
|
||||
|
||||
@@ -37,12 +44,8 @@ async function indexSync(req, res, next) {
|
||||
const messagesIndexed = messages.numberOfDocuments;
|
||||
const convosIndexed = convos.numberOfDocuments;
|
||||
|
||||
logger.debug(
|
||||
`[indexSync] There are ${messageCount} messages in the database, ${messagesIndexed} indexed`,
|
||||
);
|
||||
logger.debug(
|
||||
`[indexSync] There are ${convoCount} convos in the database, ${convosIndexed} indexed`,
|
||||
);
|
||||
logger.debug(`[indexSync] There are ${messageCount} messages and ${messagesIndexed} indexed`);
|
||||
logger.debug(`[indexSync] There are ${convoCount} convos and ${convosIndexed} indexed`);
|
||||
|
||||
if (messageCount !== messagesIndexed) {
|
||||
logger.debug('[indexSync] Messages out of sync, indexing');
|
||||
@@ -54,7 +57,6 @@ async function indexSync(req, res, next) {
|
||||
Conversation.syncWithMeili();
|
||||
}
|
||||
} catch (err) {
|
||||
// logger.debug('[indexSync] in index sync');
|
||||
if (err.message.includes('not found')) {
|
||||
logger.debug('[indexSync] Creating indices...');
|
||||
currentTimeout = setTimeout(async () => {
|
||||
|
||||
@@ -2,6 +2,12 @@ const Conversation = require('./schema/convoSchema');
|
||||
const { getMessages, deleteMessages } = require('./Message');
|
||||
const logger = require('~/config/winston');
|
||||
|
||||
/**
|
||||
* Retrieves a single conversation for a given user and conversation ID.
|
||||
* @param {string} user - The user's ID.
|
||||
* @param {string} conversationId - The conversation's ID.
|
||||
* @returns {Promise<TConversation>} The conversation object.
|
||||
*/
|
||||
const getConvo = async (user, conversationId) => {
|
||||
try {
|
||||
return await Conversation.findOne({ user, conversationId }).lean();
|
||||
@@ -30,11 +36,35 @@ module.exports = {
|
||||
return { message: 'Error saving conversation' };
|
||||
}
|
||||
},
|
||||
getConvosByPage: async (user, pageNumber = 1, pageSize = 25) => {
|
||||
bulkSaveConvos: async (conversations) => {
|
||||
try {
|
||||
const totalConvos = (await Conversation.countDocuments({ user })) || 1;
|
||||
const bulkOps = conversations.map((convo) => ({
|
||||
updateOne: {
|
||||
filter: { conversationId: convo.conversationId, user: convo.user },
|
||||
update: convo,
|
||||
upsert: true,
|
||||
timestamps: false,
|
||||
},
|
||||
}));
|
||||
|
||||
const result = await Conversation.bulkWrite(bulkOps);
|
||||
return result;
|
||||
} catch (error) {
|
||||
logger.error('[saveBulkConversations] Error saving conversations in bulk', error);
|
||||
throw new Error('Failed to save conversations in bulk.');
|
||||
}
|
||||
},
|
||||
getConvosByPage: async (user, pageNumber = 1, pageSize = 25, isArchived = false) => {
|
||||
const query = { user };
|
||||
if (isArchived) {
|
||||
query.isArchived = true;
|
||||
} else {
|
||||
query.$or = [{ isArchived: false }, { isArchived: { $exists: false } }];
|
||||
}
|
||||
try {
|
||||
const totalConvos = (await Conversation.countDocuments(query)) || 1;
|
||||
const totalPages = Math.ceil(totalConvos / pageSize);
|
||||
const convos = await Conversation.find({ user })
|
||||
const convos = await Conversation.find(query)
|
||||
.sort({ updatedAt: -1 })
|
||||
.skip((pageNumber - 1) * pageSize)
|
||||
.limit(pageSize)
|
||||
|
||||
@@ -10,6 +10,7 @@ module.exports = {
|
||||
async saveMessage({
|
||||
user,
|
||||
endpoint,
|
||||
iconURL,
|
||||
messageId,
|
||||
newMessageId,
|
||||
conversationId,
|
||||
@@ -35,6 +36,7 @@ module.exports = {
|
||||
|
||||
const update = {
|
||||
user,
|
||||
iconURL,
|
||||
endpoint,
|
||||
messageId: newMessageId || messageId,
|
||||
conversationId,
|
||||
@@ -72,6 +74,25 @@ module.exports = {
|
||||
throw new Error('Failed to save message.');
|
||||
}
|
||||
},
|
||||
|
||||
async bulkSaveMessages(messages) {
|
||||
try {
|
||||
const bulkOps = messages.map((message) => ({
|
||||
updateOne: {
|
||||
filter: { messageId: message.messageId },
|
||||
update: message,
|
||||
upsert: true,
|
||||
},
|
||||
}));
|
||||
|
||||
const result = await Message.bulkWrite(bulkOps);
|
||||
return result;
|
||||
} catch (err) {
|
||||
logger.error('Error saving messages in bulk:', err);
|
||||
throw new Error('Failed to save messages in bulk.');
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Records a message in the database.
|
||||
*
|
||||
|
||||
@@ -39,6 +39,12 @@ module.exports = {
|
||||
try {
|
||||
const setter = { $set: {} };
|
||||
const update = { presetId, ...preset };
|
||||
if (preset.tools && Array.isArray(preset.tools)) {
|
||||
update.tools =
|
||||
preset.tools
|
||||
.map((tool) => tool?.pluginKey ?? tool)
|
||||
.filter((toolName) => typeof toolName === 'string') ?? [];
|
||||
}
|
||||
if (newPresetId) {
|
||||
update.presetId = newPresetId;
|
||||
}
|
||||
|
||||
@@ -88,6 +88,22 @@ const conversationPreset = {
|
||||
instructions: {
|
||||
type: String,
|
||||
},
|
||||
stop: { type: [{ type: String }], default: undefined },
|
||||
isArchived: {
|
||||
type: Boolean,
|
||||
default: false,
|
||||
},
|
||||
/* UI Components */
|
||||
iconURL: {
|
||||
type: String,
|
||||
},
|
||||
greeting: {
|
||||
type: String,
|
||||
},
|
||||
spec: {
|
||||
type: String,
|
||||
},
|
||||
tools: { type: [{ type: String }], default: undefined },
|
||||
};
|
||||
|
||||
const agentOptions = {
|
||||
|
||||
@@ -110,6 +110,10 @@ const messageSchema = mongoose.Schema(
|
||||
thread_id: {
|
||||
type: String,
|
||||
},
|
||||
/* frontend components */
|
||||
iconURL: {
|
||||
type: String,
|
||||
},
|
||||
},
|
||||
{ timestamps: true },
|
||||
);
|
||||
|
||||
@@ -41,6 +41,7 @@
|
||||
"@langchain/community": "^0.0.46",
|
||||
"@langchain/google-genai": "^0.0.11",
|
||||
"@langchain/google-vertexai": "^0.0.5",
|
||||
"agenda": "^5.0.0",
|
||||
"axios": "^1.3.4",
|
||||
"bcryptjs": "^2.4.3",
|
||||
"cheerio": "^1.0.0-rc.12",
|
||||
|
||||
@@ -55,6 +55,9 @@ const getAvailablePluginsController = async (req, res) => {
|
||||
return;
|
||||
}
|
||||
|
||||
/** @type {{ filteredTools: string[] }} */
|
||||
const { filteredTools = [] } = req.app.locals;
|
||||
|
||||
const pluginManifest = await fs.readFile(req.app.locals.paths.pluginManifest, 'utf8');
|
||||
|
||||
const jsonData = JSON.parse(pluginManifest);
|
||||
@@ -67,7 +70,10 @@ const getAvailablePluginsController = async (req, res) => {
|
||||
return plugin;
|
||||
}
|
||||
});
|
||||
const plugins = await addOpenAPISpecs(authenticatedPlugins);
|
||||
|
||||
let plugins = await addOpenAPISpecs(authenticatedPlugins);
|
||||
plugins = plugins.filter((plugin) => !filteredTools.includes(plugin.pluginKey));
|
||||
|
||||
await cache.set(CacheKeys.PLUGINS, plugins);
|
||||
res.status(200).json(plugins);
|
||||
} catch (error) {
|
||||
|
||||
@@ -73,6 +73,8 @@ const createAbortController = (req, res, getAbortData) => {
|
||||
...responseData,
|
||||
conversationId,
|
||||
finish_reason: 'incomplete',
|
||||
endpoint: endpointOption.endpoint,
|
||||
iconURL: endpointOption.iconURL,
|
||||
model: endpointOption.modelOptions.model,
|
||||
unfinished: false,
|
||||
error: false,
|
||||
|
||||
@@ -7,6 +7,8 @@ const anthropic = require('~/server/services/Endpoints/anthropic');
|
||||
const openAI = require('~/server/services/Endpoints/openAI');
|
||||
const custom = require('~/server/services/Endpoints/custom');
|
||||
const google = require('~/server/services/Endpoints/google');
|
||||
const enforceModelSpec = require('./enforceModelSpec');
|
||||
const { handleError } = require('~/server/utils');
|
||||
|
||||
const buildFunction = {
|
||||
[EModelEndpoint.openAI]: openAI.buildOptions,
|
||||
@@ -21,6 +23,31 @@ const buildFunction = {
|
||||
async function buildEndpointOption(req, res, next) {
|
||||
const { endpoint, endpointType } = req.body;
|
||||
const parsedBody = parseConvo({ endpoint, endpointType, conversation: req.body });
|
||||
|
||||
if (req.app.locals.modelSpecs?.list && req.app.locals.modelSpecs?.enforce) {
|
||||
/** @type {{ list: TModelSpec[] }}*/
|
||||
const { list } = req.app.locals.modelSpecs;
|
||||
const { spec } = parsedBody;
|
||||
|
||||
if (!spec) {
|
||||
return handleError(res, { text: 'No model spec selected' });
|
||||
}
|
||||
|
||||
const currentModelSpec = list.find((s) => s.name === spec);
|
||||
if (!currentModelSpec) {
|
||||
return handleError(res, { text: 'Invalid model spec' });
|
||||
}
|
||||
|
||||
if (endpoint !== currentModelSpec.preset.endpoint) {
|
||||
return handleError(res, { text: 'Model spec mismatch' });
|
||||
}
|
||||
|
||||
const isValidModelSpec = enforceModelSpec(currentModelSpec, parsedBody);
|
||||
if (!isValidModelSpec) {
|
||||
return handleError(res, { text: 'Model spec mismatch' });
|
||||
}
|
||||
}
|
||||
|
||||
req.body.endpointOption = buildFunction[endpointType ?? endpoint](
|
||||
endpoint,
|
||||
parsedBody,
|
||||
|
||||
25
api/server/middleware/checkDomainAllowed.js
Normal file
25
api/server/middleware/checkDomainAllowed.js
Normal file
@@ -0,0 +1,25 @@
|
||||
const { isDomainAllowed } = require('~/server/services/AuthService');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
* Checks the domain's social login is allowed
|
||||
*
|
||||
* @async
|
||||
* @function
|
||||
* @param {Object} req - Express request object.
|
||||
* @param {Object} res - Express response object.
|
||||
* @param {Function} next - Next middleware function.
|
||||
*
|
||||
* @returns {Promise<function|Object>} - Returns a Promise which when resolved calls next middleware if the domain's email is allowed
|
||||
*/
|
||||
const checkDomainAllowed = async (req, res, next = () => {}) => {
|
||||
const email = req?.user?.email;
|
||||
if (email && !(await isDomainAllowed(email))) {
|
||||
logger.error(`[Social Login] [Social Login not allowed] [Email: ${email}]`);
|
||||
return res.redirect('/login');
|
||||
} else {
|
||||
return next();
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = checkDomainAllowed;
|
||||
47
api/server/middleware/enforceModelSpec.js
Normal file
47
api/server/middleware/enforceModelSpec.js
Normal file
@@ -0,0 +1,47 @@
|
||||
const interchangeableKeys = new Map([
|
||||
['chatGptLabel', ['modelLabel']],
|
||||
['modelLabel', ['chatGptLabel']],
|
||||
]);
|
||||
|
||||
/**
|
||||
* Middleware to enforce the model spec for a conversation
|
||||
* @param {TModelSpec} modelSpec - The model spec to enforce
|
||||
* @param {TConversation} parsedBody - The parsed body of the conversation
|
||||
* @returns {boolean} - Whether the model spec is enforced
|
||||
*/
|
||||
const enforceModelSpec = (modelSpec, parsedBody) => {
|
||||
for (const [key, value] of Object.entries(modelSpec.preset)) {
|
||||
if (key === 'endpoint') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!checkMatch(key, value, parsedBody)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks if there is a match for the given key and value in the parsed body
|
||||
* or any of its interchangeable keys.
|
||||
* @param {string} key
|
||||
* @param {any} value
|
||||
* @param {TConversation} parsedBody
|
||||
* @returns {boolean}
|
||||
*/
|
||||
const checkMatch = (key, value, parsedBody) => {
|
||||
if (parsedBody[key] === value) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (interchangeableKeys.has(key)) {
|
||||
return interchangeableKeys
|
||||
.get(key)
|
||||
.some((interchangeableKey) => parsedBody[interchangeableKey] === value);
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
module.exports = enforceModelSpec;
|
||||
69
api/server/middleware/importLimiters.js
Normal file
69
api/server/middleware/importLimiters.js
Normal file
@@ -0,0 +1,69 @@
|
||||
const rateLimit = require('express-rate-limit');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const logViolation = require('~/cache/logViolation');
|
||||
|
||||
const getEnvironmentVariables = () => {
|
||||
const IMPORT_IP_MAX = parseInt(process.env.IMPORT_IP_MAX) || 100;
|
||||
const IMPORT_IP_WINDOW = parseInt(process.env.IMPORT_IP_WINDOW) || 15;
|
||||
const IMPORT_USER_MAX = parseInt(process.env.IMPORT_USER_MAX) || 50;
|
||||
const IMPORT_USER_WINDOW = parseInt(process.env.IMPORT_USER_WINDOW) || 15;
|
||||
|
||||
const importIpWindowMs = IMPORT_IP_WINDOW * 60 * 1000;
|
||||
const importIpMax = IMPORT_IP_MAX;
|
||||
const importIpWindowInMinutes = importIpWindowMs / 60000;
|
||||
|
||||
const importUserWindowMs = IMPORT_USER_WINDOW * 60 * 1000;
|
||||
const importUserMax = IMPORT_USER_MAX;
|
||||
const importUserWindowInMinutes = importUserWindowMs / 60000;
|
||||
|
||||
return {
|
||||
importIpWindowMs,
|
||||
importIpMax,
|
||||
importIpWindowInMinutes,
|
||||
importUserWindowMs,
|
||||
importUserMax,
|
||||
importUserWindowInMinutes,
|
||||
};
|
||||
};
|
||||
|
||||
const createImportHandler = (ip = true) => {
|
||||
const { importIpMax, importIpWindowInMinutes, importUserMax, importUserWindowInMinutes } =
|
||||
getEnvironmentVariables();
|
||||
|
||||
return async (req, res) => {
|
||||
const type = ViolationTypes.FILE_UPLOAD_LIMIT;
|
||||
const errorMessage = {
|
||||
type,
|
||||
max: ip ? importIpMax : importUserMax,
|
||||
limiter: ip ? 'ip' : 'user',
|
||||
windowInMinutes: ip ? importIpWindowInMinutes : importUserWindowInMinutes,
|
||||
};
|
||||
|
||||
await logViolation(req, res, type, errorMessage);
|
||||
res.status(429).json({ message: 'Too many conversation import requests. Try again later' });
|
||||
};
|
||||
};
|
||||
|
||||
const createImportLimiters = () => {
|
||||
const { importIpWindowMs, importIpMax, importUserWindowMs, importUserMax } =
|
||||
getEnvironmentVariables();
|
||||
|
||||
const importIpLimiter = rateLimit({
|
||||
windowMs: importIpWindowMs,
|
||||
max: importIpMax,
|
||||
handler: createImportHandler(),
|
||||
});
|
||||
|
||||
const importUserLimiter = rateLimit({
|
||||
windowMs: importUserWindowMs,
|
||||
max: importUserMax,
|
||||
handler: createImportHandler(false),
|
||||
keyGenerator: function (req) {
|
||||
return req.user?.id; // Use the user ID or NULL if not available
|
||||
},
|
||||
});
|
||||
|
||||
return { importIpLimiter, importUserLimiter };
|
||||
};
|
||||
|
||||
module.exports = { createImportLimiters };
|
||||
@@ -1,5 +1,6 @@
|
||||
const abortMiddleware = require('./abortMiddleware');
|
||||
const checkBan = require('./checkBan');
|
||||
const checkDomainAllowed = require('./checkDomainAllowed');
|
||||
const uaParser = require('./uaParser');
|
||||
const setHeaders = require('./setHeaders');
|
||||
const loginLimiter = require('./loginLimiter');
|
||||
@@ -17,6 +18,7 @@ const validateRegistration = require('./validateRegistration');
|
||||
const validateImageRequest = require('./validateImageRequest');
|
||||
const moderateText = require('./moderateText');
|
||||
const noIndex = require('./noIndex');
|
||||
const importLimiters = require('./importLimiters');
|
||||
|
||||
module.exports = {
|
||||
...uploadLimiters,
|
||||
@@ -38,4 +40,6 @@ module.exports = {
|
||||
validateModel,
|
||||
moderateText,
|
||||
noIndex,
|
||||
...importLimiters,
|
||||
checkDomainAllowed,
|
||||
};
|
||||
|
||||
@@ -5,7 +5,6 @@ const { actionDelimiter, EModelEndpoint } = require('librechat-data-provider');
|
||||
const { initializeClient } = require('~/server/services/Endpoints/assistants');
|
||||
const { updateAction, getActions, deleteAction } = require('~/models/Action');
|
||||
const { updateAssistant, getAssistant } = require('~/models/Assistant');
|
||||
const { withSession } = require('~/server/utils');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const router = express.Router();
|
||||
@@ -109,10 +108,10 @@ router.post('/:assistant_id', async (req, res) => {
|
||||
})),
|
||||
);
|
||||
|
||||
let updatedAssistant = await openai.beta.assistants.update(assistant_id, { tools });
|
||||
const promises = [];
|
||||
promises.push(
|
||||
withSession(
|
||||
updateAssistant,
|
||||
updateAssistant(
|
||||
{ assistant_id },
|
||||
{
|
||||
actions,
|
||||
@@ -120,29 +119,26 @@ router.post('/:assistant_id', async (req, res) => {
|
||||
},
|
||||
),
|
||||
);
|
||||
promises.push(openai.beta.assistants.update(assistant_id, { tools }));
|
||||
promises.push(
|
||||
withSession(updateAction, { action_id }, { metadata, assistant_id, user: req.user.id }),
|
||||
);
|
||||
promises.push(updateAction({ action_id }, { metadata, assistant_id, user: req.user.id }));
|
||||
|
||||
/** @type {[AssistantDocument, Assistant, Action]} */
|
||||
const resolved = await Promise.all(promises);
|
||||
/** @type {[AssistantDocument, Action]} */
|
||||
let [assistantDocument, updatedAction] = await Promise.all(promises);
|
||||
const sensitiveFields = ['api_key', 'oauth_client_id', 'oauth_client_secret'];
|
||||
for (let field of sensitiveFields) {
|
||||
if (resolved[2].metadata[field]) {
|
||||
delete resolved[2].metadata[field];
|
||||
if (updatedAction.metadata[field]) {
|
||||
delete updatedAction.metadata[field];
|
||||
}
|
||||
}
|
||||
|
||||
/* Map Azure OpenAI model to the assistant as defined by config */
|
||||
if (req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) {
|
||||
resolved[1] = {
|
||||
...resolved[1],
|
||||
updatedAssistant = {
|
||||
...updatedAssistant,
|
||||
model: req.body.model,
|
||||
};
|
||||
}
|
||||
|
||||
res.json(resolved);
|
||||
res.json([assistantDocument, updatedAssistant, updatedAction]);
|
||||
} catch (error) {
|
||||
const message = 'Trouble updating the Assistant Action';
|
||||
logger.error(message, error);
|
||||
@@ -190,10 +186,11 @@ router.delete('/:assistant_id/:action_id/:model', async (req, res) => {
|
||||
(tool) => !(tool.function && tool.function.name.includes(domain)),
|
||||
);
|
||||
|
||||
await openai.beta.assistants.update(assistant_id, { tools: updatedTools });
|
||||
|
||||
const promises = [];
|
||||
promises.push(
|
||||
withSession(
|
||||
updateAssistant,
|
||||
updateAssistant(
|
||||
{ assistant_id },
|
||||
{
|
||||
actions: updatedActions,
|
||||
@@ -201,8 +198,7 @@ router.delete('/:assistant_id/:action_id/:model', async (req, res) => {
|
||||
},
|
||||
),
|
||||
);
|
||||
promises.push(openai.beta.assistants.update(assistant_id, { tools: updatedTools }));
|
||||
promises.push(withSession(deleteAction, { action_id }));
|
||||
promises.push(deleteAction({ action_id }));
|
||||
|
||||
await Promise.all(promises);
|
||||
res.status(200).json({ message: 'Action deleted successfully' });
|
||||
|
||||
@@ -14,6 +14,7 @@ router.get('/', async function (req, res) {
|
||||
};
|
||||
|
||||
try {
|
||||
/** @type {TStartupConfig} */
|
||||
const payload = {
|
||||
appTitle: process.env.APP_TITLE || 'LibreChat',
|
||||
socialLogins: req.app.locals.socialLogins ?? defaultSocialLogins,
|
||||
@@ -44,7 +45,8 @@ router.get('/', async function (req, res) {
|
||||
isEnabled(process.env.SHOW_BIRTHDAY_ICON) ||
|
||||
process.env.SHOW_BIRTHDAY_ICON === '',
|
||||
helpAndFaqURL: process.env.HELP_AND_FAQ_URL || 'https://librechat.ai',
|
||||
interface: req.app.locals.interface,
|
||||
interface: req.app.locals.interfaceConfig,
|
||||
modelSpecs: req.app.locals.modelSpecs,
|
||||
};
|
||||
|
||||
if (typeof process.env.CUSTOM_FOOTER === 'string') {
|
||||
|
||||
@@ -1,8 +1,14 @@
|
||||
const multer = require('multer');
|
||||
const express = require('express');
|
||||
const { CacheKeys } = require('librechat-data-provider');
|
||||
const { initializeClient } = require('~/server/services/Endpoints/assistants');
|
||||
const { getConvosByPage, deleteConvos, getConvo, saveConvo } = require('~/models/Conversation');
|
||||
const { IMPORT_CONVERSATION_JOB_NAME } = require('~/server/utils/import/jobDefinition');
|
||||
const { storage, importFileFilter } = require('~/server/routes/files/multer');
|
||||
const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
|
||||
const { forkConversation } = require('~/server/utils/import/fork');
|
||||
const { createImportLimiters } = require('~/server/middleware');
|
||||
const jobScheduler = require('~/server/utils/jobScheduler');
|
||||
const getLogStores = require('~/cache/getLogStores');
|
||||
const { sleep } = require('~/server/utils');
|
||||
const { logger } = require('~/config');
|
||||
@@ -18,7 +24,15 @@ router.get('/', async (req, res) => {
|
||||
return res.status(400).json({ error: 'Invalid page number' });
|
||||
}
|
||||
|
||||
res.status(200).send(await getConvosByPage(req.user.id, pageNumber));
|
||||
let pageSize = req.query.pageSize || 25;
|
||||
pageSize = parseInt(pageSize, 10);
|
||||
|
||||
if (isNaN(pageSize) || pageSize < 1) {
|
||||
return res.status(400).json({ error: 'Invalid page size' });
|
||||
}
|
||||
const isArchived = req.query.isArchived === 'true';
|
||||
|
||||
res.status(200).send(await getConvosByPage(req.user.id, pageNumber, pageSize, isArchived));
|
||||
});
|
||||
|
||||
router.get('/:conversationId', async (req, res) => {
|
||||
@@ -99,4 +113,80 @@ router.post('/update', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
const { importIpLimiter, importUserLimiter } = createImportLimiters();
|
||||
const upload = multer({ storage: storage, fileFilter: importFileFilter });
|
||||
|
||||
/**
|
||||
* Imports a conversation from a JSON file and saves it to the database.
|
||||
* @route POST /import
|
||||
* @param {Express.Multer.File} req.file - The JSON file to import.
|
||||
* @returns {object} 201 - success response - application/json
|
||||
*/
|
||||
router.post(
|
||||
'/import',
|
||||
importIpLimiter,
|
||||
importUserLimiter,
|
||||
upload.single('file'),
|
||||
async (req, res) => {
|
||||
try {
|
||||
const filepath = req.file.path;
|
||||
const job = await jobScheduler.now(IMPORT_CONVERSATION_JOB_NAME, filepath, req.user.id);
|
||||
|
||||
res.status(201).json({ message: 'Import started', jobId: job.id });
|
||||
} catch (error) {
|
||||
logger.error('Error processing file', error);
|
||||
res.status(500).send('Error processing file');
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* POST /fork
|
||||
* This route handles forking a conversation based on the TForkConvoRequest and responds with TForkConvoResponse.
|
||||
* @route POST /fork
|
||||
* @param {express.Request<{}, TForkConvoResponse, TForkConvoRequest>} req - Express request object.
|
||||
* @param {express.Response<TForkConvoResponse>} res - Express response object.
|
||||
* @returns {Promise<void>} - The response after forking the conversation.
|
||||
*/
|
||||
router.post('/fork', async (req, res) => {
|
||||
try {
|
||||
/** @type {TForkConvoRequest} */
|
||||
const { conversationId, messageId, option, splitAtTarget, latestMessageId } = req.body;
|
||||
const result = await forkConversation({
|
||||
requestUserId: req.user.id,
|
||||
originalConvoId: conversationId,
|
||||
targetMessageId: messageId,
|
||||
latestMessageId,
|
||||
records: true,
|
||||
splitAtTarget,
|
||||
option,
|
||||
});
|
||||
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
logger.error('Error forking conversation', error);
|
||||
res.status(500).send('Error forking conversation');
|
||||
}
|
||||
});
|
||||
|
||||
// Get the status of an import job for polling
|
||||
router.get('/import/jobs/:jobId', async (req, res) => {
|
||||
try {
|
||||
const { jobId } = req.params;
|
||||
const { userId, ...jobStatus } = await jobScheduler.getJobStatus(jobId);
|
||||
if (!jobStatus) {
|
||||
return res.status(404).json({ message: 'Job not found.' });
|
||||
}
|
||||
|
||||
if (userId !== req.user.id) {
|
||||
return res.status(403).json({ message: 'Unauthorized' });
|
||||
}
|
||||
|
||||
res.json(jobStatus);
|
||||
} catch (error) {
|
||||
logger.error('Error getting job details', error);
|
||||
res.status(500).send('Error getting job details');
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
const express = require('express');
|
||||
const createMulterInstance = require('./multer');
|
||||
const { uaParser, checkBan, requireJwtAuth, createFileLimiters } = require('~/server/middleware');
|
||||
const { createMulterInstance } = require('./multer');
|
||||
|
||||
const files = require('./files');
|
||||
const images = require('./images');
|
||||
|
||||
@@ -20,6 +20,16 @@ const storage = multer.diskStorage({
|
||||
},
|
||||
});
|
||||
|
||||
const importFileFilter = (req, file, cb) => {
|
||||
if (file.mimetype === 'application/json') {
|
||||
cb(null, true);
|
||||
} else if (path.extname(file.originalname).toLowerCase() === '.json') {
|
||||
cb(null, true);
|
||||
} else {
|
||||
cb(new Error('Only JSON files are allowed'), false);
|
||||
}
|
||||
};
|
||||
|
||||
const fileFilter = (req, file, cb) => {
|
||||
if (!file) {
|
||||
return cb(new Error('No file provided'), false);
|
||||
@@ -42,4 +52,4 @@ const createMulterInstance = async () => {
|
||||
});
|
||||
};
|
||||
|
||||
module.exports = createMulterInstance;
|
||||
module.exports = { createMulterInstance, storage, importFileFilter };
|
||||
|
||||
@@ -4,7 +4,7 @@ const passport = require('passport');
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { setAuthTokens } = require('~/server/services/AuthService');
|
||||
const { loginLimiter, checkBan } = require('~/server/middleware');
|
||||
const { loginLimiter, checkBan, checkDomainAllowed } = require('~/server/middleware');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const domains = {
|
||||
@@ -16,6 +16,7 @@ router.use(loginLimiter);
|
||||
|
||||
const oauthHandler = async (req, res) => {
|
||||
try {
|
||||
await checkDomainAllowed(req, res);
|
||||
await checkBan(req, res);
|
||||
if (req.banned) {
|
||||
return;
|
||||
|
||||
@@ -1,14 +1,10 @@
|
||||
const {
|
||||
FileSources,
|
||||
EModelEndpoint,
|
||||
EImageOutputType,
|
||||
defaultSocialLogins,
|
||||
} = require('librechat-data-provider');
|
||||
const { FileSources, EModelEndpoint, getConfigDefaults } = require('librechat-data-provider');
|
||||
const { checkVariables, checkHealth, checkConfig, checkAzureVariables } = require('./start/checks');
|
||||
const { azureAssistantsDefaults, assistantsConfigSetup } = require('./start/assistants');
|
||||
const { initializeFirebase } = require('./Files/Firebase/initialize');
|
||||
const loadCustomConfig = require('./Config/loadCustomConfig');
|
||||
const handleRateLimits = require('./Config/handleRateLimits');
|
||||
const { loadDefaultInterface } = require('./start/interface');
|
||||
const { azureConfigSetup } = require('./start/azureOpenAI');
|
||||
const { loadAndFormatTools } = require('./ToolService');
|
||||
const paths = require('~/config/paths');
|
||||
@@ -22,9 +18,12 @@ const paths = require('~/config/paths');
|
||||
const AppService = async (app) => {
|
||||
/** @type {TCustomConfig}*/
|
||||
const config = (await loadCustomConfig()) ?? {};
|
||||
const configDefaults = getConfigDefaults();
|
||||
|
||||
const filteredTools = config.filteredTools;
|
||||
const fileStrategy = config.fileStrategy ?? configDefaults.fileStrategy;
|
||||
const imageOutputType = config?.imageOutputType ?? configDefaults.imageOutputType;
|
||||
|
||||
const fileStrategy = config.fileStrategy ?? FileSources.local;
|
||||
const imageOutputType = config?.imageOutputType ?? EImageOutputType.PNG;
|
||||
process.env.CDN_PROVIDER = fileStrategy;
|
||||
|
||||
checkVariables();
|
||||
@@ -37,24 +36,22 @@ const AppService = async (app) => {
|
||||
/** @type {Record<string, FunctionTool} */
|
||||
const availableTools = loadAndFormatTools({
|
||||
directory: paths.structuredTools,
|
||||
filter: new Set([
|
||||
'ChatTool.js',
|
||||
'CodeSherpa.js',
|
||||
'CodeSherpaTools.js',
|
||||
'E2BTools.js',
|
||||
'extractionChain.js',
|
||||
]),
|
||||
adminFilter: filteredTools,
|
||||
});
|
||||
|
||||
const socialLogins = config?.registration?.socialLogins ?? defaultSocialLogins;
|
||||
const socialLogins =
|
||||
config?.registration?.socialLogins ?? configDefaults?.registration?.socialLogins;
|
||||
const interfaceConfig = loadDefaultInterface(config, configDefaults);
|
||||
|
||||
if (!Object.keys(config).length) {
|
||||
app.locals = {
|
||||
paths,
|
||||
fileStrategy,
|
||||
socialLogins,
|
||||
filteredTools,
|
||||
availableTools,
|
||||
imageOutputType,
|
||||
interfaceConfig,
|
||||
};
|
||||
|
||||
return;
|
||||
@@ -85,9 +82,11 @@ const AppService = async (app) => {
|
||||
paths,
|
||||
socialLogins,
|
||||
fileStrategy,
|
||||
filteredTools,
|
||||
availableTools,
|
||||
imageOutputType,
|
||||
interface: config?.interface,
|
||||
interfaceConfig,
|
||||
modelSpecs: config.modelSpecs,
|
||||
fileConfig: config?.fileConfig,
|
||||
secureImageLinks: config?.secureImageLinks,
|
||||
...endpointLocals,
|
||||
|
||||
@@ -93,6 +93,16 @@ describe('AppService', () => {
|
||||
expect(app.locals).toEqual({
|
||||
socialLogins: ['testLogin'],
|
||||
fileStrategy: 'testStrategy',
|
||||
interfaceConfig: expect.objectContaining({
|
||||
privacyPolicy: undefined,
|
||||
termsOfService: undefined,
|
||||
endpointsMenu: true,
|
||||
modelSelect: true,
|
||||
parameters: true,
|
||||
sidePanel: true,
|
||||
presets: true,
|
||||
}),
|
||||
modelSpecs: undefined,
|
||||
availableTools: {
|
||||
ExampleTool: {
|
||||
type: 'function',
|
||||
@@ -109,7 +119,6 @@ describe('AppService', () => {
|
||||
},
|
||||
paths: expect.anything(),
|
||||
imageOutputType: expect.any(String),
|
||||
interface: undefined,
|
||||
fileConfig: undefined,
|
||||
secureImageLinks: undefined,
|
||||
});
|
||||
@@ -181,7 +190,6 @@ describe('AppService', () => {
|
||||
|
||||
expect(loadAndFormatTools).toHaveBeenCalledWith({
|
||||
directory: expect.anything(),
|
||||
filter: expect.anything(),
|
||||
});
|
||||
|
||||
expect(app.locals.availableTools.ExampleTool).toBeDefined();
|
||||
@@ -339,6 +347,69 @@ describe('AppService', () => {
|
||||
expect(process.env.FILE_UPLOAD_USER_MAX).toEqual('initialUserMax');
|
||||
expect(process.env.FILE_UPLOAD_USER_WINDOW).toEqual('initialUserWindow');
|
||||
});
|
||||
|
||||
it('should not modify IMPORT environment variables without rate limits', async () => {
|
||||
// Setup initial environment variables
|
||||
process.env.IMPORT_IP_MAX = '10';
|
||||
process.env.IMPORT_IP_WINDOW = '15';
|
||||
process.env.IMPORT_USER_MAX = '5';
|
||||
process.env.IMPORT_USER_WINDOW = '20';
|
||||
|
||||
const initialEnv = { ...process.env };
|
||||
|
||||
await AppService(app);
|
||||
|
||||
// Expect environment variables to remain unchanged
|
||||
expect(process.env.IMPORT_IP_MAX).toEqual(initialEnv.IMPORT_IP_MAX);
|
||||
expect(process.env.IMPORT_IP_WINDOW).toEqual(initialEnv.IMPORT_IP_WINDOW);
|
||||
expect(process.env.IMPORT_USER_MAX).toEqual(initialEnv.IMPORT_USER_MAX);
|
||||
expect(process.env.IMPORT_USER_WINDOW).toEqual(initialEnv.IMPORT_USER_WINDOW);
|
||||
});
|
||||
|
||||
it('should correctly set IMPORT environment variables based on rate limits', async () => {
|
||||
// Define and mock a custom configuration with rate limits
|
||||
const importLimitsConfig = {
|
||||
rateLimits: {
|
||||
conversationsImport: {
|
||||
ipMax: '150',
|
||||
ipWindowInMinutes: '60',
|
||||
userMax: '50',
|
||||
userWindowInMinutes: '30',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
|
||||
Promise.resolve(importLimitsConfig),
|
||||
);
|
||||
|
||||
await AppService(app);
|
||||
|
||||
// Verify that process.env has been updated according to the rate limits config
|
||||
expect(process.env.IMPORT_IP_MAX).toEqual('150');
|
||||
expect(process.env.IMPORT_IP_WINDOW).toEqual('60');
|
||||
expect(process.env.IMPORT_USER_MAX).toEqual('50');
|
||||
expect(process.env.IMPORT_USER_WINDOW).toEqual('30');
|
||||
});
|
||||
|
||||
it('should fallback to default IMPORT environment variables when rate limits are unspecified', async () => {
|
||||
// Setup initial environment variables to non-default values
|
||||
process.env.IMPORT_IP_MAX = 'initialMax';
|
||||
process.env.IMPORT_IP_WINDOW = 'initialWindow';
|
||||
process.env.IMPORT_USER_MAX = 'initialUserMax';
|
||||
process.env.IMPORT_USER_WINDOW = 'initialUserWindow';
|
||||
|
||||
// Mock a custom configuration without specific rate limits
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve({}));
|
||||
|
||||
await AppService(app);
|
||||
|
||||
// Verify that process.env falls back to the initial values
|
||||
expect(process.env.IMPORT_IP_MAX).toEqual('initialMax');
|
||||
expect(process.env.IMPORT_IP_WINDOW).toEqual('initialWindow');
|
||||
expect(process.env.IMPORT_USER_MAX).toEqual('initialUserMax');
|
||||
expect(process.env.IMPORT_USER_WINDOW).toEqual('initialUserWindow');
|
||||
});
|
||||
});
|
||||
|
||||
describe('AppService updating app.locals and issuing warnings', () => {
|
||||
|
||||
@@ -2,7 +2,7 @@ const crypto = require('crypto');
|
||||
const bcrypt = require('bcryptjs');
|
||||
const { errorsToString } = require('librechat-data-provider');
|
||||
const { registerSchema } = require('~/strategies/validators');
|
||||
const getCustomConfig = require('~/server/services/Config/getCustomConfig');
|
||||
const isDomainAllowed = require('./isDomainAllowed');
|
||||
const Token = require('~/models/schema/tokenSchema');
|
||||
const { sendEmail } = require('~/server/utils');
|
||||
const Session = require('~/models/Session');
|
||||
@@ -14,27 +14,6 @@ const domains = {
|
||||
server: process.env.DOMAIN_SERVER,
|
||||
};
|
||||
|
||||
async function isDomainAllowed(email) {
|
||||
if (!email) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const domain = email.split('@')[1];
|
||||
|
||||
if (!domain) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const customConfig = await getCustomConfig();
|
||||
if (!customConfig) {
|
||||
return true;
|
||||
} else if (!customConfig?.registration?.allowedDomains) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return customConfig.registration.allowedDomains.includes(domain);
|
||||
}
|
||||
|
||||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
const getCustomConfig = require('~/server/services/Config/getCustomConfig');
|
||||
const { isDomainAllowed } = require('./AuthService');
|
||||
|
||||
jest.mock('~/server/services/Config/getCustomConfig', () => jest.fn());
|
||||
|
||||
describe('isDomainAllowed', () => {
|
||||
it('should allow domain when customConfig is not available', async () => {
|
||||
getCustomConfig.mockResolvedValue(null);
|
||||
await expect(isDomainAllowed('test@domain1.com')).resolves.toBe(true);
|
||||
});
|
||||
|
||||
it('should allow domain when allowedDomains is not defined in customConfig', async () => {
|
||||
getCustomConfig.mockResolvedValue({});
|
||||
await expect(isDomainAllowed('test@domain1.com')).resolves.toBe(true);
|
||||
});
|
||||
|
||||
it('should reject an email if it is falsy', async () => {
|
||||
getCustomConfig.mockResolvedValue({});
|
||||
await expect(isDomainAllowed('')).resolves.toBe(false);
|
||||
});
|
||||
|
||||
it('should allow a domain if it is included in the allowedDomains', async () => {
|
||||
getCustomConfig.mockResolvedValue({
|
||||
registration: {
|
||||
allowedDomains: ['domain1.com', 'domain2.com'],
|
||||
},
|
||||
});
|
||||
await expect(isDomainAllowed('user@domain1.com')).resolves.toBe(true);
|
||||
});
|
||||
|
||||
it('should reject a domain if it is not included in the allowedDomains', async () => {
|
||||
getCustomConfig.mockResolvedValue({
|
||||
registration: {
|
||||
allowedDomains: ['domain1.com', 'domain2.com'],
|
||||
},
|
||||
});
|
||||
await expect(isDomainAllowed('user@domain3.com')).resolves.toBe(false);
|
||||
});
|
||||
});
|
||||
@@ -6,17 +6,24 @@ const handleRateLimits = (rateLimits) => {
|
||||
if (!rateLimits) {
|
||||
return;
|
||||
}
|
||||
const { fileUploads } = rateLimits;
|
||||
if (!fileUploads) {
|
||||
return;
|
||||
const { fileUploads, conversationsImport } = rateLimits;
|
||||
if (fileUploads) {
|
||||
process.env.FILE_UPLOAD_IP_MAX = fileUploads.ipMax ?? process.env.FILE_UPLOAD_IP_MAX;
|
||||
process.env.FILE_UPLOAD_IP_WINDOW =
|
||||
fileUploads.ipWindowInMinutes ?? process.env.FILE_UPLOAD_IP_WINDOW;
|
||||
process.env.FILE_UPLOAD_USER_MAX = fileUploads.userMax ?? process.env.FILE_UPLOAD_USER_MAX;
|
||||
process.env.FILE_UPLOAD_USER_WINDOW =
|
||||
fileUploads.userWindowInMinutes ?? process.env.FILE_UPLOAD_USER_WINDOW;
|
||||
}
|
||||
|
||||
process.env.FILE_UPLOAD_IP_MAX = fileUploads.ipMax ?? process.env.FILE_UPLOAD_IP_MAX;
|
||||
process.env.FILE_UPLOAD_IP_WINDOW =
|
||||
fileUploads.ipWindowInMinutes ?? process.env.FILE_UPLOAD_IP_WINDOW;
|
||||
process.env.FILE_UPLOAD_USER_MAX = fileUploads.userMax ?? process.env.FILE_UPLOAD_USER_MAX;
|
||||
process.env.FILE_UPLOAD_USER_WINDOW =
|
||||
fileUploads.userWindowInMinutes ?? process.env.FILE_UPLOAD_USER_WINDOW;
|
||||
if (conversationsImport) {
|
||||
process.env.IMPORT_IP_MAX = conversationsImport.ipMax ?? process.env.IMPORT_IP_MAX;
|
||||
process.env.IMPORT_IP_WINDOW =
|
||||
conversationsImport.ipWindowInMinutes ?? process.env.IMPORT_IP_WINDOW;
|
||||
process.env.IMPORT_USER_MAX = conversationsImport.userMax ?? process.env.IMPORT_USER_MAX;
|
||||
process.env.IMPORT_USER_WINDOW =
|
||||
conversationsImport.userWindowInMinutes ?? process.env.IMPORT_USER_WINDOW;
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = handleRateLimits;
|
||||
|
||||
@@ -46,6 +46,15 @@ const exampleConfig = {
|
||||
fetch: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'MLX',
|
||||
apiKey: 'user_provided',
|
||||
baseURL: 'http://localhost:8080/v1/',
|
||||
models: {
|
||||
default: ['Meta-Llama-3-8B-Instruct-4bit'],
|
||||
fetch: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
@@ -42,6 +42,12 @@ async function loadCustomConfig() {
|
||||
i === 0 && i++;
|
||||
return null;
|
||||
}
|
||||
|
||||
if (customConfig.reason || customConfig.stack) {
|
||||
i === 0 && logger.error('Config file YAML format is invalid:', customConfig);
|
||||
i === 0 && i++;
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof customConfig === 'string') {
|
||||
@@ -84,6 +90,10 @@ Please specify a correct \`imageOutputType\` value (case-sensitive).
|
||||
await cache.set(CacheKeys.CUSTOM_CONFIG, customConfig);
|
||||
}
|
||||
|
||||
if (result.data.modelSpecs) {
|
||||
customConfig.modelSpecs = result.data.modelSpecs;
|
||||
}
|
||||
|
||||
return customConfig;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
const buildOptions = (endpoint, parsedBody) => {
|
||||
const { modelLabel, promptPrefix, resendFiles, ...rest } = parsedBody;
|
||||
const { modelLabel, promptPrefix, resendFiles, iconURL, greeting, spec, ...rest } = parsedBody;
|
||||
const endpointOption = {
|
||||
endpoint,
|
||||
modelLabel,
|
||||
promptPrefix,
|
||||
resendFiles,
|
||||
iconURL,
|
||||
greeting,
|
||||
spec,
|
||||
modelOptions: {
|
||||
...rest,
|
||||
},
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
const buildOptions = (endpoint, parsedBody) => {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { promptPrefix, assistant_id, ...rest } = parsedBody;
|
||||
const { promptPrefix, assistant_id, iconURL, greeting, spec, ...rest } = parsedBody;
|
||||
const endpointOption = {
|
||||
endpoint,
|
||||
promptPrefix,
|
||||
assistant_id,
|
||||
iconURL,
|
||||
greeting,
|
||||
spec,
|
||||
modelOptions: {
|
||||
...rest,
|
||||
},
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const buildOptions = (endpoint, parsedBody, endpointType) => {
|
||||
const { chatGptLabel, promptPrefix, resendFiles, imageDetail, ...rest } = parsedBody;
|
||||
const { chatGptLabel, promptPrefix, resendFiles, imageDetail, iconURL, greeting, spec, ...rest } =
|
||||
parsedBody;
|
||||
const endpointOption = {
|
||||
endpoint,
|
||||
endpointType,
|
||||
@@ -7,6 +8,9 @@ const buildOptions = (endpoint, parsedBody, endpointType) => {
|
||||
promptPrefix,
|
||||
resendFiles,
|
||||
imageDetail,
|
||||
iconURL,
|
||||
greeting,
|
||||
spec,
|
||||
modelOptions: {
|
||||
...rest,
|
||||
},
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
const buildOptions = (endpoint, parsedBody) => {
|
||||
const { examples, modelLabel, promptPrefix, ...rest } = parsedBody;
|
||||
const { examples, modelLabel, promptPrefix, iconURL, greeting, spec, ...rest } = parsedBody;
|
||||
const endpointOption = {
|
||||
examples,
|
||||
endpoint,
|
||||
modelLabel,
|
||||
promptPrefix,
|
||||
iconURL,
|
||||
greeting,
|
||||
spec,
|
||||
modelOptions: {
|
||||
...rest,
|
||||
},
|
||||
|
||||
@@ -4,25 +4,24 @@ const buildOptions = (endpoint, parsedBody) => {
|
||||
promptPrefix,
|
||||
agentOptions,
|
||||
tools,
|
||||
model,
|
||||
temperature,
|
||||
top_p,
|
||||
presence_penalty,
|
||||
frequency_penalty,
|
||||
iconURL,
|
||||
greeting,
|
||||
spec,
|
||||
...modelOptions
|
||||
} = parsedBody;
|
||||
const endpointOption = {
|
||||
endpoint,
|
||||
tools: tools.map((tool) => tool.pluginKey) ?? [],
|
||||
tools:
|
||||
tools
|
||||
.map((tool) => tool?.pluginKey ?? tool)
|
||||
.filter((toolName) => typeof toolName === 'string') ?? [],
|
||||
chatGptLabel,
|
||||
promptPrefix,
|
||||
agentOptions,
|
||||
modelOptions: {
|
||||
model,
|
||||
temperature,
|
||||
top_p,
|
||||
presence_penalty,
|
||||
frequency_penalty,
|
||||
},
|
||||
iconURL,
|
||||
greeting,
|
||||
spec,
|
||||
modelOptions,
|
||||
};
|
||||
|
||||
return endpointOption;
|
||||
|
||||
@@ -1,11 +1,15 @@
|
||||
const buildOptions = (endpoint, parsedBody) => {
|
||||
const { chatGptLabel, promptPrefix, resendFiles, imageDetail, ...rest } = parsedBody;
|
||||
const { chatGptLabel, promptPrefix, resendFiles, imageDetail, iconURL, greeting, spec, ...rest } =
|
||||
parsedBody;
|
||||
const endpointOption = {
|
||||
endpoint,
|
||||
chatGptLabel,
|
||||
promptPrefix,
|
||||
resendFiles,
|
||||
imageDetail,
|
||||
iconURL,
|
||||
greeting,
|
||||
spec,
|
||||
modelOptions: {
|
||||
...rest,
|
||||
},
|
||||
|
||||
@@ -514,6 +514,10 @@ function filterFile({ req, file, image }) {
|
||||
throw new Error('No file_id provided');
|
||||
}
|
||||
|
||||
if (file.size === 0) {
|
||||
throw new Error('Empty file uploaded');
|
||||
}
|
||||
|
||||
/* parse to validate api call, throws error on fail */
|
||||
isUUID.parse(file_id);
|
||||
|
||||
|
||||
@@ -3,9 +3,59 @@ const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { EModelEndpoint, defaultModels, CacheKeys } = require('librechat-data-provider');
|
||||
const { extractBaseURL, inputSchema, processModelData, logAxiosError } = require('~/utils');
|
||||
const getLogStores = require('~/cache/getLogStores');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const { openAIApiKey, userProvidedOpenAI } = require('./Config/EndpointService').config;
|
||||
|
||||
/**
|
||||
* Extracts the base URL from the provided URL.
|
||||
* @param {string} fullURL - The full URL.
|
||||
* @returns {string} The base URL.
|
||||
*/
|
||||
function deriveBaseURL(fullURL) {
|
||||
try {
|
||||
const parsedUrl = new URL(fullURL);
|
||||
const protocol = parsedUrl.protocol;
|
||||
const hostname = parsedUrl.hostname;
|
||||
const port = parsedUrl.port;
|
||||
|
||||
// Check if the parsed URL components are meaningful
|
||||
if (!protocol || !hostname) {
|
||||
return fullURL;
|
||||
}
|
||||
|
||||
// Reconstruct the base URL
|
||||
return `${protocol}//${hostname}${port ? `:${port}` : ''}`;
|
||||
} catch (error) {
|
||||
logger.error('Failed to derive base URL', error);
|
||||
return fullURL; // Return the original URL in case of any exception
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches Ollama models from the specified base API path.
|
||||
* @param {string} baseURL
|
||||
* @returns {Promise<string[]>} The Ollama models.
|
||||
*/
|
||||
const fetchOllamaModels = async (baseURL) => {
|
||||
let models = [];
|
||||
if (!baseURL) {
|
||||
return models;
|
||||
}
|
||||
try {
|
||||
const ollamaEndpoint = deriveBaseURL(baseURL);
|
||||
/** @type {Promise<AxiosResponse<OllamaListResponse>>} */
|
||||
const response = await axios.get(`${ollamaEndpoint}/api/tags`);
|
||||
models = response.data.models.map((tag) => tag.name);
|
||||
return models;
|
||||
} catch (error) {
|
||||
const logMessage =
|
||||
'Failed to fetch models from Ollama API. If you are not using Ollama directly, and instead, through some aggregator or reverse proxy that handles fetching via OpenAI spec, ensure the name of the endpoint doesn\'t start with `ollama` (case-insensitive).';
|
||||
logger.error(logMessage, error);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Fetches OpenAI models from the specified base API path or Azure, based on the provided configuration.
|
||||
*
|
||||
@@ -41,6 +91,10 @@ const fetchModels = async ({
|
||||
return models;
|
||||
}
|
||||
|
||||
if (name && name.toLowerCase().startsWith('ollama')) {
|
||||
return await fetchOllamaModels(baseURL);
|
||||
}
|
||||
|
||||
try {
|
||||
const options = {
|
||||
headers: {
|
||||
@@ -87,6 +141,7 @@ const fetchModels = async ({
|
||||
* @param {object} opts - The options for fetching the models.
|
||||
* @param {string} opts.user - The user ID to send to the API.
|
||||
* @param {boolean} [opts.azure=false] - Whether to fetch models from Azure.
|
||||
* @param {boolean} [opts.assistants=false] - Whether to fetch models from Azure.
|
||||
* @param {boolean} [opts.plugins=false] - Whether to fetch models from the plugins.
|
||||
* @param {string[]} [_models=[]] - The models to use as a fallback.
|
||||
*/
|
||||
@@ -96,7 +151,10 @@ const fetchOpenAIModels = async (opts, _models = []) => {
|
||||
const openaiBaseURL = 'https://api.openai.com/v1';
|
||||
let baseURL = openaiBaseURL;
|
||||
let reverseProxyUrl = process.env.OPENAI_REVERSE_PROXY;
|
||||
if (opts.azure) {
|
||||
|
||||
if (opts.assistants && process.env.ASSISTANTS_BASE_URL) {
|
||||
reverseProxyUrl = process.env.ASSISTANTS_BASE_URL;
|
||||
} else if (opts.azure) {
|
||||
return models;
|
||||
// const azure = getAzureCredentials();
|
||||
// baseURL = (genAzureChatCompletion(azure))
|
||||
@@ -191,10 +249,6 @@ const getOpenAIModels = async (opts) => {
|
||||
return models;
|
||||
}
|
||||
|
||||
if (opts.assistants) {
|
||||
return models;
|
||||
}
|
||||
|
||||
return await fetchOpenAIModels(opts, models);
|
||||
};
|
||||
|
||||
@@ -227,6 +281,7 @@ const getGoogleModels = () => {
|
||||
|
||||
module.exports = {
|
||||
fetchModels,
|
||||
deriveBaseURL,
|
||||
getOpenAIModels,
|
||||
getChatGPTBrowserModels,
|
||||
getAnthropicModels,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const axios = require('axios');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const { fetchModels, getOpenAIModels } = require('./ModelService');
|
||||
const { fetchModels, getOpenAIModels, deriveBaseURL } = require('./ModelService');
|
||||
jest.mock('~/utils', () => {
|
||||
const originalUtils = jest.requireActual('~/utils');
|
||||
return {
|
||||
@@ -256,3 +257,119 @@ describe('getOpenAIModels sorting behavior', () => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
});
|
||||
|
||||
describe('fetchModels with Ollama specific logic', () => {
|
||||
const mockOllamaData = {
|
||||
data: {
|
||||
models: [{ name: 'Ollama-Base' }, { name: 'Ollama-Advanced' }],
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
axios.get.mockResolvedValue(mockOllamaData);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should fetch Ollama models when name starts with "ollama"', async () => {
|
||||
const models = await fetchModels({
|
||||
user: 'user789',
|
||||
apiKey: 'testApiKey',
|
||||
baseURL: 'https://api.ollama.test.com',
|
||||
name: 'OllamaAPI',
|
||||
});
|
||||
|
||||
expect(models).toEqual(['Ollama-Base', 'Ollama-Advanced']);
|
||||
expect(axios.get).toHaveBeenCalledWith('https://api.ollama.test.com/api/tags'); // Adjusted to expect only one argument if no options are passed
|
||||
});
|
||||
|
||||
it('should handle errors gracefully when fetching Ollama models fails', async () => {
|
||||
axios.get.mockRejectedValue(new Error('Network error'));
|
||||
const models = await fetchModels({
|
||||
user: 'user789',
|
||||
apiKey: 'testApiKey',
|
||||
baseURL: 'https://api.ollama.test.com',
|
||||
name: 'OllamaAPI',
|
||||
});
|
||||
|
||||
expect(models).toEqual([]);
|
||||
expect(logger.error).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return an empty array if no baseURL is provided', async () => {
|
||||
const models = await fetchModels({
|
||||
user: 'user789',
|
||||
apiKey: 'testApiKey',
|
||||
name: 'OllamaAPI',
|
||||
});
|
||||
expect(models).toEqual([]);
|
||||
});
|
||||
|
||||
it('should not fetch Ollama models if the name does not start with "ollama"', async () => {
|
||||
// Mock axios to return a different set of models for non-Ollama API calls
|
||||
axios.get.mockResolvedValue({
|
||||
data: {
|
||||
data: [{ id: 'model-1' }, { id: 'model-2' }],
|
||||
},
|
||||
});
|
||||
|
||||
const models = await fetchModels({
|
||||
user: 'user789',
|
||||
apiKey: 'testApiKey',
|
||||
baseURL: 'https://api.test.com',
|
||||
name: 'TestAPI',
|
||||
});
|
||||
|
||||
expect(models).toEqual(['model-1', 'model-2']);
|
||||
expect(axios.get).toHaveBeenCalledWith(
|
||||
'https://api.test.com/models', // Ensure the correct API endpoint is called
|
||||
expect.any(Object), // Ensuring some object (headers, etc.) is passed
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('deriveBaseURL', () => {
|
||||
it('should extract the base URL correctly from a full URL with a port', () => {
|
||||
const fullURL = 'https://example.com:8080/path?query=123';
|
||||
const baseURL = deriveBaseURL(fullURL);
|
||||
expect(baseURL).toEqual('https://example.com:8080');
|
||||
});
|
||||
|
||||
it('should extract the base URL correctly from a full URL without a port', () => {
|
||||
const fullURL = 'https://example.com/path?query=123';
|
||||
const baseURL = deriveBaseURL(fullURL);
|
||||
expect(baseURL).toEqual('https://example.com');
|
||||
});
|
||||
|
||||
it('should handle URLs using the HTTP protocol', () => {
|
||||
const fullURL = 'http://example.com:3000/path?query=123';
|
||||
const baseURL = deriveBaseURL(fullURL);
|
||||
expect(baseURL).toEqual('http://example.com:3000');
|
||||
});
|
||||
|
||||
it('should return only the protocol and hostname if no port is specified', () => {
|
||||
const fullURL = 'http://example.com/path?query=123';
|
||||
const baseURL = deriveBaseURL(fullURL);
|
||||
expect(baseURL).toEqual('http://example.com');
|
||||
});
|
||||
|
||||
it('should handle URLs with uncommon protocols', () => {
|
||||
const fullURL = 'ftp://example.com:2121/path?query=123';
|
||||
const baseURL = deriveBaseURL(fullURL);
|
||||
expect(baseURL).toEqual('ftp://example.com:2121');
|
||||
});
|
||||
|
||||
it('should handle edge case where URL ends with a slash', () => {
|
||||
const fullURL = 'https://example.com/';
|
||||
const baseURL = deriveBaseURL(fullURL);
|
||||
expect(baseURL).toEqual('https://example.com');
|
||||
});
|
||||
|
||||
it('should return the original URL if the URL is invalid', () => {
|
||||
const invalidURL = 'htp:/example.com:8080';
|
||||
const result = deriveBaseURL(invalidURL);
|
||||
expect(result).toBe(invalidURL);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -20,6 +20,14 @@ const { redactMessage } = require('~/config/parsers');
|
||||
const { sleep } = require('~/server/utils');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const filteredTools = new Set([
|
||||
'ChatTool.js',
|
||||
'CodeSherpa.js',
|
||||
'CodeSherpaTools.js',
|
||||
'E2BTools.js',
|
||||
'extractionChain.js',
|
||||
]);
|
||||
|
||||
/**
|
||||
* Loads and formats tools from the specified tool directory.
|
||||
*
|
||||
@@ -30,10 +38,11 @@ const { logger } = require('~/config');
|
||||
*
|
||||
* @param {object} params - The parameters for the function.
|
||||
* @param {string} params.directory - The directory path where the tools are located.
|
||||
* @param {Set<string>} [params.filter=new Set()] - A set of filenames to exclude from loading.
|
||||
* @param {Array<string>} [params.adminFilter=[]] - Array of admin-defined tool keys to exclude from loading.
|
||||
* @returns {Record<string, FunctionTool>} An object mapping each tool's plugin key to its instance.
|
||||
*/
|
||||
function loadAndFormatTools({ directory, filter = new Set() }) {
|
||||
function loadAndFormatTools({ directory, adminFilter = [] }) {
|
||||
const filter = new Set([...adminFilter, ...filteredTools]);
|
||||
const tools = [];
|
||||
/* Structured Tools Directory */
|
||||
const files = fs.readdirSync(directory);
|
||||
|
||||
24
api/server/services/isDomainAllowed.js
Normal file
24
api/server/services/isDomainAllowed.js
Normal file
@@ -0,0 +1,24 @@
|
||||
const getCustomConfig = require('~/server/services/Config/getCustomConfig');
|
||||
|
||||
async function isDomainAllowed(email) {
|
||||
if (!email) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const domain = email.split('@')[1];
|
||||
|
||||
if (!domain) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const customConfig = await getCustomConfig();
|
||||
if (!customConfig) {
|
||||
return true;
|
||||
} else if (!customConfig?.registration?.allowedDomains) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return customConfig.registration.allowedDomains.includes(domain);
|
||||
}
|
||||
|
||||
module.exports = isDomainAllowed;
|
||||
58
api/server/services/isDomainAllowed.spec.js
Normal file
58
api/server/services/isDomainAllowed.spec.js
Normal file
@@ -0,0 +1,58 @@
|
||||
const getCustomConfig = require('~/server/services/Config/getCustomConfig');
|
||||
const isDomainAllowed = require('./isDomainAllowed');
|
||||
|
||||
jest.mock('~/server/services/Config/getCustomConfig', () => jest.fn());
|
||||
|
||||
describe('isDomainAllowed', () => {
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should return false if email is falsy', async () => {
|
||||
const email = '';
|
||||
const result = await isDomainAllowed(email);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false if domain is not present in the email', async () => {
|
||||
const email = 'test';
|
||||
const result = await isDomainAllowed(email);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true if customConfig is not available', async () => {
|
||||
const email = 'test@domain1.com';
|
||||
getCustomConfig.mockResolvedValue(null);
|
||||
const result = await isDomainAllowed(email);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true if allowedDomains is not defined in customConfig', async () => {
|
||||
const email = 'test@domain1.com';
|
||||
getCustomConfig.mockResolvedValue({});
|
||||
const result = await isDomainAllowed(email);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true if domain is included in the allowedDomains', async () => {
|
||||
const email = 'user@domain1.com';
|
||||
getCustomConfig.mockResolvedValue({
|
||||
registration: {
|
||||
allowedDomains: ['domain1.com', 'domain2.com'],
|
||||
},
|
||||
});
|
||||
const result = await isDomainAllowed(email);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false if domain is not included in the allowedDomains', async () => {
|
||||
const email = 'user@domain3.com';
|
||||
getCustomConfig.mockResolvedValue({
|
||||
registration: {
|
||||
allowedDomains: ['domain1.com', 'domain2.com'],
|
||||
},
|
||||
});
|
||||
const result = await isDomainAllowed(email);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
74
api/server/services/start/interface.js
Normal file
74
api/server/services/start/interface.js
Normal file
@@ -0,0 +1,74 @@
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
* Loads the default interface object.
|
||||
* @param {TCustomConfig | undefined} config - The loaded custom configuration.
|
||||
* @param {TConfigDefaults} configDefaults - The custom configuration default values.
|
||||
* @returns {TCustomConfig['interface']} The default interface object.
|
||||
*/
|
||||
function loadDefaultInterface(config, configDefaults) {
|
||||
const { interface: interfaceConfig } = config ?? {};
|
||||
const { interface: defaults } = configDefaults;
|
||||
const hasModelSpecs = config?.modelSpecs?.list?.length > 0;
|
||||
|
||||
const loadedInterface = {
|
||||
endpointsMenu:
|
||||
interfaceConfig?.endpointsMenu ?? (hasModelSpecs ? false : defaults.endpointsMenu),
|
||||
modelSelect: interfaceConfig?.modelSelect ?? (hasModelSpecs ? false : defaults.modelSelect),
|
||||
parameters: interfaceConfig?.parameters ?? (hasModelSpecs ? false : defaults.parameters),
|
||||
presets: interfaceConfig?.presets ?? (hasModelSpecs ? false : defaults.presets),
|
||||
sidePanel: interfaceConfig?.sidePanel ?? defaults.sidePanel,
|
||||
privacyPolicy: interfaceConfig?.privacyPolicy ?? defaults.privacyPolicy,
|
||||
termsOfService: interfaceConfig?.termsOfService ?? defaults.termsOfService,
|
||||
};
|
||||
|
||||
let i = 0;
|
||||
const logSettings = () => {
|
||||
// log interface object and model specs object (without list) for reference
|
||||
logger.warn(`\`interface\` settings:\n${JSON.stringify(loadedInterface, null, 2)}`);
|
||||
logger.warn(
|
||||
`\`modelSpecs\` settings:\n${JSON.stringify(
|
||||
{ ...(config?.modelSpecs ?? {}), list: undefined },
|
||||
null,
|
||||
2,
|
||||
)}`,
|
||||
);
|
||||
};
|
||||
|
||||
// warn about config.modelSpecs.prioritize if true and presets are enabled, that default presets will conflict with prioritizing model specs.
|
||||
if (config?.modelSpecs?.prioritize && loadedInterface.presets) {
|
||||
logger.warn(
|
||||
'Note: Prioritizing model specs can conflict with default presets if a default preset is set. It\'s recommended to disable presets from the interface or disable use of a default preset.',
|
||||
);
|
||||
i === 0 && i++;
|
||||
}
|
||||
|
||||
// warn about config.modelSpecs.enforce if true and if any of these, endpointsMenu, modelSelect, presets, or parameters are enabled, that enforcing model specs can conflict with these options.
|
||||
if (
|
||||
config?.modelSpecs?.enforce &&
|
||||
(loadedInterface.endpointsMenu ||
|
||||
loadedInterface.modelSelect ||
|
||||
loadedInterface.presets ||
|
||||
loadedInterface.parameters)
|
||||
) {
|
||||
logger.warn(
|
||||
'Note: Enforcing model specs can conflict with the interface options: endpointsMenu, modelSelect, presets, and parameters. It\'s recommended to disable these options from the interface or disable enforcing model specs.',
|
||||
);
|
||||
i === 0 && i++;
|
||||
}
|
||||
// warn if enforce is true and prioritize is not, that enforcing model specs without prioritizing them can lead to unexpected behavior.
|
||||
if (config?.modelSpecs?.enforce && !config?.modelSpecs?.prioritize) {
|
||||
logger.warn(
|
||||
'Note: Enforcing model specs without prioritizing them can lead to unexpected behavior. It\'s recommended to enable prioritizing model specs if enforcing them.',
|
||||
);
|
||||
i === 0 && i++;
|
||||
}
|
||||
|
||||
if (i > 0) {
|
||||
logSettings();
|
||||
}
|
||||
|
||||
return loadedInterface;
|
||||
}
|
||||
|
||||
module.exports = { loadDefaultInterface };
|
||||
98
api/server/utils/import/__data__/chatbotui-export.json
Normal file
98
api/server/utils/import/__data__/chatbotui-export.json
Normal file
@@ -0,0 +1,98 @@
|
||||
{
|
||||
"version": 4,
|
||||
"history": [
|
||||
{
|
||||
"id": "493cb218-9ebb-4d35-af8b-5aded3f351be",
|
||||
"name": "Hello what are you able to do?",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"type": "text",
|
||||
"content": "Hello what are you able to do?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"type": "text",
|
||||
"content": "Hello! As an AI developed by OpenAI, I'm designed to assist with a wide range of tasks, including but not limited to:\n\n1. **Answering Questions**: I can provide explanations on various topics from science to history, technology, and more.\n2. **Providing Information**: Whether you need the latest knowledge (up to my last training data), tips, statistics, or how-to guides, I can help.\n3. **Learning and Education**: I can assist with learning new skills, languages, or subjects.\n4. **Writing and Editing**: From composing texts to editing them, I can assist in creating content such as essays, emails, and creative writing.\n5. **Programming Help**: Guidance in understanding programming concepts, debugging code, or learning about algorithms.\n6. **Entertainment**: I can tell jokes, stories, or even generate ideas for games and activities.\n7. **Brainstorming**: Whether it's ideas for a birthday party or strategies for a marketing plan, I can help brainstorm.\n8. **Conversations**: I can have in-depth or casual discussions on a broad range of topics.\n9. **Mental Health Support**: While I'm not a substitute for professional help, I can lend an ear and offer some general advice and support.\n10. **Travel Information**: Advice on destinations, tips for travel, and information about different cultures.\n\nPlease note that my responses are based on patterns and knowledge from text up to my last update, so I might not be aware of the very latest data or events."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"type": "text",
|
||||
"content": "I like Entertainment part. Go ahead. Entertain me."
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"type": "text",
|
||||
"content": "Certainly! Let's begin with a light-hearted joke:\n\nWhy don't skeletons fight each other?\n\nThey don't have the guts.\n\nIf you're in the mood for a story instead, let's start with a short tale:\n\n---\n\n**Title: The Lost Key**\n\nIn a quiet town nestled between rolling green hills and a silver-sheened lake, there lived a locksmith named Eli. Eli had a peculiar ability; he could craft keys that didn't just unlock doors but also opened pathways to hidden places and secret realms. However, Eli never ventured through these magical doors; he was content knowing that he had the power to unveil them.\n\nOne day, Eli crafted his masterpiece—a key forged from an unknown, shimmering metal that seemed to hum with an inner light. The locksmith felt a deep connection to this key, as if it were calling to him to seek the door it was destined to open. Despite his usual reservations, curiosity got the better of him, and he embarked on a journey to find the matching lock.\n\nHe traveled through the town, greeting familiar faces and searching every nook and cranny, but to no avail. Eli was about to give up when he noticed a peculiar cat with a coat as dark as the night sky, its eyes glinting with a hint of stars. The cat seemed to beckon him to follow.\n\nLed by the cosmic feline through alleys and gardens, Eli arrived at a forgotten part of town where an old, vine-covered wall stood. To his astonishment, in the wall was a door that rippled like the surface of the lake when touched by a soft breeze. It was a door he had never seen before. Trembling with excitement, Eli took out the key. It fit perfectly into the lock, which turned with a satisfying click.\n\nAs the door creaked open, a brilliant light spilled forth. Eli shielded his eyes, and when he finally could see, he found himself not in another realm, but in the heart of his own town. 
However, everything appeared different—colors were more vibrant, sounds were clearer, and the air carried the sweet scent of blooming flowers.\n\nThe town's people approached him, smiling. \"We've been waiting for you,\" they said. \"The key wasn't meant to lead you to another world, but to allow you to see your own world differently. To show you that magic exists here, with all of us.\"\n\nEli's life transformed that day. He realized that every smile, every act of kindness, and every moment of connection was a door to a world just as magical as those he could unlock with his keys.\n\nAnd as for the mysterious cosmic cat, it would appear now and then, always at the edge of his vision, reminding him that the real magic was here all along, hidden in plain sight.\n\n---\n\nI hope you enjoyed the joke and the short story. If you'd like to continue the entertainment, feel free to guide me on what you'd like to hear or discuss next!"
|
||||
}
|
||||
],
|
||||
"model": {
|
||||
"id": "gpt-4-1106-preview",
|
||||
"name": "GPT-4 Turbo Preview",
|
||||
"maxLength": 384000,
|
||||
"tokenLimit": 128000
|
||||
},
|
||||
"prompt": "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.",
|
||||
"temperature": 1,
|
||||
"folderId": null
|
||||
},
|
||||
{
|
||||
"id": "f883fb40-6e5c-45fd-92d4-883a5595560f",
|
||||
"name": "Give me the code that inverts ...",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"type": "text",
|
||||
"content": "Give me the code that inverts binary tree in COBOL"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"type": "text",
|
||||
"content": "COBOL is not a language that is commonly used for tasks like inverting a binary tree. It is more suited for business applications and mainframe programming. However, if you are looking for a general idea of how you might approach this in COBOL, here is a simplified example:\n\n```cobol\nIDENTIFICATION DIVISION.\nPROGRAM-ID. InvertBinaryTree.\n\nDATA DIVISION.\nWORKING-STORAGE SECTION.\n01 Node.\n 05 Left-Child PIC 9.\n 05 Right-Child PIC 9.\n\nPROCEDURE DIVISION.\nInvert-Binary-Tree.\n MOVE 1 TO Left-Child\n MOVE 2 TO Right-Child\n\n DISPLAY \"Original Binary Tree:\"\n DISPLAY \"Left Child: \" Left-Child\n DISPLAY \"Right Child: \" Right-Child\n\n PERFORM Swap-Children\n\n DISPLAY \"Inverted Binary Tree:\"\n DISPLAY \"Left Child: \" Left-Child\n DISPLAY \"Right Child: \" Right-Child\n\n STOP RUN.\n\nSwap-Children.\n MOVE Left-Child TO Node\n MOVE Right-Child TO Left-Child\n MOVE Node TO Right-Child\n```\n\nThis is a very basic example and may not cover all the complexities of inverting a binary tree. You would need to adapt and expand this code significantly for a more complex binary tree structure."
|
||||
}
|
||||
],
|
||||
"model": {
|
||||
"id": "gpt-3.5-turbo",
|
||||
"name": "GPT-3.5"
|
||||
},
|
||||
"prompt": "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.",
|
||||
"temperature": 0.5,
|
||||
"folderId": null
|
||||
}
|
||||
],
|
||||
"folders": [
|
||||
{
|
||||
"id": "cdc857de-e669-498d-8fac-edc4995c9d7a",
|
||||
"name": "New folder",
|
||||
"type": "prompt"
|
||||
}
|
||||
],
|
||||
"prompts": [
|
||||
{
|
||||
"id": "a61573d8-6686-487c-9c5d-cd79c6d201ee",
|
||||
"name": "Prompt 1",
|
||||
"description": "",
|
||||
"content": "",
|
||||
"model": {
|
||||
"id": "gpt-4",
|
||||
"name": "GPT-4",
|
||||
"maxLength": 24000,
|
||||
"tokenLimit": 8000
|
||||
},
|
||||
"folderId": null
|
||||
},
|
||||
{
|
||||
"id": "9bf456e3-61fc-494d-b940-55ec934e7a04",
|
||||
"name": "Prompt 2",
|
||||
"description": "afgdfsg",
|
||||
"content": "adfdsfsadf",
|
||||
"model": {
|
||||
"id": "gpt-4",
|
||||
"name": "GPT-4",
|
||||
"maxLength": 24000,
|
||||
"tokenLimit": 8000
|
||||
},
|
||||
"folderId": null
|
||||
}
|
||||
]
|
||||
}
|
||||
1224
api/server/utils/import/__data__/chatgpt-export.json
Normal file
1224
api/server/utils/import/__data__/chatgpt-export.json
Normal file
File diff suppressed because one or more lines are too long
429
api/server/utils/import/__data__/chatgpt-tree.json
Normal file
429
api/server/utils/import/__data__/chatgpt-tree.json
Normal file
@@ -0,0 +1,429 @@
|
||||
[
|
||||
{
|
||||
"title": "Assist user with summary",
|
||||
"create_time": 1714585031.148505,
|
||||
"update_time": 1714585060.879308,
|
||||
"mapping": {
|
||||
"d38605d2-7b2c-43de-b044-22ce472c749b": {
|
||||
"id": "d38605d2-7b2c-43de-b044-22ce472c749b",
|
||||
"message": {
|
||||
"id": "d38605d2-7b2c-43de-b044-22ce472c749b",
|
||||
"author": {
|
||||
"role": "system",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": null,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [""]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 0,
|
||||
"metadata": {
|
||||
"is_visually_hidden_from_conversation": true
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa1f70c-100e-46f0-999e-10c8565f047f",
|
||||
"children": ["aaa297ba-e2da-440e-84f4-e62e7be8b003"]
|
||||
},
|
||||
"aaa1f70c-100e-46f0-999e-10c8565f047f": {
|
||||
"id": "aaa1f70c-100e-46f0-999e-10c8565f047f",
|
||||
"message": null,
|
||||
"parent": null,
|
||||
"children": ["d38605d2-7b2c-43de-b044-22ce472c749b"]
|
||||
},
|
||||
"aaa297ba-e2da-440e-84f4-e62e7be8b003": {
|
||||
"id": "aaa297ba-e2da-440e-84f4-e62e7be8b003",
|
||||
"message": {
|
||||
"id": "aaa297ba-e2da-440e-84f4-e62e7be8b003",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585031.150442,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["hi there"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"request_id": "87d189bb49d412c5-IAD",
|
||||
"timestamp_": "absolute",
|
||||
"message_type": null
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "d38605d2-7b2c-43de-b044-22ce472c749b",
|
||||
"children": ["bda8a275-886d-4f59-b38c-d7037144f0d5"]
|
||||
},
|
||||
"bda8a275-886d-4f59-b38c-d7037144f0d5": {
|
||||
"id": "bda8a275-886d-4f59-b38c-d7037144f0d5",
|
||||
"message": {
|
||||
"id": "bda8a275-886d-4f59-b38c-d7037144f0d5",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585031.757056,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["Hello! How can I assist you today?"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"finish_details": {
|
||||
"type": "stop",
|
||||
"stop_tokens": [100260]
|
||||
},
|
||||
"citations": [],
|
||||
"gizmo_id": null,
|
||||
"message_type": null,
|
||||
"model_slug": "text-davinci-002-render-sha",
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"pad": "AAAAAAAAAAAAAAAAAAAAAAAAAA",
|
||||
"parent_id": "aaa297ba-e2da-440e-84f4-e62e7be8b003",
|
||||
"is_complete": true,
|
||||
"request_id": "87d189bb49d412c5-IAD",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa297ba-e2da-440e-84f4-e62e7be8b003",
|
||||
"children": ["aaa24023-b02f-4d49-b568-5856b41750c0", "aaa236a3-cdfc-4eb1-b5c5-790c6641f880"]
|
||||
},
|
||||
"aaa24023-b02f-4d49-b568-5856b41750c0": {
|
||||
"id": "aaa24023-b02f-4d49-b568-5856b41750c0",
|
||||
"message": {
|
||||
"id": "aaa24023-b02f-4d49-b568-5856b41750c0",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585034.306995,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["so cool bro"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"request_id": "87d189cf3df512c5-IAD",
|
||||
"timestamp_": "absolute",
|
||||
"message_type": null
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "bda8a275-886d-4f59-b38c-d7037144f0d5",
|
||||
"children": ["23afbea9-ca08-49f2-b417-e7ae58a1c97d"]
|
||||
},
|
||||
"23afbea9-ca08-49f2-b417-e7ae58a1c97d": {
|
||||
"id": "23afbea9-ca08-49f2-b417-e7ae58a1c97d",
|
||||
"message": {
|
||||
"id": "23afbea9-ca08-49f2-b417-e7ae58a1c97d",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585034.755907,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["Thanks! What brings you here today?"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"finish_details": {
|
||||
"type": "stop",
|
||||
"stop_tokens": [100260]
|
||||
},
|
||||
"citations": [],
|
||||
"gizmo_id": null,
|
||||
"is_complete": true,
|
||||
"message_type": null,
|
||||
"model_slug": "text-davinci-002-render-sha",
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"pad": "AAAAAAAAAAAAAAAAAAAAAAAAA",
|
||||
"parent_id": "aaa24023-b02f-4d49-b568-5856b41750c0",
|
||||
"request_id": "87d189cf3df512c5-IAD",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa24023-b02f-4d49-b568-5856b41750c0",
|
||||
"children": ["aaa292cc-1842-4dbf-bd79-13cf7150366a"]
|
||||
},
|
||||
"aaa292cc-1842-4dbf-bd79-13cf7150366a": {
|
||||
"id": "aaa292cc-1842-4dbf-bd79-13cf7150366a",
|
||||
"message": {
|
||||
"id": "aaa292cc-1842-4dbf-bd79-13cf7150366a",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585037.56986,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["tell me a story"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"request_id": "87d189e3dac712c5-IAD",
|
||||
"timestamp_": "absolute",
|
||||
"message_type": null
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "23afbea9-ca08-49f2-b417-e7ae58a1c97d",
|
||||
"children": ["ada93f81-f59e-4b31-933d-1357efd68bfc"]
|
||||
},
|
||||
"ada93f81-f59e-4b31-933d-1357efd68bfc": {
|
||||
"id": "ada93f81-f59e-4b31-933d-1357efd68bfc",
|
||||
"message": {
|
||||
"id": "ada93f81-f59e-4b31-933d-1357efd68bfc",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585045.606752,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [
|
||||
"Sure! Here's a short story for you:\n\n---\n\nOnce upon a time, in a small village nestled between rolling"
|
||||
]
|
||||
},
|
||||
"status": "in_progress",
|
||||
"end_turn": null,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"citations": [],
|
||||
"gizmo_id": null,
|
||||
"message_type": null,
|
||||
"model_slug": "text-davinci-002-render-sha",
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"pad": "AAAAAAAAAAAAAAAAAA",
|
||||
"parent_id": "aaa292cc-1842-4dbf-bd79-13cf7150366a",
|
||||
"finish_details": {
|
||||
"type": "interrupted"
|
||||
},
|
||||
"request_id": "87d189e3dac712c5-IAD",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa292cc-1842-4dbf-bd79-13cf7150366a",
|
||||
"children": []
|
||||
},
|
||||
"aaa236a3-cdfc-4eb1-b5c5-790c6641f880": {
|
||||
"id": "aaa236a3-cdfc-4eb1-b5c5-790c6641f880",
|
||||
"message": {
|
||||
"id": "aaa236a3-cdfc-4eb1-b5c5-790c6641f880",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585050.906034,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["hi again"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"request_id": "87d18a36cf9312c5-IAD",
|
||||
"timestamp_": "absolute",
|
||||
"message_type": null
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "bda8a275-886d-4f59-b38c-d7037144f0d5",
|
||||
"children": ["db88eddf-3622-4246-8527-b6eaf0e9e8cd"]
|
||||
},
|
||||
"db88eddf-3622-4246-8527-b6eaf0e9e8cd": {
|
||||
"id": "db88eddf-3622-4246-8527-b6eaf0e9e8cd",
|
||||
"message": {
|
||||
"id": "db88eddf-3622-4246-8527-b6eaf0e9e8cd",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585051.690729,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["Hey! Welcome back. What's on your mind?"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"finish_details": {
|
||||
"type": "stop",
|
||||
"stop_tokens": [100260]
|
||||
},
|
||||
"citations": [],
|
||||
"gizmo_id": null,
|
||||
"is_complete": true,
|
||||
"message_type": null,
|
||||
"model_slug": "text-davinci-002-render-sha",
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"pad": "AAAAAAAAAAAAAAAAAAAAA",
|
||||
"parent_id": "aaa236a3-cdfc-4eb1-b5c5-790c6641f880",
|
||||
"request_id": "87d18a36cf9312c5-IAD",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa236a3-cdfc-4eb1-b5c5-790c6641f880",
|
||||
"children": ["aaa20127-b9e3-44f6-afbe-a2475838625a"]
|
||||
},
|
||||
"aaa20127-b9e3-44f6-afbe-a2475838625a": {
|
||||
"id": "aaa20127-b9e3-44f6-afbe-a2475838625a",
|
||||
"message": {
|
||||
"id": "aaa20127-b9e3-44f6-afbe-a2475838625a",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585055.908847,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["tell me a joke"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"request_id": "87d18a6e39a312c5-IAD",
|
||||
"timestamp_": "absolute",
|
||||
"message_type": null
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "db88eddf-3622-4246-8527-b6eaf0e9e8cd",
|
||||
"children": ["d0d2a7df-d2fc-4df9-bf0a-1c5121e227ae", "f63b8e17-aa5c-4ca6-a1bf-d4d285e269b8"]
|
||||
},
|
||||
"d0d2a7df-d2fc-4df9-bf0a-1c5121e227ae": {
|
||||
"id": "d0d2a7df-d2fc-4df9-bf0a-1c5121e227ae",
|
||||
"message": {
|
||||
"id": "d0d2a7df-d2fc-4df9-bf0a-1c5121e227ae",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585056.580956,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [
|
||||
"Sure, here's one for you:\n\nWhy don't scientists trust atoms?\n\nBecause they make up everything!"
|
||||
]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"finish_details": {
|
||||
"type": "stop",
|
||||
"stop_tokens": [100260]
|
||||
},
|
||||
"citations": [],
|
||||
"gizmo_id": null,
|
||||
"message_type": null,
|
||||
"model_slug": "text-davinci-002-render-sha",
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"pad": "AAAAAAAAAAAAAAAAAAAAAAAAAA",
|
||||
"parent_id": "aaa20127-b9e3-44f6-afbe-a2475838625a",
|
||||
"is_complete": true,
|
||||
"request_id": "87d18a55ca6212c5-IAD",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa20127-b9e3-44f6-afbe-a2475838625a",
|
||||
"children": []
|
||||
},
|
||||
"f63b8e17-aa5c-4ca6-a1bf-d4d285e269b8": {
|
||||
"id": "f63b8e17-aa5c-4ca6-a1bf-d4d285e269b8",
|
||||
"message": {
|
||||
"id": "f63b8e17-aa5c-4ca6-a1bf-d4d285e269b8",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585060.598792,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [
|
||||
"Sure, here's one for you:\n\nWhy don't scientists trust atoms?\n\nBecause they make up everything!"
|
||||
]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"finish_details": {
|
||||
"type": "stop",
|
||||
"stop_tokens": [100260]
|
||||
},
|
||||
"citations": [],
|
||||
"gizmo_id": null,
|
||||
"is_complete": true,
|
||||
"message_type": null,
|
||||
"model_slug": "text-davinci-002-render-sha",
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"pad": "AAAAAAAAAAAAAAAAAAAAAAAAAA",
|
||||
"parent_id": "aaa20127-b9e3-44f6-afbe-a2475838625a",
|
||||
"request_id": "87d18a6e39a312c5-IAD",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa20127-b9e3-44f6-afbe-a2475838625a",
|
||||
"children": []
|
||||
}
|
||||
},
|
||||
"moderation_results": [],
|
||||
"current_node": "f63b8e17-aa5c-4ca6-a1bf-d4d285e269b8",
|
||||
"plugin_ids": null,
|
||||
"conversation_id": "d5dc5307-6807-41a0-8b04-4acee626eeb7",
|
||||
"conversation_template_id": null,
|
||||
"gizmo_id": null,
|
||||
"is_archived": false,
|
||||
"safe_urls": [],
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"id": "d5dc5307-6807-41a0-8b04-4acee626eeb7"
|
||||
}
|
||||
]
|
||||
143
api/server/utils/import/__data__/librechat-export.json
Normal file
143
api/server/utils/import/__data__/librechat-export.json
Normal file
@@ -0,0 +1,143 @@
|
||||
{
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"endpoint": "openAI",
|
||||
"title": "Conversation 1. Web Search",
|
||||
"exportAt": "16:33:32 GMT+0200 (Central European Summer Time)",
|
||||
"branches": true,
|
||||
"recursive": true,
|
||||
"options": {
|
||||
"presetId": null,
|
||||
"model": "gpt-3.5-turbo",
|
||||
"chatGptLabel": null,
|
||||
"promptPrefix": null,
|
||||
"temperature": 1,
|
||||
"top_p": 1,
|
||||
"presence_penalty": 0,
|
||||
"frequency_penalty": 0,
|
||||
"resendFiles": true,
|
||||
"imageDetail": "auto",
|
||||
"endpoint": "openAI",
|
||||
"title": "VW Transporter 2014 Fuel Consumption. Web Search"
|
||||
},
|
||||
"messagesTree": [
|
||||
{
|
||||
"_id": "6615516574dc2ddcdebe40b6",
|
||||
"messageId": "b123942f-ca1a-4b16-9e1f-ea4af5171168",
|
||||
"__v": 0,
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"createdAt": "2024-04-09T14:32:05.230Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": true,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "00000000-0000-0000-0000-000000000000",
|
||||
"sender": "user",
|
||||
"text": "What is the fuel consumption of vw transporter with 8 people in l/km",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-04-09T14:32:05.230Z",
|
||||
"user": "65f1ad8c90523874d2d409f8",
|
||||
"children": [
|
||||
{
|
||||
"_id": "6615516574dc2ddcdebe40b8",
|
||||
"messageId": "549a4f45-cf93-4e3b-ae62-1abf02afbfc8",
|
||||
"__v": 0,
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"createdAt": "2024-04-09T14:32:05.242Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": false,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "b123942f-ca1a-4b16-9e1f-ea4af5171168",
|
||||
"sender": "GPT-3.5",
|
||||
"text": "The fuel consumption of a Volkswagen Transporter can vary based on the specific model and driving conditions. For example, the 2021 Volkswagen Transporter T6.1 L2H1 2.0 TDI 110HP, which is one of the recent models, doesn't have its fuel consumption data explicitly mentioned in the sources I found. However, a review of a Volkswagen Transporter model, which is equipped with a 2.0-litre four-cylinder turbo diesel engine and a seven-speed dual-clutch automatic transmission, mentions a combined cycle fuel consumption figure of 8.2 litres per 100km. It's important to note that this figure might vary, especially when the vehicle is carrying eight people, as more weight can lead to increased fuel consumption.\n\nFor more detailed information, you might need to refer to the specific model's manual or contact a Volkswagen dealer. Also, keep in mind that real-world fuel consumption can be influenced by factors such as driving style, road conditions, and maintenance of the vehicle ([2021 Volkswagen Transporter T6.1 L2H1 2.0 TDI 110HP specs, dimensions](https://www.ultimatespecs.com/car-specs/Volkswagen/118424/2021-Volkswagen-Transporter-T61-L2H1-20-TDI-110HP.html)) ([Volkswagen Transporter Review - Drive](https://www.drive.com.au/reviews/volkswagen-transporter-review/)).",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-04-09T14:32:05.242Z",
|
||||
"user": "65f1ad8c90523874d2d409f8",
|
||||
"children": [
|
||||
{
|
||||
"_id": "6615516574dc2ddcdebe40ba",
|
||||
"messageId": "880e5357-3e0c-4218-b351-fd3fc184adef",
|
||||
"__v": 0,
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"createdAt": "2024-04-09T14:32:05.247Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": true,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "549a4f45-cf93-4e3b-ae62-1abf02afbfc8",
|
||||
"sender": "user",
|
||||
"text": "What about 10 year old model",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-04-09T14:32:05.247Z",
|
||||
"user": "65f1ad8c90523874d2d409f8",
|
||||
"children": [
|
||||
{
|
||||
"_id": "6615516574dc2ddcdebe40bc",
|
||||
"messageId": "e9796d11-3bdf-4e25-9f0e-4802bbbb8c6d",
|
||||
"__v": 0,
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"createdAt": "2024-04-09T14:32:05.256Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": false,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "880e5357-3e0c-4218-b351-fd3fc184adef",
|
||||
"sender": "GPT-3.5",
|
||||
"text": "The fuel consumption of a 2014 Volkswagen Transporter, specifically the TDI340 model with a 2.0 L engine, is reported as follows:\n- Combined: 8.2 L/100km\n- Highway: 6.9 L/100km\n- City: 10.2 L/100km\n\nThese figures can vary based on driving conditions and the vehicle's load. The actual consumption might be higher when carrying eight people due to the additional weight ([Volkswagen Transporter TDI340 2014 - Pricing & Specifications | carsales.com.au](https://www.carsales.com.au/volkswagen/transporter/price/2014/tdi340-381631/)).",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-04-09T14:32:05.256Z",
|
||||
"user": "65f1ad8c90523874d2d409f8",
|
||||
"children": [
|
||||
{
|
||||
"_id": "6615516574dc2ddcdebe40be",
|
||||
"messageId": "04408c06-62dc-4961-8ef5-4336b68e7a0a",
|
||||
"__v": 0,
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"createdAt": "2024-04-09T14:32:05.261Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": true,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "e9796d11-3bdf-4e25-9f0e-4802bbbb8c6d",
|
||||
"sender": "user",
|
||||
"text": "Is it for fully loaded car?",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-04-09T14:32:05.261Z",
|
||||
"user": "65f1ad8c90523874d2d409f8",
|
||||
"children": [
|
||||
{
|
||||
"_id": "6615516574dc2ddcdebe40c0",
|
||||
"messageId": "645b925b-ef14-451f-9be3-814de55a5ae4",
|
||||
"__v": 0,
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"createdAt": "2024-04-09T14:32:05.266Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": false,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "04408c06-62dc-4961-8ef5-4336b68e7a0a",
|
||||
"sender": "GPT-3.5",
|
||||
"text": "The fuel consumption figures for the 2014 Volkswagen Transporter TDI340, such as the combined 8.2 L/100km, are typically based on standardized tests. These tests don't necessarily reflect a fully loaded vehicle. The actual fuel consumption can vary and is likely to be higher when the vehicle is fully loaded with passengers or cargo, due to the increased weight and potential changes in driving conditions such as more frequent acceleration and deceleration ([Volkswagen Transporter TDI340 2014 - Pricing & Specifications | carsales.com.au](https://www.carsales.com.au/volkswagen/transporter/price/2014/tdi340-381631/)).",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-04-09T14:32:05.266Z",
|
||||
"user": "65f1ad8c90523874d2d409f8",
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
153
api/server/utils/import/__data__/librechat-tree.json
Normal file
153
api/server/utils/import/__data__/librechat-tree.json
Normal file
@@ -0,0 +1,153 @@
|
||||
{
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"endpoint": "openAI",
|
||||
"title": "Troubleshooting Python Virtual Environment Activation Issue",
|
||||
"exportAt": "13:18:04 GMT-0400 (Eastern Daylight Time)",
|
||||
"branches": true,
|
||||
"recursive": true,
|
||||
"options": {
|
||||
"presetId": null,
|
||||
"model": "gpt-4-turbo",
|
||||
"chatGptLabel": "Software Engineer",
|
||||
"promptPrefix": "You are an expert software engineer.",
|
||||
"temperature": 0.8,
|
||||
"top_p": 1,
|
||||
"presence_penalty": 0,
|
||||
"frequency_penalty": 0,
|
||||
"resendFiles": true,
|
||||
"imageDetail": "auto",
|
||||
"endpoint": "openAI",
|
||||
"title": "Troubleshooting Python Virtual Environment Activation Issue"
|
||||
},
|
||||
"messagesTree": [
|
||||
{
|
||||
"_id": "66326f3f04bed94b7f5be68d",
|
||||
"messageId": "9501f99d-9bbb-40cb-bbb2-16d79aeceb72",
|
||||
"__v": 0,
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"createdAt": "2024-05-01T16:35:11.530Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": true,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "00000000-0000-0000-0000-000000000000",
|
||||
"sender": "User",
|
||||
"text": "How do I use source venv/bin/activate?\nbash: venv/bin/activate: No such file or directory",
|
||||
"tokenCount": 59,
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-05-01T16:35:39.114Z",
|
||||
"user": "64d4f005d73ef522e1f48818",
|
||||
"children": [
|
||||
{
|
||||
"_id": "66326f4004bed94b7f5be699",
|
||||
"messageId": "4c395510-fdfd-4556-b034-f9440f025e91",
|
||||
"__v": 0,
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"createdAt": "2024-05-01T16:35:12.604Z",
|
||||
"error": false,
|
||||
"isCreatedByUser": false,
|
||||
"isEdited": false,
|
||||
"model": "gpt-4-turbo",
|
||||
"parentMessageId": "9501f99d-9bbb-40cb-bbb2-16d79aeceb72",
|
||||
"sender": "Software Engineer",
|
||||
"text": "It looks like you're trying to activate a Python virtual environment but encountering an issue because the system cannot find the `activate` script in the `venv/bin/` directory. This typically happens if the virtual environment has not been created yet or if the path specified is incorrect. Here are a few steps to troubleshoot and resolve this issue:\n\n1. **Check if the Virtual Environment Exists**:\n Ensure that the virtual environment directory (`venv`) exists in your current directory (`~/denis-libre`). You can check this by listing the directories:\n\n ```bash\n ls -l\n ```\n\n Look for `venv` in the output. If it's not there, you'll need to create it.\n\n2. **Create the Virtual Environment**:\n If the virtual environment does not exist, you can create one by using the following command:\n\n ```bash\n python3 -m venv venv\n ```\n\n This command creates a new virtual environment named `venv` in your current directory.\n\n3. **Activate the Virtual Environment**:\n Once you have confirmed that the `venv` directory exists, try activating the virtual environment again:\n\n ```bash\n source venv/bin/activate\n ```\n\n If you still receive an error, double-check the path. If you are on a Windows system using Git Bash or similar, the path might be `venv\\Scripts\\activate` instead.\n\n4. **Check for Environment Specific Issues**:\n - If you are using a different shell (like `csh` or `fish`), the activation script could be different (e.g., `activate.csh` or `activate.fish`).\n - Ensure there’s no typo in your command or path misconfiguration.\n\n5. **Permissions Issue**:\n Make sure you have the necessary permissions to access the `venv` directory and its contents. If not, you might need to modify the permissions:\n\n ```bash\n chmod -R 755 venv\n ```\n\n6. **Using the Correct Python Version**:\n Confirm that you are using the correct version of Python for which the virtual environment is supposed to be set up. 
Sometimes, using a different version might cause issues.\n\nBy following these steps, you should be able to resolve the issue with activating your Python virtual environment. If the problem persists, there might be a more specific issue with your Python installation or system configuration.",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-05-01T16:35:39.104Z",
|
||||
"user": "64d4f005d73ef522e1f48818",
|
||||
"endpoint": "openAI",
|
||||
"finish_reason": "stop",
|
||||
"tokenCount": 504,
|
||||
"children": [
|
||||
{
|
||||
"messageId": "6e4c95ca-fcb1-4869-adc3-b4bcec83a270",
|
||||
"parentMessageId": "4c395510-fdfd-4556-b034-f9440f025e91",
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"sender": "User",
|
||||
"text": "thanks",
|
||||
"isCreatedByUser": true,
|
||||
"tokenCount": 5,
|
||||
"children": [
|
||||
{
|
||||
"messageId": "5da2cdce-87f7-46b4-87c5-e8224e5d5125",
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"parentMessageId": "6e4c95ca-fcb1-4869-adc3-b4bcec83a270",
|
||||
"isCreatedByUser": false,
|
||||
"model": "gpt-4-turbo",
|
||||
"sender": "Software Engineer",
|
||||
"text": "You're welcome! If you have any more questions or need further assistance, feel free to ask. Good luck with your project!",
|
||||
"promptTokens": 586,
|
||||
"endpoint": "openAI",
|
||||
"finish_reason": "stop",
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"messageId": "64ed7568-bdcf-42a9-b34a-73386d4d99dc",
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"parentMessageId": "6e4c95ca-fcb1-4869-adc3-b4bcec83a270",
|
||||
"isCreatedByUser": false,
|
||||
"model": "gpt-4-turbo",
|
||||
"sender": "Software Engineer",
|
||||
"text": "You're welcome! If you have any more questions or need further assistance, feel free to ask. Happy coding!",
|
||||
"promptTokens": 586,
|
||||
"endpoint": "openAI",
|
||||
"finish_reason": "stop",
|
||||
"children": [
|
||||
{
|
||||
"messageId": "f82cca6f-7d2b-4fc1-9d5a-636acc137368",
|
||||
"parentMessageId": "64ed7568-bdcf-42a9-b34a-73386d4d99dc",
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"sender": "User",
|
||||
"text": "does coding make you happy",
|
||||
"isCreatedByUser": true,
|
||||
"tokenCount": 9,
|
||||
"children": [
|
||||
{
|
||||
"messageId": "ad7b6ec6-aaa4-434c-9188-def6aadbb2bf",
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"parentMessageId": "f82cca6f-7d2b-4fc1-9d5a-636acc137368",
|
||||
"isCreatedByUser": false,
|
||||
"model": "gpt-4-turbo",
|
||||
"sender": "Software Engineer",
|
||||
"text": "As a software engineer, coding can indeed be a source of happiness and satisfaction for many professionals in the field. Here’s why coding might make someone happy:\n\n1. **Problem Solving**: Coding offers a constant stream of challenges that require problem-solving skills. Solving these problems can be immensely gratifying and can provide a sense of accomplishment.\n\n2. **Creativity**: Writing software is often a creative process, similar to composing music or painting. Developers can express themselves through their code, which can be a fulfilling experience.\n\n3. **Continuous Learning**: The tech field is always evolving, offering endless opportunities to learn new languages, tools, and techniques. For many, this continuous learning keeps the job exciting and engaging.\n\n4. **Impact**: Software engineers often work on applications that impact the lives of millions of people. Knowing that your work is helping others can be a powerful source of happiness.\n\n5. **Community**: The coding community is known for its collaboration and sharing of knowledge through platforms like GitHub, Stack Overflow, and various forums. Being a part of this community can provide a sense of belonging and support.\n\n6. **Career Opportunities**: Coding skills are in high demand, which can lead to job security, good salaries, and the opportunity to work in a variety of industries. This can contribute to overall life satisfaction and stability.\n\nHowever, like any job, coding can also be stressful at times. Deadlines, bugs, difficult problems, and other challenges can cause frustration. The key is to find a balance, continue learning, and choose projects that align with your interests and values. If these elements are in place, coding can indeed be a very happy career choice.",
|
||||
"promptTokens": 622,
|
||||
"endpoint": "openAI",
|
||||
"finish_reason": "stop",
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"messageId": "d3e12088-962e-42ce-900d-edd6816acae5",
|
||||
"parentMessageId": "4c395510-fdfd-4556-b034-f9440f025e91",
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"sender": "User",
|
||||
"text": "lol",
|
||||
"isCreatedByUser": true,
|
||||
"tokenCount": 5,
|
||||
"children": [
|
||||
{
|
||||
"messageId": "1f062c99-ff0a-4cf4-a1cf-7150261a24e2",
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"parentMessageId": "d3e12088-962e-42ce-900d-edd6816acae5",
|
||||
"isCreatedByUser": false,
|
||||
"model": "gpt-4-turbo",
|
||||
"sender": "Software Engineer",
|
||||
"text": "It looks like you might have been amused or found something funny about the situation! If you have any specific questions or need further help with your virtual environment setup or anything else related to software engineering, feel free to ask!",
|
||||
"promptTokens": 586,
|
||||
"endpoint": "openAI",
|
||||
"finish_reason": "stop",
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
314
api/server/utils/import/fork.js
Normal file
314
api/server/utils/import/fork.js
Normal file
@@ -0,0 +1,314 @@
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
const { EModelEndpoint, Constants, ForkOptions } = require('librechat-data-provider');
|
||||
const { createImportBatchBuilder } = require('./importBatchBuilder');
|
||||
const BaseClient = require('~/app/clients/BaseClient');
|
||||
const { getConvo } = require('~/models/Conversation');
|
||||
const { getMessages } = require('~/models/Message');
|
||||
const logger = require('~/config/winston');
|
||||
|
||||
/**
 * Forks an existing conversation: clones the selected subset of its messages
 * (per `option`) into a brand-new conversation with fresh message IDs, then
 * persists the batch.
 *
 * @param {object} params - The parameters for the importer.
 * @param {string} params.originalConvoId - The ID of the conversation to fork.
 * @param {string} params.targetMessageId - The ID of the message to fork from.
 * @param {string} params.requestUserId - The ID of the user making the request.
 * @param {string} [params.newTitle] - Optional new title for the forked conversation; uses the old title if not provided.
 * @param {string} [params.option=''] - Optional flag for fork option (one of `ForkOptions`; defaults to target-level).
 * @param {boolean} [params.records=false] - Optional flag for returning actual database records instead of the builder result.
 * @param {boolean} [params.splitAtTarget=false] - Optional flag for splitting the messages at the target message level.
 * @param {string} [params.latestMessageId] - latestMessageId - Required if splitAtTarget is true.
 * @param {(userId: string) => ImportBatchBuilder} [params.builderFactory] - Optional factory function for creating an ImportBatchBuilder instance.
 * @returns {Promise<TForkConvoResponse>} The response after forking the conversation.
 * @throws {Error} If the original conversation cannot be found, or if
 *   `splitAtTarget` is set without `latestMessageId`.
 */
async function forkConversation({
  originalConvoId,
  targetMessageId: targetId,
  requestUserId,
  newTitle,
  option = ForkOptions.TARGET_LEVEL,
  records = false,
  splitAtTarget = false,
  latestMessageId,
  builderFactory = createImportBatchBuilder,
}) {
  try {
    const originalConvo = await getConvo(requestUserId, originalConvoId);
    if (!originalConvo) {
      // Fail fast with a clear message instead of a TypeError on `originalConvo.endpoint` below.
      throw new Error(`Conversation not found: ${originalConvoId}`);
    }
    let originalMessages = await getMessages({
      user: requestUserId,
      conversationId: originalConvoId,
    });

    let targetMessageId = targetId;
    if (splitAtTarget && !latestMessageId) {
      throw new Error('Latest `messageId` is required for forking from target message.');
    } else if (splitAtTarget) {
      // Splitting re-roots the target level, so the fork must start from the
      // latest message rather than the original target.
      originalMessages = splitAtTargetLevel(originalMessages, targetId);
      targetMessageId = latestMessageId;
    }

    const importBatchBuilder = builderFactory(requestUserId);
    importBatchBuilder.startConversation(originalConvo.endpoint ?? EModelEndpoint.openAI);

    let messagesToClone = [];

    if (option === ForkOptions.DIRECT_PATH) {
      // Direct path only
      messagesToClone = BaseClient.getMessagesForConversation({
        messages: originalMessages,
        parentMessageId: targetMessageId,
      });
    } else if (option === ForkOptions.INCLUDE_BRANCHES) {
      // Direct path and siblings
      messagesToClone = getAllMessagesUpToParent(originalMessages, targetMessageId);
    } else if (option === ForkOptions.TARGET_LEVEL || !option) {
      // Direct path, siblings, and all descendants
      messagesToClone = getMessagesUpToTargetLevel(originalMessages, targetMessageId);
    }

    // Map old message IDs to fresh UUIDs so parent links can be rewired.
    const idMapping = new Map();

    for (const message of messagesToClone) {
      const newMessageId = uuidv4();
      idMapping.set(message.messageId, newMessageId);

      const clonedMessage = {
        ...message,
        messageId: newMessageId,
        parentMessageId:
          message.parentMessageId && message.parentMessageId !== Constants.NO_PARENT
            ? idMapping.get(message.parentMessageId)
            : Constants.NO_PARENT,
      };

      importBatchBuilder.saveMessage(clonedMessage);
    }

    const result = importBatchBuilder.finishConversation(
      newTitle || originalConvo.title,
      new Date(),
      originalConvo,
    );
    await importBatchBuilder.saveBatch();
    logger.debug(
      `user: ${requestUserId} | New conversation "${
        newTitle || originalConvo.title
      }" forked from conversation ID ${originalConvoId}`,
    );

    if (!records) {
      return result;
    }

    // Caller asked for the persisted records rather than the builder result.
    const conversation = await getConvo(requestUserId, result.conversation.conversationId);
    const messages = await getMessages({
      user: requestUserId,
      conversationId: conversation.conversationId,
    });

    return {
      conversation,
      messages,
    };
  } catch (error) {
    logger.error(
      `user: ${requestUserId} | Error forking conversation from original ID ${originalConvoId}`,
      error,
    );
    throw error;
  }
}
|
||||
|
||||
/**
 * Retrieves all messages up to the root from the target message.
 * The result contains the target, every ancestor on the path to the root,
 * and the children of those ancestors (siblings of the path) — but never
 * the target's own children.
 *
 * @param {TMessage[]} messages - The list of messages to search.
 * @param {string} targetMessageId - The ID of the target message.
 * @returns {TMessage[]} The list of messages up to the root from the target message.
 */
function getAllMessagesUpToParent(messages, targetMessageId) {
  const targetMessage = messages.find((msg) => msg.messageId === targetMessageId);
  if (!targetMessage) {
    return [];
  }

  // Walk parent links from the target to the root, collecting ancestor IDs.
  // `seen` guards against cycles in the parent chain.
  const ancestorIds = new Set();
  const seen = new Set();

  for (let cursor = targetMessage; cursor; ) {
    if (seen.has(cursor.messageId)) {
      break;
    }

    seen.add(cursor.messageId);
    ancestorIds.add(cursor.messageId);

    const parentId = cursor.parentMessageId ?? Constants.NO_PARENT;
    if (parentId === Constants.NO_PARENT) {
      break;
    }

    cursor = messages.find((msg) => msg.messageId === parentId);
  }

  // Keep: the target itself, anything on the root path, and any message whose
  // parent is on the path — excluding children of the target.
  return messages.filter((msg) => {
    if (msg.messageId === targetMessageId) {
      return true;
    }
    if (ancestorIds.has(msg.messageId)) {
      return true;
    }
    return ancestorIds.has(msg.parentMessageId) && msg.parentMessageId !== targetMessageId;
  });
}
|
||||
|
||||
/**
 * Retrieves every message from the root level down to (and including) the
 * tree level that contains the target message — i.e. the direct path, all
 * siblings at each level, and every message above the target's depth.
 *
 * Performs a breadth-first, level-by-level traversal from the roots and stops
 * once the level containing the target has been fully collected.
 *
 * @param {TMessage[]} messages - The list of messages to search.
 * @param {string} targetMessageId - The ID of the target message.
 * @returns {TMessage[]} The list of inclusive messages up to the target's level,
 *   in level-order (Set insertion order); empty if the target is not found.
 */
function getMessagesUpToTargetLevel(messages, targetMessageId) {
  // Trivial case: a single message that is itself the target.
  if (messages.length === 1 && messages[0] && messages[0].messageId === targetMessageId) {
    return messages;
  }

  // Create a map of parentMessageId to children messages
  const parentToChildrenMap = new Map();
  for (const message of messages) {
    if (!parentToChildrenMap.has(message.parentMessageId)) {
      parentToChildrenMap.set(message.parentMessageId, []);
    }
    parentToChildrenMap.get(message.parentMessageId).push(message);
  }

  // Retrieve the target message
  const targetMessage = messages.find((msg) => msg.messageId === targetMessageId);
  if (!targetMessage) {
    logger.error('Target message not found.');
    return [];
  }

  // Guards against cycles in the parent/child links during traversal.
  const visited = new Set();

  // Start from root-level messages; fall back to the target itself if no
  // roots exist (e.g. a detached subtree).
  const rootMessages = parentToChildrenMap.get(Constants.NO_PARENT) || [];
  let currentLevel = rootMessages.length > 0 ? [...rootMessages] : [targetMessage];
  const results = new Set(currentLevel);

  // Check if the target message is at the root level
  if (
    currentLevel.some((msg) => msg.messageId === targetMessageId) &&
    targetMessage.parentMessageId === Constants.NO_PARENT
  ) {
    return Array.from(results);
  }

  // Iterate level by level until the target is found
  let targetFound = false;
  while (!targetFound && currentLevel.length > 0) {
    const nextLevel = [];
    for (const node of currentLevel) {
      if (visited.has(node.messageId)) {
        logger.warn('Cycle detected in message tree');
        continue;
      }
      visited.add(node.messageId);
      const children = parentToChildrenMap.get(node.messageId) || [];
      for (const child of children) {
        if (visited.has(child.messageId)) {
          logger.warn('Cycle detected in message tree');
          continue;
        }
        nextLevel.push(child);
        results.add(child);
        if (child.messageId === targetMessageId) {
          // Finish collecting this whole level before stopping, so the
          // target's siblings at the same depth are included.
          targetFound = true;
        }
      }
    }
    currentLevel = nextLevel;
  }

  return Array.from(results);
}
|
||||
|
||||
/**
 * Splits the conversation at the targeted message level, including the target, its siblings, and all descendant messages.
 * All target level messages have their parentMessageId set to the root.
 *
 * @param {TMessage[]} messages - The list of messages to analyze.
 * @param {string} targetMessageId - The ID of the message to start the split from.
 * @returns {TMessage[]} The list of messages at and below the target level;
 *   empty if the target message (or its level) cannot be determined.
 */
function splitAtTargetLevel(messages, targetMessageId) {
  // Create a map of parentMessageId to children messages
  const parentToChildrenMap = new Map();
  for (const message of messages) {
    if (!parentToChildrenMap.has(message.parentMessageId)) {
      parentToChildrenMap.set(message.parentMessageId, []);
    }
    parentToChildrenMap.get(message.parentMessageId).push(message);
  }

  // Retrieve the target message
  const targetMessage = messages.find((msg) => msg.messageId === targetMessageId);
  if (!targetMessage) {
    logger.error('Target message not found.');
    return [];
  }

  // Initialize the search with root messages
  const rootMessages = parentToChildrenMap.get(Constants.NO_PARENT) || [];
  let currentLevel = [...rootMessages];
  let currentLevelIndex = 0;
  // Use a Map so message IDs can never collide with Object.prototype keys.
  const levelMap = new Map();
  // Guards against cycles in parent/child links, consistent with the other
  // traversal helpers in this module; without it a cycle loops forever.
  const visited = new Set();

  // Map root messages to level 0
  rootMessages.forEach((msg) => {
    levelMap.set(msg.messageId, 0);
  });

  // BFS level by level, recording each message's depth
  while (currentLevel.length > 0) {
    const nextLevel = [];
    for (const node of currentLevel) {
      if (visited.has(node.messageId)) {
        logger.warn('Cycle detected in message tree');
        continue;
      }
      visited.add(node.messageId);
      const children = parentToChildrenMap.get(node.messageId) || [];
      for (const child of children) {
        if (visited.has(child.messageId)) {
          logger.warn('Cycle detected in message tree');
          continue;
        }
        nextLevel.push(child);
        levelMap.set(child.messageId, currentLevelIndex + 1);
      }
    }
    currentLevel = nextLevel;
    currentLevelIndex++;
  }

  // Determine the target level
  const targetLevel = levelMap.get(targetMessageId);
  if (targetLevel === undefined) {
    logger.error('Target level not found.');
    return [];
  }

  // Filter messages at or below the target level; messages exactly at the
  // target level are re-rooted (parentMessageId set to NO_PARENT).
  const filteredMessages = messages
    .map((msg) => {
      const messageLevel = levelMap.get(msg.messageId);
      if (messageLevel < targetLevel) {
        return null;
      } else if (messageLevel === targetLevel) {
        return {
          ...msg,
          parentMessageId: Constants.NO_PARENT,
        };
      }

      // NOTE(review): messages with no recorded level (unreachable from any
      // root) fall through unchanged, matching prior behavior — confirm this
      // is intended for orphaned messages.
      return msg;
    })
    .filter((msg) => msg !== null);

  return filteredMessages;
}
|
||||
|
||||
// Public API: the fork entry point plus the tree-selection helpers,
// exported individually so the helpers can be unit-tested in isolation.
module.exports = {
  forkConversation,
  splitAtTargetLevel,
  getAllMessagesUpToParent,
  getMessagesUpToTargetLevel,
};
|
||||
574
api/server/utils/import/fork.spec.js
Normal file
574
api/server/utils/import/fork.spec.js
Normal file
@@ -0,0 +1,574 @@
|
||||
const { Constants, ForkOptions } = require('librechat-data-provider');
|
||||
|
||||
jest.mock('~/models/Conversation', () => ({
|
||||
getConvo: jest.fn(),
|
||||
bulkSaveConvos: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('~/models/Message', () => ({
|
||||
getMessages: jest.fn(),
|
||||
bulkSaveMessages: jest.fn(),
|
||||
}));
|
||||
|
||||
let mockIdCounter = 0;
|
||||
jest.mock('uuid', () => {
|
||||
return {
|
||||
v4: jest.fn(() => {
|
||||
mockIdCounter++;
|
||||
return mockIdCounter.toString();
|
||||
}),
|
||||
};
|
||||
});
|
||||
|
||||
const {
|
||||
forkConversation,
|
||||
splitAtTargetLevel,
|
||||
getAllMessagesUpToParent,
|
||||
getMessagesUpToTargetLevel,
|
||||
} = require('./fork');
|
||||
const { getConvo, bulkSaveConvos } = require('~/models/Conversation');
|
||||
const { getMessages, bulkSaveMessages } = require('~/models/Message');
|
||||
const BaseClient = require('~/app/clients/BaseClient');
|
||||
|
||||
/**
 * Renders a message list as an ASCII tree (for test debug output), recursing
 * depth-first from the given parent.
 *
 * @param {TMessage[]} messages - The list of messages to visualize.
 * @param {string | null} parentId - The parent message ID.
 * @param {string} prefix - The prefix to use for each line.
 * @returns {string} The ASCII tree representation.
 */
function printMessageTree(messages, parentId = Constants.NO_PARENT, prefix = '') {
  const childMessages = messages.filter((msg) => msg.parentMessageId === parentId);
  const lines = [];

  childMessages.forEach((msg, index) => {
    const isLast = index === childMessages.length - 1;
    const connector = isLast ? '└── ' : '├── ';
    const label =
      msg.parentMessageId !== Constants.NO_PARENT ? `Child of ${msg.parentMessageId}` : 'Root';

    lines.push(`${prefix}${connector}[${msg.messageId}]: ${label}\n`);
    // Recurse into this message's subtree with an extended prefix.
    lines.push(printMessageTree(messages, msg.messageId, prefix + (isLast ? '    ' : '|   ')));
  });

  return lines.join('');
}
|
||||
|
||||
const mockMessages = [
|
||||
{
|
||||
messageId: '0',
|
||||
parentMessageId: Constants.NO_PARENT,
|
||||
text: 'Root message 1',
|
||||
createdAt: '2021-01-01',
|
||||
},
|
||||
{
|
||||
messageId: '1',
|
||||
parentMessageId: Constants.NO_PARENT,
|
||||
text: 'Root message 2',
|
||||
createdAt: '2021-01-01',
|
||||
},
|
||||
{ messageId: '2', parentMessageId: '1', text: 'Child of 1', createdAt: '2021-01-02' },
|
||||
{ messageId: '3', parentMessageId: '1', text: 'Child of 1', createdAt: '2021-01-03' },
|
||||
{ messageId: '4', parentMessageId: '2', text: 'Child of 2', createdAt: '2021-01-04' },
|
||||
{ messageId: '5', parentMessageId: '2', text: 'Child of 2', createdAt: '2021-01-05' },
|
||||
{ messageId: '6', parentMessageId: '3', text: 'Child of 3', createdAt: '2021-01-06' },
|
||||
{ messageId: '7', parentMessageId: '3', text: 'Child of 3', createdAt: '2021-01-07' },
|
||||
{ messageId: '8', parentMessageId: '7', text: 'Child of 7', createdAt: '2021-01-07' },
|
||||
];
|
||||
|
||||
// Minimal conversation record returned by the mocked getConvo.
const mockConversation = { convoId: 'abc123', title: 'Original Title' };
|
||||
|
||||
// Tests for forkConversation: each ForkOptions strategy should persist the expected
// subset of mockMessages. All DB access (getConvo/getMessages/bulk saves) is mocked.
describe('forkConversation', () => {
  beforeEach(() => {
    jest.clearAllMocks();
    // Reset the deterministic ID counter so forked message IDs are stable per test.
    mockIdCounter = 0;
    // Happy-path mocks: lookups resolve with fixtures, bulk saves succeed silently.
    getConvo.mockResolvedValue(mockConversation);
    getMessages.mockResolvedValue(mockMessages);
    bulkSaveConvos.mockResolvedValue(null);
    bulkSaveMessages.mockResolvedValue(null);
  });

  // DIRECT_PATH from target '3': only the root-to-target chain (1 -> 3) is copied.
  test('should fork conversation without branches', async () => {
    const result = await forkConversation({
      originalConvoId: 'abc123',
      targetMessageId: '3',
      requestUserId: 'user1',
      option: ForkOptions.DIRECT_PATH,
    });
    console.debug('forkConversation: direct path\n', printMessageTree(result.messages));

    // Reversed order due to setup in function
    const expectedMessagesTexts = ['Child of 1', 'Root message 2'];
    expect(getMessages).toHaveBeenCalled();
    expect(bulkSaveMessages).toHaveBeenCalledWith(
      expect.arrayContaining(
        expectedMessagesTexts.map((text) => expect.objectContaining({ text })),
      ),
    );
  });

  // DIRECT_PATH from a deeper target '8': the full chain 1 -> 3 -> 7 -> 8 is copied.
  test('should fork conversation without branches (deeper)', async () => {
    const result = await forkConversation({
      originalConvoId: 'abc123',
      targetMessageId: '8',
      requestUserId: 'user1',
      option: ForkOptions.DIRECT_PATH,
    });
    console.debug('forkConversation: direct path (deeper)\n', printMessageTree(result.messages));

    const expectedMessagesTexts = ['Child of 7', 'Child of 3', 'Child of 1', 'Root message 2'];
    expect(getMessages).toHaveBeenCalled();
    expect(bulkSaveMessages).toHaveBeenCalledWith(
      expect.arrayContaining(
        expectedMessagesTexts.map((text) => expect.objectContaining({ text })),
      ),
    );
  });

  // INCLUDE_BRANCHES: siblings along the path (both 'Child of 1' nodes) come along.
  test('should fork conversation with branches', async () => {
    const result = await forkConversation({
      originalConvoId: 'abc123',
      targetMessageId: '3',
      requestUserId: 'user1',
      option: ForkOptions.INCLUDE_BRANCHES,
    });

    console.debug('forkConversation: include branches\n', printMessageTree(result.messages));

    const expectedMessagesTexts = ['Root message 2', 'Child of 1', 'Child of 1'];
    expect(getMessages).toHaveBeenCalled();
    expect(bulkSaveMessages).toHaveBeenCalledWith(
      expect.arrayContaining(
        expectedMessagesTexts.map((text) => expect.objectContaining({ text })),
      ),
    );
  });

  // TARGET_LEVEL: every message at or above the target's depth is copied,
  // including the unrelated root 'Root message 1'.
  test('should fork conversation up to target level', async () => {
    const result = await forkConversation({
      originalConvoId: 'abc123',
      targetMessageId: '3',
      requestUserId: 'user1',
      option: ForkOptions.TARGET_LEVEL,
    });

    console.debug('forkConversation: target level\n', printMessageTree(result.messages));

    const expectedMessagesTexts = ['Root message 1', 'Root message 2', 'Child of 1', 'Child of 1'];
    expect(getMessages).toHaveBeenCalled();
    expect(bulkSaveMessages).toHaveBeenCalledWith(
      expect.arrayContaining(
        expectedMessagesTexts.map((text) => expect.objectContaining({ text })),
      ),
    );
  });

  // Fetch failures must propagate to the caller rather than being swallowed.
  test('should handle errors during message fetching', async () => {
    getMessages.mockRejectedValue(new Error('Failed to fetch messages'));

    await expect(
      forkConversation({
        originalConvoId: 'abc123',
        targetMessageId: '3',
        requestUserId: 'user1',
      }),
    ).rejects.toThrow('Failed to fetch messages');
  });
});
|
||||
|
||||
// Shared fixture with intentionally shuffled insertion order (array order ≠ tree order):
// ├── [7]: Root
// |   ├── [5] ── children [2], [3] ── child [10]
// |   └── [6] ── children [1], [4]
// └── [8]: Root ── child [9]
const mockMessagesComplex = [
  { messageId: '7', parentMessageId: Constants.NO_PARENT, text: 'Message 7' },
  { messageId: '8', parentMessageId: Constants.NO_PARENT, text: 'Message 8' },
  { messageId: '5', parentMessageId: '7', text: 'Message 5' },
  { messageId: '6', parentMessageId: '7', text: 'Message 6' },
  { messageId: '9', parentMessageId: '8', text: 'Message 9' },
  { messageId: '2', parentMessageId: '5', text: 'Message 2' },
  { messageId: '3', parentMessageId: '5', text: 'Message 3' },
  { messageId: '1', parentMessageId: '6', text: 'Message 1' },
  { messageId: '4', parentMessageId: '6', text: 'Message 4' },
  { messageId: '10', parentMessageId: '3', text: 'Message 10' },
];
|
||||
|
||||
// getMessagesUpToTargetLevel collects every message whose depth is <= the target
// message's depth (breadth-first "levels", not just the ancestor chain).
describe('getMessagesUpToTargetLevel', () => {
  // Target '5' is at level 1, so both roots plus all level-1 nodes are returned.
  test('should get all messages up to target level', async () => {
    const result = getMessagesUpToTargetLevel(mockMessagesComplex, '5');
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      '[getMessagesUpToTargetLevel] should get all messages up to target level\n',
      mappedResult,
    );
    console.debug('mockMessages\n', printMessageTree(mockMessagesComplex));
    console.debug('result\n', printMessageTree(result));
    expect(mappedResult).toEqual(['7', '8', '5', '6', '9']);
  });

  // '10' is the deepest node, so nothing is excluded.
  test('should get all messages if target is deepest level', async () => {
    const result = getMessagesUpToTargetLevel(mockMessagesComplex, '10');
    expect(result.length).toEqual(mockMessagesComplex.length);
  });

  // A one-element list whose only member is the target returns just that member,
  // even though its parent ('3') is absent from the input.
  test('should return target if only message', async () => {
    const result = getMessagesUpToTargetLevel(
      [mockMessagesComplex[mockMessagesComplex.length - 1]],
      '10',
    );
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      '[getMessagesUpToTargetLevel] should return target if only message\n',
      mappedResult,
    );
    // NOTE(review): this logs the module-level `mockMessages` fixture, while the
    // assertion uses a slice of `mockMessagesComplex` — debug output only.
    console.debug('mockMessages\n', printMessageTree(mockMessages));
    console.debug('result\n', printMessageTree(result));
    expect(mappedResult).toEqual(['10']);
  });

  test('should return empty array if target message ID does not exist', async () => {
    const result = getMessagesUpToTargetLevel(mockMessagesComplex, '123');
    expect(result).toEqual([]);
  });

  // A root-level target yields only the root level (both roots).
  test('should return correct messages when target is a root message', async () => {
    const result = getMessagesUpToTargetLevel(mockMessagesComplex, '7');
    const mappedResult = result.map((msg) => msg.messageId);
    expect(mappedResult).toEqual(['7', '8']);
  });

  test('should correctly handle single message with non-matching ID', async () => {
    const singleMessage = [
      { messageId: '30', parentMessageId: Constants.NO_PARENT, text: 'Message 30' },
    ];
    const result = getMessagesUpToTargetLevel(singleMessage, '31');
    expect(result).toEqual([]);
  });

  // Cycles must terminate and still return every node exactly once (order unspecified).
  test('should correctly handle case with circular dependencies', async () => {
    const circularMessages = [
      { messageId: '40', parentMessageId: '42', text: 'Message 40' },
      { messageId: '41', parentMessageId: '40', text: 'Message 41' },
      { messageId: '42', parentMessageId: '41', text: 'Message 42' },
    ];
    const result = getMessagesUpToTargetLevel(circularMessages, '40');
    const mappedResult = result.map((msg) => msg.messageId);
    expect(new Set(mappedResult)).toEqual(new Set(['40', '41', '42']));
  });

  // A straight chain with the target at the bottom returns the whole chain in order.
  test('should return all messages when all are interconnected and target is deep in hierarchy', async () => {
    const interconnectedMessages = [
      { messageId: '50', parentMessageId: Constants.NO_PARENT, text: 'Root Message' },
      { messageId: '51', parentMessageId: '50', text: 'Child Level 1' },
      { messageId: '52', parentMessageId: '51', text: 'Child Level 2' },
      { messageId: '53', parentMessageId: '52', text: 'Child Level 3' },
    ];
    const result = getMessagesUpToTargetLevel(interconnectedMessages, '53');
    const mappedResult = result.map((msg) => msg.messageId);
    expect(mappedResult).toEqual(['50', '51', '52', '53']);
  });
});
|
||||
|
||||
// getAllMessagesUpToParent walks from the target up to the root, keeping ancestors
// AND the siblings of each ancestor ("indirect ancestors"), per the expectations below.
describe('getAllMessagesUpToParent', () => {
  // Local fixture; intentionally shadows the module-level mockMessages. Shape:
  // ├── [11] ── [13] ── ([15], [16] ── [18] ── [19] ── [20], [21])
  // └── [12] ── [14] ── [17]
  const mockMessages = [
    { messageId: '11', parentMessageId: Constants.NO_PARENT, text: 'Message 11' },
    { messageId: '12', parentMessageId: Constants.NO_PARENT, text: 'Message 12' },
    { messageId: '13', parentMessageId: '11', text: 'Message 13' },
    { messageId: '14', parentMessageId: '12', text: 'Message 14' },
    { messageId: '15', parentMessageId: '13', text: 'Message 15' },
    { messageId: '16', parentMessageId: '13', text: 'Message 16' },
    { messageId: '21', parentMessageId: '13', text: 'Message 21' },
    { messageId: '17', parentMessageId: '14', text: 'Message 17' },
    { messageId: '18', parentMessageId: '16', text: 'Message 18' },
    { messageId: '19', parentMessageId: '18', text: 'Message 19' },
    { messageId: '20', parentMessageId: '19', text: 'Message 20' },
  ];

  test('should handle empty message list', async () => {
    const result = getAllMessagesUpToParent([], '10');
    expect(result).toEqual([]);
  });

  test('should handle target message not found', async () => {
    const result = getAllMessagesUpToParent(mockMessages, 'invalid-id');
    expect(result).toEqual([]);
  });

  // Root targets have no ancestors, so the unrelated root '12' is excluded.
  test('should handle single level tree (no parents)', async () => {
    const result = getAllMessagesUpToParent(
      [
        { messageId: '11', parentMessageId: Constants.NO_PARENT, text: 'Message 11' },
        { messageId: '12', parentMessageId: Constants.NO_PARENT, text: 'Message 12' },
      ],
      '11',
    );
    const mappedResult = result.map((msg) => msg.messageId);
    expect(mappedResult).toEqual(['11']);
  });

  // Deep target '20': every ancestor on the 11 -> 13 -> 16 -> 18 -> 19 chain appears.
  test('should correctly retrieve messages in a deeply nested structure', async () => {
    const result = getAllMessagesUpToParent(mockMessages, '20');
    const mappedResult = result.map((msg) => msg.messageId);
    expect(mappedResult).toContain('11');
    expect(mappedResult).toContain('13');
    expect(mappedResult).toContain('16');
    expect(mappedResult).toContain('18');
    expect(mappedResult).toContain('19');
    expect(mappedResult).toContain('20');
  });

  test('should return only the target message if it has no parent', async () => {
    const result = getAllMessagesUpToParent(mockMessages, '11');
    const mappedResult = result.map((msg) => msg.messageId);
    expect(mappedResult).toEqual(['11']);
  });

  // A message with no parentMessageId field at all behaves like a root.
  test('should handle messages without a parent ID defined', async () => {
    const additionalMessages = [
      ...mockMessages,
      { messageId: '22', text: 'Message 22' }, // No parentMessageId field
    ];
    const result = getAllMessagesUpToParent(additionalMessages, '22');
    const mappedResult = result.map((msg) => msg.messageId);
    expect(mappedResult).toEqual(['22']);
  });

  // Siblings of ancestors ('15' and '21', children of ancestor '13') are included
  // alongside the direct chain 11 -> 13 -> 16 -> 18.
  test('should retrieve all messages from the target to the root (including indirect ancestors)', async () => {
    const result = getAllMessagesUpToParent(mockMessages, '18');
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      '[getAllMessagesUpToParent] should retrieve all messages from the target to the root\n',
      mappedResult,
    );
    console.debug('mockMessages\n', printMessageTree(mockMessages));
    console.debug('result\n', printMessageTree(result));
    expect(mappedResult).toEqual(['11', '13', '15', '16', '21', '18']);
  });

  // Cycles must terminate, with no duplicates in the result.
  test('should handle circular dependencies gracefully', () => {
    const mockMessages = [
      { messageId: '1', parentMessageId: '2' },
      { messageId: '2', parentMessageId: '3' },
      { messageId: '3', parentMessageId: '1' },
    ];

    const targetMessageId = '1';
    const result = getAllMessagesUpToParent(mockMessages, targetMessageId);

    const uniqueIds = new Set(result.map((msg) => msg.messageId));
    expect(uniqueIds.size).toBe(result.length);
    expect(result.map((msg) => msg.messageId).sort()).toEqual(['1', '2', '3'].sort());
  });

  // A lone target whose parent is missing from the list still returns itself.
  test('should return target if only message', async () => {
    const result = getAllMessagesUpToParent([mockMessages[mockMessages.length - 1]], '20');
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      '[getAllMessagesUpToParent] should return target if only message\n',
      mappedResult,
    );
    console.debug('mockMessages\n', printMessageTree(mockMessages));
    console.debug('result\n', printMessageTree(result));
    expect(mappedResult).toEqual(['20']);
  });
});
|
||||
|
||||
// BaseClient.getMessagesForConversation returns only the direct ancestor chain
// (no sibling branches), optionally stopping early at a summarizing message.
describe('getMessagesForConversation', () => {
  // Local fixture; intentionally shadows the module-level mockMessages. Shape:
  // ├── [11] ── [13] ── ([15], [16] ── [18] ── [19] ── [20], [21])
  // └── [12] ── [14] ── [17]
  const mockMessages = [
    { messageId: '11', parentMessageId: Constants.NO_PARENT, text: 'Message 11' },
    { messageId: '12', parentMessageId: Constants.NO_PARENT, text: 'Message 12' },
    { messageId: '13', parentMessageId: '11', text: 'Message 13' },
    { messageId: '14', parentMessageId: '12', text: 'Message 14' },
    { messageId: '15', parentMessageId: '13', text: 'Message 15' },
    { messageId: '16', parentMessageId: '13', text: 'Message 16' },
    { messageId: '21', parentMessageId: '13', text: 'Message 21' },
    { messageId: '17', parentMessageId: '14', text: 'Message 17' },
    { messageId: '18', parentMessageId: '16', text: 'Message 18' },
    { messageId: '19', parentMessageId: '18', text: 'Message 19' },
    { messageId: '20', parentMessageId: '19', text: 'Message 20' },
  ];

  // Chain from '18' back to root: 11 -> 13 -> 16 -> 18, no siblings.
  test('should provide the direct path to the target without branches', async () => {
    const result = BaseClient.getMessagesForConversation({
      messages: mockMessages,
      parentMessageId: '18',
    });
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      '[getMessagesForConversation] should provide the direct path to the target without branches\n',
      mappedResult,
    );
    console.debug('mockMessages\n', printMessageTree(mockMessages));
    console.debug('result\n', printMessageTree(result));
    expect(new Set(mappedResult)).toEqual(new Set(['11', '13', '16', '18']));
  });

  // Lone target with its parent absent from the list still returns itself.
  test('should return target if only message', async () => {
    const result = BaseClient.getMessagesForConversation({
      messages: [mockMessages[mockMessages.length - 1]],
      parentMessageId: '20',
    });
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      '[getMessagesForConversation] should return target if only message\n',
      mappedResult,
    );
    console.debug('mockMessages\n', printMessageTree(mockMessages));
    console.debug('result\n', printMessageTree(result));
    expect(new Set(mappedResult)).toEqual(new Set(['20']));
  });

  // A 100 <-> 101 cycle must be broken after each node is visited once.
  test('should break on detecting a circular dependency', async () => {
    const mockMessagesWithCycle = [
      ...mockMessagesComplex,
      { messageId: '100', parentMessageId: '101', text: 'Message 100' },
      { messageId: '101', parentMessageId: '100', text: 'Message 101' }, // introduces circular dependency
    ];

    const result = BaseClient.getMessagesForConversation({
      messages: mockMessagesWithCycle,
      parentMessageId: '100',
    });
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      '[getMessagesForConversation] should break on detecting a circular dependency\n',
      mappedResult,
    );
    expect(mappedResult).toEqual(['101', '100']);
  });

  // Testing with mockMessagesComplex
  test('should correctly find the conversation path including root messages', async () => {
    const result = BaseClient.getMessagesForConversation({
      messages: mockMessagesComplex,
      parentMessageId: '2',
    });
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      '[getMessagesForConversation] should correctly find the conversation path including root messages\n',
      mappedResult,
    );
    expect(new Set(mappedResult)).toEqual(new Set(['7', '5', '2']));
  });

  // Testing summary feature
  test('should stop at summary if option is enabled', async () => {
    const messagesWithSummary = [
      ...mockMessagesComplex,
      { messageId: '11', parentMessageId: '7', text: 'Message 11', summary: 'Summary for 11' },
    ];

    const result = BaseClient.getMessagesForConversation({
      messages: messagesWithSummary,
      parentMessageId: '11',
      summary: true,
    });
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      '[getMessagesForConversation] should stop at summary if option is enabled\n',
      mappedResult,
    );
    expect(mappedResult).toEqual(['11']); // Should include only the summarizing message
  });

  // Testing no parent condition
  test('should return only the root message if no parent exists', async () => {
    const result = BaseClient.getMessagesForConversation({
      messages: mockMessagesComplex,
      parentMessageId: '8',
    });
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      '[getMessagesForConversation] should return only the root message if no parent exists\n',
      mappedResult,
    );
    expect(mappedResult).toEqual(['8']); // The message with no parent in the thread
  });
});
|
||||
|
||||
// splitAtTargetLevel keeps every message at the target's depth or deeper —
// the "bottom half" of the tree from the target's level down.
describe('splitAtTargetLevel', () => {
  /* const mockMessagesComplex = [
    { messageId: '7', parentMessageId: Constants.NO_PARENT, text: 'Message 7' },
    { messageId: '8', parentMessageId: Constants.NO_PARENT, text: 'Message 8' },
    { messageId: '5', parentMessageId: '7', text: 'Message 5' },
    { messageId: '6', parentMessageId: '7', text: 'Message 6' },
    { messageId: '9', parentMessageId: '8', text: 'Message 9' },
    { messageId: '2', parentMessageId: '5', text: 'Message 2' },
    { messageId: '3', parentMessageId: '5', text: 'Message 3' },
    { messageId: '1', parentMessageId: '6', text: 'Message 1' },
    { messageId: '4', parentMessageId: '6', text: 'Message 4' },
    { messageId: '10', parentMessageId: '3', text: 'Message 10' },
  ];

  mockMessages
  ├── [7]: Root
  | ├── [5]: Child of 7
  | | ├── [2]: Child of 5
  | | └── [3]: Child of 5
  | | └── [10]: Child of 3
  | └── [6]: Child of 7
  | ├── [1]: Child of 6
  | └── [4]: Child of 6
  └── [8]: Root
  └── [9]: Child of 8
  */
  // Target '2' sits at level 2: all level-2 nodes plus the deeper '10' survive.
  test('should include target message level and all descendants (1/2)', () => {
    console.debug('splitAtTargetLevel: mockMessages\n', printMessageTree(mockMessagesComplex));
    const result = splitAtTargetLevel(mockMessagesComplex, '2');
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      'splitAtTargetLevel: include target message level and all descendants (1/2)\n',
      printMessageTree(result),
    );
    expect(mappedResult).toEqual(['2', '3', '1', '4', '10']);
  });

  // Target '5' sits at level 1: everything except the two roots survives.
  test('should include target message level and all descendants (2/2)', () => {
    console.debug('splitAtTargetLevel: mockMessages\n', printMessageTree(mockMessagesComplex));
    const result = splitAtTargetLevel(mockMessagesComplex, '5');
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      'splitAtTargetLevel: include target message level and all descendants (2/2)\n',
      printMessageTree(result),
    );
    expect(mappedResult).toEqual(['5', '6', '9', '2', '3', '1', '4', '10']);
  });

  // A root-level target keeps the entire tree.
  test('should handle when target message is root', () => {
    const result = splitAtTargetLevel(mockMessagesComplex, '7');
    console.debug('splitAtTargetLevel: target level is root message\n', printMessageTree(result));
    expect(result.length).toBe(mockMessagesComplex.length);
  });

  test('should handle when target message is deepest, lonely child', () => {
    const result = splitAtTargetLevel(mockMessagesComplex, '10');
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      'splitAtTargetLevel: target message is deepest, lonely child\n',
      printMessageTree(result),
    );
    expect(mappedResult).toEqual(['10']);
  });

  // Target '11' is at level 4; its level-mates 12-14 and the equally-deep '16'
  // are kept, while the shallower '15' (level 3) is not.
  test('should handle when target level is last with many neighbors', () => {
    const mockMessages = [
      ...mockMessagesComplex,
      { messageId: '11', parentMessageId: '10', text: 'Message 11' },
      { messageId: '12', parentMessageId: '10', text: 'Message 12' },
      { messageId: '13', parentMessageId: '10', text: 'Message 13' },
      { messageId: '14', parentMessageId: '10', text: 'Message 14' },
      { messageId: '15', parentMessageId: '4', text: 'Message 15' },
      // Fixed fixture typo: this entry previously reused the text 'Message 15'.
      { messageId: '16', parentMessageId: '15', text: 'Message 16' },
    ];
    const result = splitAtTargetLevel(mockMessages, '11');
    const mappedResult = result.map((msg) => msg.messageId);
    console.debug(
      'splitAtTargetLevel: should handle when target level is last with many neighbors\n',
      printMessageTree(result),
    );
    expect(mappedResult).toEqual(['11', '12', '13', '14', '16']);
  });

  test('should handle non-existent target message', () => {
    // Non-existent message ID
    const result = splitAtTargetLevel(mockMessagesComplex, '99');
    expect(result.length).toBe(0);
  });
});
|
||||
158
api/server/utils/import/importBatchBuilder.js
Normal file
158
api/server/utils/import/importBatchBuilder.js
Normal file
@@ -0,0 +1,158 @@
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
const { EModelEndpoint, Constants, openAISettings } = require('librechat-data-provider');
|
||||
const { bulkSaveConvos } = require('~/models/Conversation');
|
||||
const { bulkSaveMessages } = require('~/models/Message');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
 * Builds a fresh {@link ImportBatchBuilder} bound to the importing user.
 * Exists so callers (and tests) can inject an alternative factory.
 * @param {string} requestUserId - The ID of the user making the request.
 * @returns {ImportBatchBuilder} A new, empty batch builder for that user.
 */
function createImportBatchBuilder(requestUserId) {
  const builder = new ImportBatchBuilder(requestUserId);
  return builder;
}
|
||||
|
||||
/**
 * Class for building a batch of conversations and messages and pushing them to DB for Conversation Import functionality.
 * Usage pattern: startConversation() -> saveMessage()/addUserMessage()/addGptMessage()
 * per message -> finishConversation() -> saveBatch(). Multiple conversations may be
 * accumulated before a single saveBatch() call.
 */
class ImportBatchBuilder {
  /**
   * Creates an instance of ImportBatchBuilder.
   * @param {string} requestUserId - The ID of the user making the import request.
   */
  constructor(requestUserId) {
    this.requestUserId = requestUserId;
    this.conversations = []; // finished conversations awaiting saveBatch()
    this.messages = []; // all messages across conversations awaiting saveBatch()
  }

  /**
   * Starts a new conversation in the batch: assigns a fresh conversation ID and
   * resets the parent pointer so the next saved message becomes a root message.
   * @param {string} [endpoint=EModelEndpoint.openAI] - The endpoint for the conversation. Defaults to EModelEndpoint.openAI.
   * @returns {void}
   */
  startConversation(endpoint) {
    // we are simplifying by using a single model for the entire conversation
    this.endpoint = endpoint || EModelEndpoint.openAI;
    this.conversationId = uuidv4();
    this.lastMessageId = Constants.NO_PARENT;
  }

  /**
   * Adds a user message to the current conversation, chained to the previously saved message.
   * @param {string} text - The text of the user message.
   * @returns {object} The saved message object.
   */
  addUserMessage(text) {
    const message = this.saveMessage({ text, sender: 'user', isCreatedByUser: true });
    return message;
  }

  /**
   * Adds a GPT message to the current conversation, chained to the previously saved message.
   * @param {string} text - The text of the GPT message.
   * @param {string} [model] - The model used for generating the GPT message. Defaults to openAISettings.model.default.
   * @param {string} [sender='GPT-3.5'] - The sender of the GPT message. Defaults to 'GPT-3.5'.
   * @returns {object} The saved message object.
   */
  addGptMessage(text, model, sender = 'GPT-3.5') {
    const message = this.saveMessage({
      text,
      sender,
      isCreatedByUser: false,
      model: model || openAISettings.model.default,
    });
    return message;
  }

  /**
   * Finishes the current conversation and adds it to the batch.
   * Note: `createdAt` is used for both createdAt and updatedAt, with
   * overrideTimestamp set so the persistence layer keeps these values.
   * @param {string} [title='Imported Chat'] - The title of the conversation. Defaults to 'Imported Chat'.
   * @param {Date} [createdAt] - The creation date of the conversation.
   * @param {TConversation} [originalConvo] - The original conversation.
   * @returns {{ conversation: TConversation, messages: TMessage[] }} The resulting conversation and messages.
   */
  finishConversation(title, createdAt, originalConvo = {}) {
    const convo = {
      ...originalConvo,
      user: this.requestUserId,
      conversationId: this.conversationId,
      title: title || 'Imported Chat',
      createdAt: createdAt,
      updatedAt: createdAt,
      overrideTimestamp: true,
      endpoint: this.endpoint,
      model: originalConvo.model ?? openAISettings.model.default,
    };
    // Drop any Mongo _id copied over from originalConvo so the insert gets a fresh one.
    convo._id && delete convo._id;
    this.conversations.push(convo);

    // NOTE: returns this.messages for ALL conversations in the batch, not just this one.
    return { conversation: convo, messages: this.messages };
  }

  /**
   * Saves the batch of conversations and messages to the DB.
   * @returns {Promise<void>} A promise that resolves when the batch is saved.
   * @throws {Error} If there is an error saving the batch.
   */
  async saveBatch() {
    try {
      await bulkSaveConvos(this.conversations);
      await bulkSaveMessages(this.messages);
      logger.debug(
        `user: ${this.requestUserId} | Added ${this.conversations.length} conversations and ${this.messages.length} messages to the DB.`,
      );
    } catch (error) {
      logger.error('Error saving batch', error);
      throw error;
    }
  }

  /**
   * Saves a message to the current conversation and advances the parent pointer,
   * so subsequent messages chain onto this one unless parentMessageId is given.
   * @param {object} messageDetails - The details of the message.
   * @param {string} messageDetails.text - The text of the message.
   * @param {string} messageDetails.sender - The sender of the message.
   * @param {string} [messageDetails.messageId] - The ID of the current message.
   * @param {boolean} messageDetails.isCreatedByUser - Indicates whether the message is created by the user.
   * @param {string} [messageDetails.model] - The model used for generating the message.
   * @param {string} [messageDetails.endpoint] - The endpoint used for generating the message.
   * @param {string} [messageDetails.parentMessageId=this.lastMessageId] - The ID of the parent message.
   * @param {Partial<TMessage>} messageDetails.rest - Additional properties that may be included in the message.
   * @returns {object} The saved message object.
   */
  saveMessage({
    text,
    sender,
    isCreatedByUser,
    model,
    messageId,
    parentMessageId = this.lastMessageId,
    endpoint,
    ...rest
  }) {
    const newMessageId = messageId ?? uuidv4();
    const message = {
      ...rest,
      parentMessageId,
      messageId: newMessageId,
      conversationId: this.conversationId,
      isCreatedByUser: isCreatedByUser,
      // NOTE(review): `this.model` is never assigned anywhere in this class, so
      // when `model` is falsy this resolves to undefined — confirm intended.
      model: model || this.model,
      user: this.requestUserId,
      endpoint: endpoint ?? this.endpoint,
      unfinished: false,
      isEdited: false,
      error: false,
      sender,
      text,
    };
    // Drop any Mongo _id carried in via ...rest so the insert gets a fresh one.
    message._id && delete message._id;
    this.lastMessageId = newMessageId;
    this.messages.push(message);
    return message;
  }
}
|
||||
|
||||
// Expose both the class (for typing/subclassing) and the factory (preferred entry point).
module.exports = { ImportBatchBuilder, createImportBatchBuilder };
|
||||
295
api/server/utils/import/importers.js
Normal file
295
api/server/utils/import/importers.js
Normal file
@@ -0,0 +1,295 @@
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
const { EModelEndpoint, Constants, openAISettings } = require('librechat-data-provider');
|
||||
const { createImportBatchBuilder } = require('./importBatchBuilder');
|
||||
const logger = require('~/config/winston');
|
||||
|
||||
/**
 * Selects the importer matching the shape of the uploaded JSON payload.
 *
 * @param {Object} jsonData - The JSON data to import.
 * @returns {Function} - The importer function.
 * @throws {Error} - If the import type is not supported.
 */
function getImporter(jsonData) {
  // A bare array of conversation objects is the ChatGPT export format.
  const isChatGptExport = Array.isArray(jsonData);
  if (isChatGptExport) {
    logger.info('Importing ChatGPT conversation');
    return importChatGptConvo;
  }

  // ChatbotUI V1 exports carry a version field and a `history` array.
  const isChatbotUiExport = Boolean(jsonData.version) && Array.isArray(jsonData.history);
  if (isChatbotUiExport) {
    logger.info('Importing ChatbotUI conversation');
    return importChatBotUiConvo;
  }

  // LibreChat exports expose a conversation ID and a message tree.
  const isLibreChatExport = Boolean(jsonData.conversationId && jsonData.messagesTree);
  if (isLibreChatExport) {
    logger.info('Importing LibreChat conversation');
    return importLibreChatConvo;
  }

  throw new Error('Unsupported import type');
}
|
||||
|
||||
/**
 * Imports a chatbot-ui V1 conversation export and saves it to the database.
 * Tested against chatbot-ui V1 exports:
 * https://github.com/mckaywrigley/chatbot-ui/tree/b865b0555f53957e96727bc0bbb369c9eaecd83b#legacy-code
 *
 * @param {Object} jsonData - The JSON data containing the chatbot conversation.
 * @param {string} requestUserId - The ID of the user making the import request.
 * @param {Function} [builderFactory=createImportBatchBuilder] - The factory function to create an import batch builder.
 * @returns {Promise<void>} - A promise that resolves when the import is complete.
 * @throws {Error} - If there is an error creating the conversation from the JSON file.
 */
async function importChatBotUiConvo(
  jsonData,
  requestUserId,
  builderFactory = createImportBatchBuilder,
) {
  try {
    /** @type {ImportBatchBuilder} */
    const batch = builderFactory(requestUserId);

    for (const chat of jsonData.history) {
      // Each history entry is one conversation; messages are replayed in order.
      batch.startConversation(EModelEndpoint.openAI);
      for (const { role, content } of chat.messages) {
        if (role === 'assistant') {
          batch.addGptMessage(content, chat.model.id);
        } else if (role === 'user') {
          batch.addUserMessage(content);
        }
        // Other roles (e.g. system) are intentionally ignored.
      }
      batch.finishConversation(chat.name, new Date());
    }

    await batch.saveBatch();
    logger.info(`user: ${requestUserId} | ChatbotUI conversation imported`);
  } catch (error) {
    // Errors are logged, not rethrown: a failed import does not surface to the caller.
    logger.error(`user: ${requestUserId} | Error creating conversation from ChatbotUI file`, error);
  }
}
|
||||
|
||||
/**
 * Imports a LibreChat conversation from JSON.
 *
 * @param {Object} jsonData - The JSON data representing the conversation.
 *   Assumed shape: { title, options: { model }, messagesTree: TMessage[] } —
 *   NOTE(review): `jsonData.options.model` is read unguarded below; confirm
 *   `options` is always present in LibreChat exports.
 * @param {string} requestUserId - The ID of the user making the import request.
 * @param {Function} [builderFactory=createImportBatchBuilder] - The factory function to create an import batch builder.
 * @returns {Promise<void>} - A promise that resolves when the import is complete.
 */
async function importLibreChatConvo(
  jsonData,
  requestUserId,
  builderFactory = createImportBatchBuilder,
) {
  try {
    /** @type {ImportBatchBuilder} */
    const importBatchBuilder = builderFactory(requestUserId);
    importBatchBuilder.startConversation(EModelEndpoint.openAI);

    // Timestamp of the first non-empty message encountered; later used as the
    // imported conversation's creation date in finishConversation.
    let firstMessageDate = null;

    // Depth-first walk of the export's message tree; each saved message's new ID
    // becomes the parentMessageId for its children in the imported batch.
    const traverseMessages = (messages, parentMessageId = null) => {
      for (const message of messages) {
        // NOTE(review): a message with empty text is skipped entirely, which also
        // drops its children (the recursion below never runs for it) — confirm intended.
        if (!message.text) {
          continue;
        }

        let savedMessage;
        if (message.sender?.toLowerCase() === 'user') {
          savedMessage = importBatchBuilder.saveMessage({
            text: message.text,
            sender: 'user',
            isCreatedByUser: true,
            parentMessageId: parentMessageId,
          });
        } else {
          // Non-user senders are treated as model output and tagged with the
          // export's configured model.
          savedMessage = importBatchBuilder.saveMessage({
            text: message.text,
            sender: message.sender,
            isCreatedByUser: false,
            model: jsonData.options.model,
            parentMessageId: parentMessageId,
          });
        }

        if (!firstMessageDate) {
          firstMessageDate = new Date(message.createdAt);
        }

        if (message.children) {
          traverseMessages(message.children, savedMessage.messageId);
        }
      }
    };

    traverseMessages(jsonData.messagesTree);

    importBatchBuilder.finishConversation(jsonData.title, firstMessageDate);
    await importBatchBuilder.saveBatch();
    logger.debug(`user: ${requestUserId} | Conversation "${jsonData.title}" imported`);
  } catch (error) {
    // Errors are logged, not rethrown: a failed import does not surface to the caller.
    logger.error(`user: ${requestUserId} | Error creating conversation from LibreChat file`, error);
  }
}
|
||||
|
||||
/**
 * Imports ChatGPT conversations from exported JSON data.
 * Creates a batch builder, processes every conversation in the array, then persists the batch.
 *
 * @param {ChatGPTConvo[]} jsonData - Array of conversation objects to be imported.
 * @param {string} requestUserId - The ID of the user who initiated the import process.
 * @param {Function} [builderFactory=createImportBatchBuilder] - Factory for the import batch builder.
 * @returns {Promise<void>} - Resolves when all conversations have been imported; errors are logged, not rethrown.
 */
async function importChatGptConvo(
  jsonData,
  requestUserId,
  builderFactory = createImportBatchBuilder,
) {
  try {
    const batch = builderFactory(requestUserId);
    // processConversation is synchronous; conversations are handled in export order.
    jsonData.forEach((conversation) => processConversation(conversation, batch, requestUserId));
    await batch.saveBatch();
  } catch (error) {
    logger.error(`user: ${requestUserId} | Error creating conversation from imported file`, error);
  }
}
|
||||
|
||||
/**
 * Processes a single conversation, adding messages to the batch builder based on author roles.
 * System messages and mapping nodes without a message are skipped; parent links are resolved
 * through a first-pass ID-to-UUID map so iteration order does not matter.
 *
 * @param {ChatGPTConvo} conv - A single conversation object that contains multiple messages and other details.
 * @param {ImportBatchBuilder} importBatchBuilder - The batch builder instance used to manage and batch conversation data.
 * @param {string} requestUserId - The ID of the user who initiated the import process.
 * @returns {void}
 */
function processConversation(conv, importBatchBuilder, requestUserId) {
  importBatchBuilder.startConversation(EModelEndpoint.openAI);

  // First pass: map every message ID with typed content to a new UUID.
  // Optional chaining guards nodes whose `message` or `content` is absent.
  const messageMap = new Map();
  for (const [id, mapping] of Object.entries(conv.mapping)) {
    if (mapping.message?.content?.content_type) {
      const newMessageId = uuidv4();
      messageMap.set(id, newMessageId);
    }
  }

  // Second pass: create the messages, resolving parents via the ID map.
  const messages = [];
  for (const [id, mapping] of Object.entries(conv.mapping)) {
    const role = mapping.message?.author?.role;
    if (!mapping.message) {
      messageMap.delete(id);
      continue;
    } else if (role === 'system') {
      // System prompts are not imported as visible messages.
      messageMap.delete(id);
      continue;
    }

    const newMessageId = messageMap.get(id);
    const parentMessageId =
      mapping.parent && messageMap.has(mapping.parent)
        ? messageMap.get(mapping.parent)
        : Constants.NO_PARENT;

    const messageText = formatMessageText(mapping.message);

    const isCreatedByUser = role === 'user';
    let sender = isCreatedByUser ? 'user' : 'GPT-3.5';
    // `metadata` may be missing on some export nodes; fall back to the default model.
    const model = mapping.message.metadata?.model_slug || openAISettings.model.default;
    if (model === 'gpt-4') {
      sender = 'GPT-4';
    }

    messages.push({
      messageId: newMessageId,
      parentMessageId,
      text: messageText,
      sender,
      isCreatedByUser,
      model,
      user: requestUserId,
      endpoint: EModelEndpoint.openAI,
    });
  }

  for (const message of messages) {
    importBatchBuilder.saveMessage(message);
  }

  // ChatGPT export timestamps are Unix seconds; Date expects milliseconds.
  importBatchBuilder.finishConversation(conv.title, new Date(conv.create_time * 1000));
}
|
||||
|
||||
/**
 * Processes text content of messages authored by an assistant, inserting citation links.
 * ChatGPT marks citations inline as 【idx†label】 (U+3010 idx U+2020 … U+3011); each marker
 * is replaced with a markdown link built from the citation metadata.
 *
 * @param {ChatGPTMessage} messageData - The message data containing metadata about citations.
 * @param {string} messageText - The original text of the message which may contain citation markers.
 * @returns {string} - The updated message text after processing for citations.
 */
function processAssistantMessage(messageData, messageText) {
  const citations = messageData.metadata.citations ?? [];

  for (const citation of citations) {
    const extra = citation.metadata?.extra;
    // `cited_message_idx` can legitimately be 0, so test for null/undefined
    // rather than falsiness; also skip citations that are not webpages.
    if (
      extra?.cited_message_idx == null ||
      (citation.metadata.type && citation.metadata.type !== 'webpage')
    ) {
      continue;
    }

    const pattern = new RegExp(`\\u3010${extra.cited_message_idx}\\u2020.+?\\u3011`, 'g');
    const replacement = ` ([${citation.metadata.title}](${citation.metadata.url}))`;
    messageText = messageText.replace(pattern, replacement);
  }

  return messageText;
}
|
||||
|
||||
/**
 * Formats the text content of an exported ChatGPT message based on its content type.
 * - 'text': joins the text parts, then resolves citations for assistant messages.
 * - 'code': wraps the source in a fenced code block tagged with its language.
 * - 'execution_output': renders the output as a quoted block.
 * - other types with parts: concatenates string parts and renders object parts as JSON.
 * - anything else: serializes the whole content object as JSON.
 *
 * @param {ChatGPTMessage} messageData - The message data.
 * @returns {string} - The updated message text after processing.
 */
function formatMessageText(messageData) {
  const isText = messageData.content.content_type === 'text';
  let messageText = '';

  if (isText && messageData.content.parts) {
    messageText = messageData.content.parts.join(' ');
  } else if (messageData.content.content_type === 'code') {
    messageText = `\`\`\`${messageData.content.language}\n${messageData.content.text}\n\`\`\``;
  } else if (messageData.content.content_type === 'execution_output') {
    messageText = `Execution Output:\n> ${messageData.content.text}`;
  } else if (messageData.content.parts) {
    for (const part of messageData.content.parts) {
      if (typeof part === 'string') {
        messageText += part + ' ';
      } else if (typeof part === 'object') {
        // Append (fix: previously `=` overwrote and lost earlier string parts).
        messageText += `\`\`\`json\n${JSON.stringify(part, null, 2)}\n\`\`\`\n`;
      }
    }
    messageText = messageText.trim();
  } else {
    messageText = `\`\`\`json\n${JSON.stringify(messageData.content, null, 2)}\n\`\`\``;
  }

  if (isText && messageData.author.role !== 'user') {
    messageText = processAssistantMessage(messageData, messageText);
  }

  return messageText;
}
|
||||
|
||||
// Public API: getImporter selects the appropriate importer for a given export payload.
module.exports = { getImporter };
|
||||
246
api/server/utils/import/importers.spec.js
Normal file
246
api/server/utils/import/importers.spec.js
Normal file
@@ -0,0 +1,246 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { EModelEndpoint, Constants } = require('librechat-data-provider');
|
||||
const { ImportBatchBuilder } = require('./importBatchBuilder');
|
||||
const { getImporter } = require('./importers');
|
||||
|
||||
// Mocking the ImportBatchBuilder class and its methods.
// Every method is a jest.fn() resolving to undefined, so tests can assert
// call counts and arguments without touching the database.
jest.mock('./importBatchBuilder', () => {
  return {
    ImportBatchBuilder: jest.fn().mockImplementation(() => {
      return {
        startConversation: jest.fn().mockResolvedValue(undefined),
        addUserMessage: jest.fn().mockResolvedValue(undefined),
        addGptMessage: jest.fn().mockResolvedValue(undefined),
        saveMessage: jest.fn().mockResolvedValue(undefined),
        finishConversation: jest.fn().mockResolvedValue(undefined),
        saveBatch: jest.fn().mockResolvedValue(undefined),
      };
    }),
  };
});
|
||||
|
||||
describe('importChatGptConvo', () => {
  it('should import conversation correctly', async () => {
    // Expected counts are derived from the chatgpt-export.json fixture.
    const expectedNumberOfMessages = 19;
    const expectedNumberOfConversations = 2;
    // Given
    const jsonData = JSON.parse(
      fs.readFileSync(path.join(__dirname, '__data__', 'chatgpt-export.json'), 'utf8'),
    );
    const requestUserId = 'user-123';
    const mockedBuilderFactory = jest.fn().mockReturnValue(new ImportBatchBuilder(requestUserId));

    // When
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId, mockedBuilderFactory);

    // Then
    expect(mockedBuilderFactory).toHaveBeenCalledWith(requestUserId);
    const mockImportBatchBuilder = mockedBuilderFactory.mock.results[0].value;

    expect(mockImportBatchBuilder.startConversation).toHaveBeenCalledWith(EModelEndpoint.openAI);
    expect(mockImportBatchBuilder.saveMessage).toHaveBeenCalledTimes(expectedNumberOfMessages); // Adjust expected number
    expect(mockImportBatchBuilder.finishConversation).toHaveBeenCalledTimes(
      expectedNumberOfConversations,
    ); // Adjust expected number
    expect(mockImportBatchBuilder.saveBatch).toHaveBeenCalled();
  });
  it('should maintain correct message hierarchy (tree parent/children relationship)', async () => {
    // Prepare test data with known hierarchy
    const jsonData = JSON.parse(
      fs.readFileSync(path.join(__dirname, '__data__', 'chatgpt-tree.json'), 'utf8'),
    );

    const requestUserId = 'user-123';
    const mockedBuilderFactory = jest.fn().mockReturnValue(new ImportBatchBuilder(requestUserId));

    // When
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId, mockedBuilderFactory);

    // Then
    expect(mockedBuilderFactory).toHaveBeenCalledWith(requestUserId);
    const mockImportBatchBuilder = mockedBuilderFactory.mock.results[0].value;

    const entries = Object.keys(jsonData[0].mapping);
    // Filter entries that should be processed (not system and have content)
    const messageEntries = entries.filter(
      (id) =>
        jsonData[0].mapping[id].message &&
        jsonData[0].mapping[id].message.author.role !== 'system' &&
        jsonData[0].mapping[id].message.content,
    );

    // Expect the saveMessage to be called for each valid entry
    expect(mockImportBatchBuilder.saveMessage).toHaveBeenCalledTimes(messageEntries.length);

    const idToUUIDMap = new Map();
    // Map original IDs to dynamically generated UUIDs.
    // NOTE(review): pairing by index assumes saveMessage calls occur in the same
    // order as Object.keys(mapping) iteration — confirm against the importer.
    mockImportBatchBuilder.saveMessage.mock.calls.forEach((call, index) => {
      const originalId = messageEntries[index];
      idToUUIDMap.set(originalId, call[0].messageId);
    });

    // Validate the UUID map contains all expected entries
    expect(idToUUIDMap.size).toBe(messageEntries.length);

    // Validate correct parent-child relationships
    messageEntries.forEach((id) => {
      const { parent } = jsonData[0].mapping[id];

      // Parents outside the map (e.g. system/root nodes) resolve to NO_PARENT.
      const expectedParentId = parent
        ? idToUUIDMap.get(parent) ?? Constants.NO_PARENT
        : Constants.NO_PARENT;

      const actualParentId = idToUUIDMap.get(id)
        ? mockImportBatchBuilder.saveMessage.mock.calls.find(
            (call) => call[0].messageId === idToUUIDMap.get(id),
          )[0].parentMessageId
        : Constants.NO_PARENT;

      expect(actualParentId).toBe(expectedParentId);
    });

    expect(mockImportBatchBuilder.saveBatch).toHaveBeenCalled();
  });
});
|
||||
|
||||
describe('importLibreChatConvo', () => {
  it('should import conversation correctly', async () => {
    // Expected counts are derived from the librechat-export.json fixture.
    const expectedNumberOfMessages = 6;
    const expectedNumberOfConversations = 1;

    // Given
    const jsonData = JSON.parse(
      fs.readFileSync(path.join(__dirname, '__data__', 'librechat-export.json'), 'utf8'),
    );
    const requestUserId = 'user-123';
    const mockedBuilderFactory = jest.fn().mockReturnValue(new ImportBatchBuilder(requestUserId));

    // When
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId, mockedBuilderFactory);

    // Then
    const mockImportBatchBuilder = mockedBuilderFactory.mock.results[0].value;
    expect(mockImportBatchBuilder.startConversation).toHaveBeenCalledWith(EModelEndpoint.openAI);
    expect(mockImportBatchBuilder.saveMessage).toHaveBeenCalledTimes(expectedNumberOfMessages); // Adjust expected number
    expect(mockImportBatchBuilder.finishConversation).toHaveBeenCalledTimes(
      expectedNumberOfConversations,
    ); // Adjust expected number
    expect(mockImportBatchBuilder.saveBatch).toHaveBeenCalled();
  });
  it('should maintain correct message hierarchy (tree parent/children relationship)', async () => {
    // Load test data
    const jsonData = JSON.parse(
      fs.readFileSync(path.join(__dirname, '__data__', 'librechat-tree.json'), 'utf8'),
    );
    const requestUserId = 'user-123';
    const mockedBuilderFactory = jest.fn().mockReturnValue(new ImportBatchBuilder(requestUserId));

    // When
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId, mockedBuilderFactory);

    // Then
    const mockImportBatchBuilder = mockedBuilderFactory.mock.results[0].value;

    // Create a map to track original message IDs to new UUIDs.
    // NOTE(review): relies on `originalMessageId` being present on the object
    // passed to saveMessage — verify against the importer/builder contract.
    const idToUUIDMap = new Map();
    mockImportBatchBuilder.saveMessage.mock.calls.forEach((call) => {
      const message = call[0];
      idToUUIDMap.set(message.originalMessageId, message.messageId);
    });

    // Function to recursively check children
    const checkChildren = (children, parentId) => {
      children.forEach((child) => {
        const childUUID = idToUUIDMap.get(child.messageId);
        const expectedParentId = idToUUIDMap.get(parentId) ?? null;
        const messageCall = mockImportBatchBuilder.saveMessage.mock.calls.find(
          (call) => call[0].messageId === childUUID,
        );

        const actualParentId = messageCall[0].parentMessageId;
        expect(actualParentId).toBe(expectedParentId);

        if (child.children && child.children.length > 0) {
          checkChildren(child.children, child.messageId);
        }
      });
    };

    // Start hierarchy validation from root messages
    checkChildren(jsonData.messagesTree, null); // Assuming root messages have no parent

    expect(mockImportBatchBuilder.saveBatch).toHaveBeenCalled();
  });
});
|
||||
|
||||
describe('importChatBotUiConvo', () => {
  it('should import custom conversation correctly', async () => {
    // Given
    const jsonData = JSON.parse(
      fs.readFileSync(path.join(__dirname, '__data__', 'chatbotui-export.json'), 'utf8'),
    );
    const requestUserId = 'custom-user-456';
    const mockedBuilderFactory = jest.fn().mockReturnValue(new ImportBatchBuilder(requestUserId));

    // When
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId, mockedBuilderFactory);

    // Then
    const mockImportBatchBuilder = mockedBuilderFactory.mock.results[0].value;
    expect(mockImportBatchBuilder.startConversation).toHaveBeenCalledWith('openAI');

    // User messages — expected text comes from the fixture.
    expect(mockImportBatchBuilder.addUserMessage).toHaveBeenCalledTimes(3);
    expect(mockImportBatchBuilder.addUserMessage).toHaveBeenNthCalledWith(
      1,
      'Hello what are you able to do?',
    );
    expect(mockImportBatchBuilder.addUserMessage).toHaveBeenNthCalledWith(
      3,
      'Give me the code that inverts binary tree in COBOL',
    );

    // GPT messages — the model id is taken per history item from the fixture.
    expect(mockImportBatchBuilder.addGptMessage).toHaveBeenCalledTimes(3);
    expect(mockImportBatchBuilder.addGptMessage).toHaveBeenNthCalledWith(
      1,
      expect.stringMatching(/^Hello! As an AI developed by OpenAI/),
      'gpt-4-1106-preview',
    );
    expect(mockImportBatchBuilder.addGptMessage).toHaveBeenNthCalledWith(
      3,
      expect.stringContaining('```cobol'),
      'gpt-3.5-turbo',
    );

    // One finishConversation call per history item in the fixture.
    expect(mockImportBatchBuilder.finishConversation).toHaveBeenCalledTimes(2);
    expect(mockImportBatchBuilder.finishConversation).toHaveBeenNthCalledWith(
      1,
      'Hello what are you able to do?',
      expect.any(Date),
    );
    expect(mockImportBatchBuilder.finishConversation).toHaveBeenNthCalledWith(
      2,
      'Give me the code that inverts ...',
      expect.any(Date),
    );

    expect(mockImportBatchBuilder.saveBatch).toHaveBeenCalled();
  });
});
|
||||
|
||||
describe('getImporter', () => {
  it('should throw an error if the import type is not supported', () => {
    // A payload matching none of the known export formats must be rejected.
    const unrecognizedData = { unsupported: 'data' };
    expect(() => getImporter(unrecognizedData)).toThrow('Unsupported import type');
  });
});
|
||||
5
api/server/utils/import/index.js
Normal file
5
api/server/utils/import/index.js
Normal file
@@ -0,0 +1,5 @@
|
||||
const importers = require('./importers');
|
||||
|
||||
module.exports = {
|
||||
...importers,
|
||||
};
|
||||
41
api/server/utils/import/jobDefinition.js
Normal file
41
api/server/utils/import/jobDefinition.js
Normal file
@@ -0,0 +1,41 @@
|
||||
const fs = require('fs').promises;
|
||||
const jobScheduler = require('~/server/utils/jobScheduler');
|
||||
const { getImporter } = require('./importers');
|
||||
const { indexSync } = require('~/lib/db');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const IMPORT_CONVERSATION_JOB_NAME = 'import conversation';
|
||||
|
||||
/**
 * Job definition for importing conversation(s) from an uploaded JSON file.
 * The uploaded file is always deleted afterwards, whether the import succeeds or fails.
 * @param {import('agenda').Job} job - The job object; `job.attrs.data` carries `filepath` and `requestUserId`.
 * @param {Function} done - The done function; receives the error on failure.
 */
const importConversationJob = async (job, done) => {
  const { filepath, requestUserId } = job.attrs.data;
  try {
    logger.debug(`user: ${requestUserId} | Importing conversation(s) from file...`);
    const jsonData = JSON.parse(await fs.readFile(filepath, 'utf8'));
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId);
    // Sync Meilisearch index
    await indexSync();
    logger.debug(`user: ${requestUserId} | Finished importing conversations`);
    done();
  } catch (error) {
    logger.error(`user: ${requestUserId} | Failed to import conversation: `, error);
    done(error);
  } finally {
    // Best-effort cleanup of the uploaded file; deletion failure is only logged.
    try {
      await fs.unlink(filepath);
    } catch (error) {
      logger.error(`user: ${requestUserId} | Failed to delete file: ${filepath}`, error);
    }
  }
};
|
||||
|
||||
// Call the jobScheduler.define function at startup so the job handler is
// registered before any job with this name is scheduled.
jobScheduler.define(IMPORT_CONVERSATION_JOB_NAME, importConversationJob);

// Only the job name is exported; callers schedule via jobScheduler.now(name, ...).
module.exports = { IMPORT_CONVERSATION_JOB_NAME };
|
||||
@@ -5,7 +5,6 @@ const handleText = require('./handleText');
|
||||
const cryptoUtils = require('./crypto');
|
||||
const citations = require('./citations');
|
||||
const sendEmail = require('./sendEmail');
|
||||
const mongoose = require('./mongoose');
|
||||
const queue = require('./queue');
|
||||
const files = require('./files');
|
||||
const math = require('./math');
|
||||
@@ -15,7 +14,6 @@ module.exports = {
|
||||
...cryptoUtils,
|
||||
...handleText,
|
||||
...citations,
|
||||
...mongoose,
|
||||
countTokens,
|
||||
removePorts,
|
||||
sendEmail,
|
||||
|
||||
99
api/server/utils/jobScheduler.js
Normal file
99
api/server/utils/jobScheduler.js
Normal file
@@ -0,0 +1,99 @@
|
||||
const Agenda = require('agenda');
|
||||
const { logger } = require('~/config');
|
||||
const mongodb = require('mongodb');
|
||||
|
||||
/**
 * Class for scheduling and running jobs via Agenda.
 * The workflow is as follows: start the job scheduler, define a job, and then
 * schedule the job using the defined job name.
 */
class JobScheduler {
  constructor() {
    this.agenda = new Agenda({ db: { address: process.env.MONGO_URI } });
  }

  /**
   * Starts the job scheduler.
   * Errors are logged rather than thrown so a startup failure does not crash the app.
   */
  async start() {
    try {
      logger.info('Starting Agenda...');
      await this.agenda.start();
      logger.info('Agenda successfully started and connected to MongoDB.');
    } catch (error) {
      logger.error('Failed to start Agenda:', error);
    }
  }

  /**
   * Schedules a job to start immediately.
   * @param {string} jobName - The name of the job to schedule.
   * @param {string} filepath - The filepath to pass to the job.
   * @param {string} userId - The ID of the user requesting the job.
   * @returns {Promise<{ id: string }>} - A promise that resolves with the ID of the scheduled job.
   * @throws {Error} - If the job fails to schedule.
   */
  async now(jobName, filepath, userId) {
    try {
      const job = await this.agenda.now(jobName, { filepath, requestUserId: userId });
      logger.debug(`Job '${job.attrs.name}' scheduled successfully.`);
      return { id: job.attrs._id.toString() };
    } catch (error) {
      throw new Error(`Failed to schedule job '${jobName}': ${error}`);
    }
  }

  /**
   * Gets the status of a job.
   * @param {string} jobId - The ID of the job to get the status of.
   * @returns {Promise<{ id: string, userId: string, name: string, failReason: string, status: string } | null>} - A promise that resolves with the job status or null if the job is not found.
   * @throws {Error} - If multiple jobs are found.
   */
  async getJobStatus(jobId) {
    const job = await this.agenda.jobs({ _id: new mongodb.ObjectId(jobId) });
    if (!job || job.length === 0) {
      return null;
    }

    if (job.length > 1) {
      // This should never happen: _id is unique
      throw new Error('Multiple jobs found.');
    }

    const [found] = job;
    const jobDetails = {
      // Fix: the job's id lives on `attrs._id` (see `now()` above),
      // not directly on the Job instance.
      id: found.attrs._id,
      userId: found.attrs.data.requestUserId,
      name: found.attrs.name,
      failReason: found.attrs.failReason,
      // Derive a coarse lifecycle state from Agenda's bookkeeping timestamps.
      status: !found.attrs.lastRunAt
        ? 'scheduled'
        : found.attrs.failedAt
        ? 'failed'
        : found.attrs.lastFinishedAt
        ? 'completed'
        : 'running',
    };

    return jobDetails;
  }

  /**
   * Defines a new job.
   * @param {string} name - The name of the job.
   * @param {Function} jobFunction - The function to run when the job is executed.
   */
  define(name, jobFunction) {
    this.agenda.define(name, async (job, done) => {
      try {
        await jobFunction(job, done);
      } catch (error) {
        // Ensure a thrown handler still completes the job with the error.
        logger.error(`Failed to run job '${name}': ${error}`);
        done(error);
      }
    });
  }
}
|
||||
|
||||
// Singleton scheduler instance, started eagerly at module load.
const jobScheduler = new JobScheduler();
// NOTE(review): start() is fire-and-forget here; it catches and logs its own
// errors, so the floating promise cannot reject unhandled.
jobScheduler.start();

module.exports = jobScheduler;
|
||||
@@ -1,25 +0,0 @@
|
||||
const mongoose = require('mongoose');
|
||||
/**
 * Executes a database operation within a MongoDB transaction session.
 * @param {(...args: any[]) => Promise<any>} method - The method to execute. The session is appended as the LAST argument (after `args`).
 * @param {...any} args - Additional arguments to pass to the method.
 * @returns {Promise<any>} - The result of the executed method.
 */
async function withSession(method, ...args) {
  const session = await mongoose.startSession();
  session.startTransaction();
  try {
    // Session is appended after the caller's own arguments.
    const result = await method(...args, session);
    await session.commitTransaction();
    return result;
  } catch (error) {
    // Abort only while a transaction is still active (a failed commit may
    // already have ended it), then propagate the original error.
    if (session.inTransaction()) {
      await session.abortTransaction();
    }
    throw error;
  } finally {
    await session.endSession();
  }
}

module.exports = { withSession };
|
||||
@@ -1,6 +1,7 @@
|
||||
// See .env.test.example for an example of the '.env.test' file.
|
||||
require('dotenv').config({ path: './test/.env.test' });
|
||||
|
||||
process.env.MONGO_URI = 'mongodb://127.0.0.1:27017/dummy-uri';
|
||||
process.env.BAN_VIOLATIONS = 'true';
|
||||
process.env.BAN_DURATION = '7200000';
|
||||
process.env.BAN_INTERVAL = '20';
|
||||
|
||||
132
api/typedefs.js
132
api/typedefs.js
@@ -7,6 +7,11 @@
|
||||
* @typedef {import('openai').OpenAI} OpenAI
|
||||
* @memberof typedefs
|
||||
*/
|
||||
/**
|
||||
* @exports AxiosResponse
|
||||
* @typedef {import('axios').AxiosResponse} AxiosResponse
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports Anthropic
|
||||
@@ -295,6 +300,18 @@
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports TStartupConfig
|
||||
* @typedef {import('librechat-data-provider').TStartupConfig} TStartupConfig
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports TConfigDefaults
|
||||
* @typedef {import('librechat-data-provider').TConfigDefaults} TConfigDefaults
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports TPlugin
|
||||
* @typedef {import('librechat-data-provider').TPlugin} TPlugin
|
||||
@@ -337,6 +354,18 @@
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports TConversation
|
||||
* @typedef {import('librechat-data-provider').TConversation} TConversation
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports TModelSpec
|
||||
* @typedef {import('librechat-data-provider').TModelSpec} TModelSpec
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports TPlugin
|
||||
* @typedef {import('librechat-data-provider').TPlugin} TPlugin
|
||||
@@ -615,6 +644,12 @@
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports ImportBatchBuilder
|
||||
* @typedef {import('./server/utils/import/importBatchBuilder.js').ImportBatchBuilder} ImportBatchBuilder
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports Thread
|
||||
* @typedef {Object} Thread
|
||||
@@ -1145,3 +1180,100 @@
|
||||
* @param {onTokenProgress} params.onTokenProgress
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} OllamaModelDetails
|
||||
* @property {string} parent_model - The identifier for the parent model, if any.
|
||||
* @property {string} format - The format of the model.
|
||||
* @property {string} family - The primary family to which the model belongs.
|
||||
* @property {string[]} families - An array of families that include the model.
|
||||
* @property {string} parameter_size - The size of the parameters of the model.
|
||||
* @property {string} quantization_level - The level of quantization of the model.
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} OllamaModel
|
||||
* @property {string} name - The name of the model, including version tag.
|
||||
* @property {string} model - A redundant copy of the name, including version tag.
|
||||
* @property {string} modified_at - The ISO string representing the last modification date.
|
||||
* @property {number} size - The size of the model in bytes.
|
||||
* @property {string} digest - The digest hash of the model.
|
||||
* @property {OllamaModelDetails} details - Detailed information about the model.
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} OllamaListResponse
|
||||
 * @property {OllamaModel[]} models - The list of models available.
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ChatGPTAuthor
|
||||
* @property {string} role - The role of the author (e.g., 'assistant', 'system', 'user').
|
||||
* @property {?string} name - The name of the author, if available.
|
||||
* @property {Object} metadata - Additional metadata related to the author.
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ChatGPTContentPart
|
||||
* @property {string} content_type - The type of content (e.g., 'text').
|
||||
* @property {string[]} parts - The textual parts of the message.
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ChatGPTMetadata
|
||||
* @property {boolean} is_visually_hidden_from_conversation - Indicates if the message should be hidden.
|
||||
* @property {?Array<Object>} citations - Potential citations included in the message.
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ChatGPTMessage
|
||||
* @property {string} id - Unique identifier for the message.
|
||||
* @property {?ChatGPTAuthor} author - The author of the message.
|
||||
* @property {?number} create_time - Creation time as a Unix timestamp.
|
||||
* @property {?number} update_time - Last update time as a Unix timestamp.
|
||||
* @property {ChatGPTContentPart} content - Content of the message.
|
||||
* @property {string} status - Status of the message (e.g., 'finished_successfully').
|
||||
* @property {boolean} end_turn - Indicates if it's the end of a conversation turn.
|
||||
* @property {number} weight - A numerical value representing the weight/importance of the message.
|
||||
* @property {ChatGPTMetadata} metadata - Metadata associated with the message.
|
||||
* @property {string} recipient - Intended recipient of the message.
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ChatGPTMapping
|
||||
* @property {ChatGPTMessage} message - Details of the message.
|
||||
* @property {string} id - Identifier of the message.
|
||||
* @property {?string} parent - Parent message ID.
|
||||
* @property {string[]} children - Child message IDs.
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ChatGPTConvo
|
||||
* @property {string} title - Title of the conversation.
|
||||
* @property {number} create_time - Creation time of the conversation as a Unix timestamp.
|
||||
* @property {number} update_time - Last update time of the conversation as a Unix timestamp.
|
||||
* @property {Object.<string, ChatGPTMapping>} mapping - Mapping of message nodes within the conversation.
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/** Mutations */
|
||||
|
||||
/**
|
||||
* @exports TForkConvoResponse
|
||||
* @typedef {import('librechat-data-provider').TForkConvoResponse} TForkConvoResponse
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports TForkConvoRequest
|
||||
* @typedef {import('librechat-data-provider').TForkConvoRequest} TForkConvoRequest
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
@@ -6,7 +6,7 @@ function loadYaml(filepath) {
|
||||
let fileContents = fs.readFileSync(filepath, 'utf8');
|
||||
return yaml.load(fileContents);
|
||||
} catch (e) {
|
||||
// console.error(e);
|
||||
return e;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 30 KiB |
BIN
client/public/assets/apipie.png
Normal file
BIN
client/public/assets/apipie.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 34 KiB |
BIN
client/public/assets/mlx.png
Normal file
BIN
client/public/assets/mlx.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 82 KiB |
@@ -2,21 +2,24 @@ import { FileSources } from 'librechat-data-provider';
|
||||
import type { ColumnDef } from '@tanstack/react-table';
|
||||
import type { SetterOrUpdater } from 'recoil';
|
||||
import type {
|
||||
TSetOption as SetOption,
|
||||
TConversation,
|
||||
TMessage,
|
||||
TPreset,
|
||||
TLoginUser,
|
||||
TUser,
|
||||
EModelEndpoint,
|
||||
Action,
|
||||
TPreset,
|
||||
TPlugin,
|
||||
TMessage,
|
||||
TLoginUser,
|
||||
AuthTypeEnum,
|
||||
TConversation,
|
||||
EModelEndpoint,
|
||||
AuthorizationTypeEnum,
|
||||
TSetOption as SetOption,
|
||||
TokenExchangeMethodEnum,
|
||||
} from 'librechat-data-provider';
|
||||
import type { UseMutationResult } from '@tanstack/react-query';
|
||||
import type { LucideIcon } from 'lucide-react';
|
||||
|
||||
export type TPluginMap = Record<string, TPlugin>;
|
||||
|
||||
export type GenericSetter<T> = (value: T | ((currentValue: T) => T)) => void;
|
||||
|
||||
export type LastSelectedModels = Record<EModelEndpoint, string>;
|
||||
@@ -32,6 +35,16 @@ export enum IconContext {
|
||||
message = 'message',
|
||||
}
|
||||
|
||||
export type IconMapProps = {
|
||||
className?: string;
|
||||
iconURL?: string;
|
||||
context?: 'landing' | 'menu-item' | 'nav' | 'message';
|
||||
endpoint?: string | null;
|
||||
assistantName?: string;
|
||||
avatar?: string;
|
||||
size?: number;
|
||||
};
|
||||
|
||||
export type NavLink = {
|
||||
title: string;
|
||||
label?: string;
|
||||
@@ -311,6 +324,7 @@ export type Option = Record<string, unknown> & {
|
||||
};
|
||||
|
||||
export type OptionWithIcon = Option & { icon?: React.ReactNode };
|
||||
export type MentionOption = OptionWithIcon & { type: string; value: string; description?: string };
|
||||
|
||||
export type TOptionSettings = {
|
||||
showExamples?: boolean;
|
||||
|
||||
@@ -139,11 +139,6 @@ function ResetPassword() {
|
||||
type="password"
|
||||
id="confirm_password"
|
||||
aria-label={localize('com_auth_password_confirm')}
|
||||
// uncomment to prevent pasting in confirm field
|
||||
onPaste={(e) => {
|
||||
e.preventDefault();
|
||||
return false;
|
||||
}}
|
||||
{...register('confirm_password', {
|
||||
validate: (value) =>
|
||||
value === password || localize('com_auth_password_not_match'),
|
||||
|
||||
@@ -1,17 +1,30 @@
|
||||
import { useMemo } from 'react';
|
||||
import { useOutletContext } from 'react-router-dom';
|
||||
import { getConfigDefaults } from 'librechat-data-provider';
|
||||
import { useGetStartupConfig } from 'librechat-data-provider/react-query';
|
||||
import type { ContextType } from '~/common';
|
||||
import { EndpointsMenu, PresetsMenu, HeaderNewChat } from './Menus';
|
||||
import { EndpointsMenu, ModelSpecsMenu, PresetsMenu, HeaderNewChat } from './Menus';
|
||||
import HeaderOptions from './Input/HeaderOptions';
|
||||
|
||||
const defaultInterface = getConfigDefaults().interface;
|
||||
|
||||
export default function Header() {
|
||||
const { data: startupConfig } = useGetStartupConfig();
|
||||
const { navVisible } = useOutletContext<ContextType>();
|
||||
const modelSpecs = useMemo(() => startupConfig?.modelSpecs?.list ?? [], [startupConfig]);
|
||||
const interfaceConfig = useMemo(
|
||||
() => startupConfig?.interface ?? defaultInterface,
|
||||
[startupConfig],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="sticky top-0 z-10 flex h-14 w-full items-center justify-between bg-white p-2 font-semibold dark:bg-gray-800 dark:text-white">
|
||||
<div className="hide-scrollbar flex items-center gap-2 overflow-x-auto">
|
||||
{!navVisible && <HeaderNewChat />}
|
||||
<EndpointsMenu />
|
||||
<HeaderOptions />
|
||||
<PresetsMenu />
|
||||
{interfaceConfig.endpointsMenu && <EndpointsMenu />}
|
||||
{modelSpecs?.length > 0 && <ModelSpecsMenu modelSpecs={modelSpecs} />}
|
||||
{<HeaderOptions interfaceConfig={interfaceConfig} />}
|
||||
{interfaceConfig.presets && <PresetsMenu />}
|
||||
</div>
|
||||
{/* Empty div for spacing */}
|
||||
<div />
|
||||
|
||||
8
client/src/components/Chat/Input/ActiveSetting.tsx
Normal file
8
client/src/components/Chat/Input/ActiveSetting.tsx
Normal file
@@ -0,0 +1,8 @@
|
||||
export default function ActiveSetting() {
|
||||
return (
|
||||
<div className="text-token-text-tertiary space-x-2 overflow-hidden text-ellipsis text-sm font-light">
|
||||
Talking to{' '}
|
||||
<span className="text-token-text-secondary font-medium">[latest] Tailwind CSS GPT</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -17,23 +17,28 @@ import { mainTextareaId } from '~/common';
|
||||
import StopButton from './StopButton';
|
||||
import SendButton from './SendButton';
|
||||
import FileRow from './Files/FileRow';
|
||||
import Mention from './Mention';
|
||||
import store from '~/store';
|
||||
|
||||
const ChatForm = ({ index = 0 }) => {
|
||||
const submitButtonRef = useRef<HTMLButtonElement>(null);
|
||||
const textAreaRef = useRef<HTMLTextAreaElement | null>(null);
|
||||
const [showStopButton, setShowStopButton] = useRecoilState(store.showStopButtonByIndex(index));
|
||||
const [showMentionPopover, setShowMentionPopover] = useRecoilState(
|
||||
store.showMentionPopoverFamily(index),
|
||||
);
|
||||
const { requiresKey } = useRequiresKey();
|
||||
|
||||
const methods = useForm<{ text: string }>({
|
||||
defaultValues: { text: '' },
|
||||
});
|
||||
|
||||
const { handlePaste, handleKeyDown, handleCompositionStart, handleCompositionEnd } = useTextarea({
|
||||
textAreaRef,
|
||||
submitButtonRef,
|
||||
disabled: !!requiresKey,
|
||||
});
|
||||
const { handlePaste, handleKeyDown, handleKeyUp, handleCompositionStart, handleCompositionEnd } =
|
||||
useTextarea({
|
||||
textAreaRef,
|
||||
submitButtonRef,
|
||||
disabled: !!requiresKey,
|
||||
});
|
||||
|
||||
const {
|
||||
ask,
|
||||
@@ -92,6 +97,9 @@ const ChatForm = ({ index = 0 }) => {
|
||||
>
|
||||
<div className="relative flex h-full flex-1 items-stretch md:flex-col">
|
||||
<div className="flex w-full items-center">
|
||||
{showMentionPopover && (
|
||||
<Mention setShowMentionPopover={setShowMentionPopover} textAreaRef={textAreaRef} />
|
||||
)}
|
||||
<div className="[&:has(textarea:focus)]:border-token-border-xheavy border-token-border-medium bg-token-main-surface-primary relative flex w-full flex-grow flex-col overflow-hidden rounded-2xl border dark:border-gray-600 dark:text-white [&:has(textarea:focus)]:shadow-[0_2px_6px_rgba(0,0,0,.05)] dark:[&:has(textarea:focus)]:border-gray-500">
|
||||
<FileRow
|
||||
files={files}
|
||||
@@ -114,6 +122,7 @@ const ChatForm = ({ index = 0 }) => {
|
||||
disabled={disableInputs}
|
||||
onPaste={handlePaste}
|
||||
onKeyDown={handleKeyDown}
|
||||
onKeyUp={handleKeyUp}
|
||||
onCompositionStart={handleCompositionStart}
|
||||
onCompositionEnd={handleCompositionEnd}
|
||||
id={mainTextareaId}
|
||||
|
||||
@@ -20,12 +20,13 @@ const FilePreview = ({
|
||||
}) => {
|
||||
const radius = 55; // Radius of the SVG circle
|
||||
const circumference = 2 * Math.PI * radius;
|
||||
const progress = useProgress(file?.['progress'] ?? 1, 0.001);
|
||||
const progress = useProgress(file?.['progress'] ?? 1, 0.001, file?.size ?? 1);
|
||||
console.log(progress);
|
||||
|
||||
// Calculate the offset based on the loading progress
|
||||
const offset = circumference - progress * circumference;
|
||||
const circleCSSProperties = {
|
||||
transition: 'stroke-dashoffset 0.3s linear',
|
||||
transition: 'stroke-dashoffset 0.5s linear',
|
||||
};
|
||||
|
||||
return (
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import type { TMessage } from 'librechat-data-provider';
|
||||
import { useMediaQuery, useGenerationsByLatest } from '~/hooks';
|
||||
import Regenerate from '~/components/Input/Generations/Regenerate';
|
||||
import Continue from '~/components/Input/Generations/Continue';
|
||||
import Stop from '~/components/Input/Generations/Stop';
|
||||
import { useChatContext } from '~/Providers';
|
||||
import { cn } from '~/utils';
|
||||
|
||||
type GenerationButtonsProps = {
|
||||
endpoint: string;
|
||||
showPopover?: boolean;
|
||||
opacityClass?: string;
|
||||
};
|
||||
|
||||
export default function GenerationButtons({
|
||||
endpoint,
|
||||
showPopover = false,
|
||||
opacityClass = 'full-opacity',
|
||||
}: GenerationButtonsProps) {
|
||||
const {
|
||||
getMessages,
|
||||
isSubmitting,
|
||||
latestMessage,
|
||||
handleContinue,
|
||||
handleRegenerate,
|
||||
handleStopGenerating,
|
||||
} = useChatContext();
|
||||
const isSmallScreen = useMediaQuery('(max-width: 768px)');
|
||||
const { continueSupported, regenerateEnabled } = useGenerationsByLatest({
|
||||
endpoint,
|
||||
message: latestMessage as TMessage,
|
||||
isSubmitting,
|
||||
latestMessage,
|
||||
});
|
||||
|
||||
const [userStopped, setUserStopped] = useState(false);
|
||||
const messages = getMessages();
|
||||
|
||||
const handleStop = (e: React.MouseEvent<HTMLButtonElement>) => {
|
||||
setUserStopped(true);
|
||||
handleStopGenerating(e);
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
let timer: NodeJS.Timeout;
|
||||
|
||||
if (userStopped) {
|
||||
timer = setTimeout(() => {
|
||||
setUserStopped(false);
|
||||
}, 200);
|
||||
}
|
||||
|
||||
return () => {
|
||||
clearTimeout(timer);
|
||||
};
|
||||
}, [userStopped]);
|
||||
|
||||
if (isSmallScreen) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let button: React.ReactNode = null;
|
||||
|
||||
if (isSubmitting) {
|
||||
button = <Stop onClick={handleStop} />;
|
||||
} else if (userStopped || continueSupported) {
|
||||
button = <Continue onClick={handleContinue} />;
|
||||
} else if (messages && messages.length > 0 && regenerateEnabled) {
|
||||
button = <Regenerate onClick={handleRegenerate} />;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="absolute bottom-0 right-0 z-[62]">
|
||||
<div className="grow" />
|
||||
<div className="flex items-center md:items-end">
|
||||
<div
|
||||
className={cn('option-buttons', showPopover ? '' : opacityClass)}
|
||||
data-projection-id="173"
|
||||
>
|
||||
{button}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -3,7 +3,7 @@ import { Settings2 } from 'lucide-react';
|
||||
import { Root, Anchor } from '@radix-ui/react-popover';
|
||||
import { useState, useEffect, useMemo } from 'react';
|
||||
import { tPresetUpdateSchema, EModelEndpoint } from 'librechat-data-provider';
|
||||
import type { TPreset } from 'librechat-data-provider';
|
||||
import type { TPreset, TInterfaceConfig } from 'librechat-data-provider';
|
||||
import { EndpointSettings, SaveAsPresetDialog, AlternativeSettings } from '~/components/Endpoints';
|
||||
import { ModelSelect } from '~/components/Input/ModelSelect';
|
||||
import { PluginStoreDialog } from '~/components';
|
||||
@@ -15,7 +15,11 @@ import { Button } from '~/components/ui';
|
||||
import { cn, cardStyle } from '~/utils/';
|
||||
import store from '~/store';
|
||||
|
||||
export default function HeaderOptions() {
|
||||
export default function HeaderOptions({
|
||||
interfaceConfig,
|
||||
}: {
|
||||
interfaceConfig?: Partial<TInterfaceConfig>;
|
||||
}) {
|
||||
const [saveAsDialogShow, setSaveAsDialogShow] = useState<boolean>(false);
|
||||
const [showPluginStoreDialog, setShowPluginStoreDialog] = useRecoilState(
|
||||
store.showPluginStoreDialog,
|
||||
@@ -70,13 +74,15 @@ export default function HeaderOptions() {
|
||||
<div className="my-auto lg:max-w-2xl xl:max-w-3xl">
|
||||
<span className="flex w-full flex-col items-center justify-center gap-0 md:order-none md:m-auto md:gap-2">
|
||||
<div className="z-[61] flex w-full items-center justify-center gap-2">
|
||||
<ModelSelect
|
||||
conversation={conversation}
|
||||
setOption={setOption}
|
||||
isMultiChat={true}
|
||||
showAbove={false}
|
||||
/>
|
||||
{!noSettings[endpoint] && (
|
||||
{interfaceConfig?.modelSelect && (
|
||||
<ModelSelect
|
||||
conversation={conversation}
|
||||
setOption={setOption}
|
||||
showAbove={false}
|
||||
popover={true}
|
||||
/>
|
||||
)}
|
||||
{!noSettings[endpoint] && interfaceConfig?.parameters && (
|
||||
<Button
|
||||
type="button"
|
||||
className={cn(
|
||||
@@ -90,34 +96,41 @@ export default function HeaderOptions() {
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
<OptionsPopover
|
||||
visible={showPopover}
|
||||
saveAsPreset={saveAsPreset}
|
||||
closePopover={() => setShowPopover(false)}
|
||||
PopoverButtons={<PopoverButtons />}
|
||||
>
|
||||
<div className="px-4 py-4">
|
||||
<EndpointSettings
|
||||
conversation={conversation}
|
||||
setOption={setOption}
|
||||
isMultiChat={true}
|
||||
/>
|
||||
<AlternativeSettings conversation={conversation} setOption={setOption} />
|
||||
</div>
|
||||
</OptionsPopover>
|
||||
<SaveAsPresetDialog
|
||||
open={saveAsDialogShow}
|
||||
onOpenChange={setSaveAsDialogShow}
|
||||
preset={
|
||||
tPresetUpdateSchema.parse({
|
||||
...conversation,
|
||||
}) as TPreset
|
||||
}
|
||||
/>
|
||||
<PluginStoreDialog
|
||||
isOpen={showPluginStoreDialog}
|
||||
setIsOpen={setShowPluginStoreDialog}
|
||||
/>
|
||||
{interfaceConfig?.parameters && (
|
||||
<OptionsPopover
|
||||
visible={showPopover}
|
||||
saveAsPreset={saveAsPreset}
|
||||
presetsDisabled={!interfaceConfig?.presets}
|
||||
PopoverButtons={<PopoverButtons />}
|
||||
closePopover={() => setShowPopover(false)}
|
||||
>
|
||||
<div className="px-4 py-4">
|
||||
<EndpointSettings
|
||||
className="[&::-webkit-scrollbar]:w-2"
|
||||
conversation={conversation}
|
||||
setOption={setOption}
|
||||
/>
|
||||
<AlternativeSettings conversation={conversation} setOption={setOption} />
|
||||
</div>
|
||||
</OptionsPopover>
|
||||
)}
|
||||
{interfaceConfig?.presets && (
|
||||
<SaveAsPresetDialog
|
||||
open={saveAsDialogShow}
|
||||
onOpenChange={setSaveAsDialogShow}
|
||||
preset={
|
||||
tPresetUpdateSchema.parse({
|
||||
...conversation,
|
||||
}) as TPreset
|
||||
}
|
||||
/>
|
||||
)}
|
||||
{interfaceConfig?.parameters && (
|
||||
<PluginStoreDialog
|
||||
isOpen={showPluginStoreDialog}
|
||||
setIsOpen={setShowPluginStoreDialog}
|
||||
/>
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
</Anchor>
|
||||
|
||||
148
client/src/components/Chat/Input/Mention.tsx
Normal file
148
client/src/components/Chat/Input/Mention.tsx
Normal file
@@ -0,0 +1,148 @@
|
||||
import { useState, useRef, useEffect } from 'react';
|
||||
import { EModelEndpoint } from 'librechat-data-provider';
|
||||
import type { SetterOrUpdater } from 'recoil';
|
||||
import type { MentionOption } from '~/common';
|
||||
import { useAssistantsMapContext } from '~/Providers';
|
||||
import useMentions from '~/hooks/Input/useMentions';
|
||||
import { useLocalize, useCombobox } from '~/hooks';
|
||||
import { removeAtSymbolIfLast } from '~/utils';
|
||||
import MentionItem from './MentionItem';
|
||||
|
||||
export default function Mention({
|
||||
setShowMentionPopover,
|
||||
textAreaRef,
|
||||
}: {
|
||||
setShowMentionPopover: SetterOrUpdater<boolean>;
|
||||
textAreaRef: React.MutableRefObject<HTMLTextAreaElement | null>;
|
||||
}) {
|
||||
const localize = useLocalize();
|
||||
const assistantMap = useAssistantsMapContext();
|
||||
const { options, modelsConfig, assistants, onSelectMention } = useMentions({ assistantMap });
|
||||
|
||||
const [activeIndex, setActiveIndex] = useState(0);
|
||||
const timeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const inputRef = useRef<HTMLInputElement | null>(null);
|
||||
const [inputOptions, setInputOptions] = useState<MentionOption[]>(options);
|
||||
|
||||
const { open, setOpen, searchValue, setSearchValue, matches } = useCombobox({
|
||||
value: '',
|
||||
options: inputOptions,
|
||||
});
|
||||
|
||||
const handleSelect = (mention?: MentionOption) => {
|
||||
if (!mention) {
|
||||
return;
|
||||
}
|
||||
|
||||
const defaultSelect = () => {
|
||||
setSearchValue('');
|
||||
setOpen(false);
|
||||
setShowMentionPopover(false);
|
||||
onSelectMention(mention);
|
||||
|
||||
if (textAreaRef.current) {
|
||||
removeAtSymbolIfLast(textAreaRef.current);
|
||||
}
|
||||
};
|
||||
|
||||
if (mention.type === 'endpoint' && mention.value === EModelEndpoint.assistants) {
|
||||
setSearchValue('');
|
||||
setInputOptions(assistants);
|
||||
setActiveIndex(0);
|
||||
inputRef.current?.focus();
|
||||
} else if (mention.type === 'endpoint') {
|
||||
const models = (modelsConfig?.[mention.value ?? ''] ?? []).map((model) => ({
|
||||
value: mention.value,
|
||||
label: model,
|
||||
type: 'model',
|
||||
}));
|
||||
|
||||
setActiveIndex(0);
|
||||
setSearchValue('');
|
||||
setInputOptions(models);
|
||||
inputRef.current?.focus();
|
||||
} else {
|
||||
defaultSelect();
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (!open) {
|
||||
setInputOptions(options);
|
||||
setActiveIndex(0);
|
||||
}
|
||||
}, [open, options]);
|
||||
|
||||
useEffect(() => {
|
||||
const currentActiveItem = document.getElementById(`mention-item-${activeIndex}`);
|
||||
currentActiveItem?.scrollIntoView({ behavior: 'instant', block: 'nearest' });
|
||||
}, [activeIndex]);
|
||||
|
||||
return (
|
||||
<div className="absolute bottom-16 z-10 w-full space-y-2">
|
||||
<div className="popover border-token-border-light rounded-2xl border bg-white p-2 shadow-lg dark:bg-gray-700">
|
||||
<input
|
||||
autoFocus
|
||||
ref={inputRef}
|
||||
placeholder={localize('com_ui_mention')}
|
||||
className="mb-1 w-full border-0 bg-white p-2 text-sm focus:outline-none dark:bg-gray-700 dark:text-gray-200"
|
||||
autoComplete="off"
|
||||
value={searchValue}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Escape') {
|
||||
setOpen(false);
|
||||
setShowMentionPopover(false);
|
||||
textAreaRef.current?.focus();
|
||||
}
|
||||
if (e.key === 'ArrowDown') {
|
||||
setActiveIndex((prevIndex) => (prevIndex + 1) % matches.length);
|
||||
} else if (e.key === 'ArrowUp') {
|
||||
setActiveIndex((prevIndex) => (prevIndex - 1 + matches.length) % matches.length);
|
||||
} else if (e.key === 'Enter' || e.key === 'Tab') {
|
||||
const mentionOption = matches[0] as MentionOption | undefined;
|
||||
if (mentionOption?.type === 'endpoint') {
|
||||
e.preventDefault();
|
||||
} else if (e.key === 'Enter') {
|
||||
e.preventDefault();
|
||||
}
|
||||
handleSelect(matches[activeIndex] as MentionOption);
|
||||
} else if (e.key === 'Backspace' && searchValue === '') {
|
||||
setOpen(false);
|
||||
setShowMentionPopover(false);
|
||||
textAreaRef.current?.focus();
|
||||
}
|
||||
}}
|
||||
onChange={(e) => setSearchValue(e.target.value)}
|
||||
onFocus={() => setOpen(true)}
|
||||
onBlur={() => {
|
||||
timeoutRef.current = setTimeout(() => {
|
||||
setOpen(false);
|
||||
setShowMentionPopover(false);
|
||||
}, 150);
|
||||
}}
|
||||
/>
|
||||
{open && (
|
||||
<div className="max-h-40 overflow-y-auto">
|
||||
{(matches as MentionOption[]).map((mention, index) => (
|
||||
<MentionItem
|
||||
index={index}
|
||||
key={`${mention.value}-${index}`}
|
||||
onClick={() => {
|
||||
if (timeoutRef.current) {
|
||||
clearTimeout(timeoutRef.current);
|
||||
}
|
||||
timeoutRef.current = null;
|
||||
handleSelect(mention);
|
||||
}}
|
||||
name={mention.label ?? ''}
|
||||
icon={mention.icon}
|
||||
description={mention.description}
|
||||
isActive={index === activeIndex}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
46
client/src/components/Chat/Input/MentionItem.tsx
Normal file
46
client/src/components/Chat/Input/MentionItem.tsx
Normal file
@@ -0,0 +1,46 @@
|
||||
import React from 'react';
|
||||
import { Clock4 } from 'lucide-react';
|
||||
import { cn } from '~/utils';
|
||||
|
||||
export default function MentionItem({
|
||||
name,
|
||||
onClick,
|
||||
index,
|
||||
icon,
|
||||
isActive,
|
||||
description,
|
||||
}: {
|
||||
name: string;
|
||||
onClick: () => void;
|
||||
index: number;
|
||||
icon?: React.ReactNode;
|
||||
isActive?: boolean;
|
||||
description?: string;
|
||||
}) {
|
||||
return (
|
||||
<div tabIndex={index} onClick={onClick} id={`mention-item-${index}`} className="cursor-pointer">
|
||||
<div
|
||||
className={cn(
|
||||
'hover:bg-token-main-surface-secondary text-token-text-primary bg-token-main-surface-secondary group flex h-10 items-center gap-2 rounded-lg px-2 text-sm font-medium dark:hover:bg-gray-600',
|
||||
index === 0 ? 'dark:bg-gray-600' : '',
|
||||
isActive ? 'dark:bg-gray-600' : '',
|
||||
)}
|
||||
>
|
||||
{icon ? icon : null}
|
||||
<div className="flex h-fit grow flex-row justify-between space-x-2 overflow-hidden text-ellipsis whitespace-nowrap">
|
||||
<div className="flex flex-row space-x-2">
|
||||
<span className="shrink-0 truncate">{name}</span>
|
||||
{description ? (
|
||||
<span className="text-token-text-tertiary flex-grow truncate text-sm font-light sm:max-w-xs lg:max-w-md">
|
||||
{description}
|
||||
</span>
|
||||
) : null}
|
||||
</div>
|
||||
<span className="shrink-0 self-center">
|
||||
<Clock4 size={16} className="icon-sm" />
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -13,6 +13,7 @@ type TOptionsPopoverProps = {
|
||||
saveAsPreset: () => void;
|
||||
closePopover: () => void;
|
||||
PopoverButtons: ReactNode;
|
||||
presetsDisabled: boolean;
|
||||
};
|
||||
|
||||
export default function OptionsPopover({
|
||||
@@ -22,6 +23,7 @@ export default function OptionsPopover({
|
||||
saveAsPreset,
|
||||
closePopover,
|
||||
PopoverButtons,
|
||||
presetsDisabled,
|
||||
}: TOptionsPopoverProps) {
|
||||
const popoverRef = useRef(null);
|
||||
useOnClickOutside(
|
||||
@@ -61,14 +63,16 @@ export default function OptionsPopover({
|
||||
)}
|
||||
>
|
||||
<div className="flex w-full items-center bg-gray-50 px-2 py-2 dark:bg-gray-700">
|
||||
<Button
|
||||
type="button"
|
||||
className="h-auto w-[150px] justify-start rounded-md border-2 border-gray-300/50 bg-transparent px-2 py-1 text-xs font-medium font-normal text-black hover:bg-gray-100 hover:text-black focus:ring-1 focus:ring-green-500/90 dark:border-gray-500/50 dark:bg-transparent dark:text-white dark:hover:bg-gray-600 dark:focus:ring-green-500"
|
||||
onClick={saveAsPreset}
|
||||
>
|
||||
<Save className="mr-1 w-[14px]" />
|
||||
{localize('com_endpoint_save_as_preset')}
|
||||
</Button>
|
||||
{presetsDisabled ? null : (
|
||||
<Button
|
||||
type="button"
|
||||
className="h-auto w-[150px] justify-start rounded-md border-2 border-gray-300/50 bg-transparent px-2 py-1 text-xs font-medium font-normal text-black hover:bg-gray-100 hover:text-black focus:ring-1 focus:ring-green-500/90 dark:border-gray-500/50 dark:bg-transparent dark:text-white dark:hover:bg-gray-600 dark:focus:ring-green-500"
|
||||
onClick={saveAsPreset}
|
||||
>
|
||||
<Save className="mr-1 w-[14px]" />
|
||||
{localize('com_endpoint_save_as_preset')}
|
||||
</Button>
|
||||
)}
|
||||
{PopoverButtons}
|
||||
<Button
|
||||
type="button"
|
||||
|
||||
@@ -3,20 +3,20 @@ import { useGetEndpointsQuery, useGetStartupConfig } from 'librechat-data-provid
|
||||
import type { ReactNode } from 'react';
|
||||
import { TooltipProvider, Tooltip, TooltipTrigger, TooltipContent } from '~/components/ui';
|
||||
import { useChatContext, useAssistantsMapContext } from '~/Providers';
|
||||
import { icons } from './Menus/Endpoints/Icons';
|
||||
import ConvoIcon from '~/components/Endpoints/ConvoIcon';
|
||||
import { BirthdayIcon } from '~/components/svg';
|
||||
import { getEndpointField } from '~/utils';
|
||||
import { getIconEndpoint, cn } from '~/utils';
|
||||
import { useLocalize } from '~/hooks';
|
||||
|
||||
export default function Landing({ Header }: { Header?: ReactNode }) {
|
||||
const { conversation } = useChatContext();
|
||||
const { data: endpointsConfig } = useGetEndpointsQuery();
|
||||
const { data: startupConfig } = useGetStartupConfig();
|
||||
const assistantMap = useAssistantsMapContext();
|
||||
const { data: startupConfig } = useGetStartupConfig();
|
||||
const { data: endpointsConfig } = useGetEndpointsQuery();
|
||||
|
||||
const localize = useLocalize();
|
||||
|
||||
let { endpoint } = conversation ?? {};
|
||||
let { endpoint = '' } = conversation ?? {};
|
||||
const { assistant_id = null } = conversation ?? {};
|
||||
|
||||
if (
|
||||
@@ -27,43 +27,38 @@ export default function Landing({ Header }: { Header?: ReactNode }) {
|
||||
endpoint = EModelEndpoint.openAI;
|
||||
}
|
||||
|
||||
const endpointType = getEndpointField(endpointsConfig, endpoint, 'type');
|
||||
const iconURL = getEndpointField(endpointsConfig, endpoint, 'iconURL');
|
||||
const iconKey = endpointType ? 'unknown' : endpoint ?? 'unknown';
|
||||
const Icon = icons[iconKey];
|
||||
const iconURL = conversation?.iconURL;
|
||||
endpoint = getIconEndpoint({ endpointsConfig, iconURL, endpoint });
|
||||
|
||||
const assistant = endpoint === EModelEndpoint.assistants && assistantMap?.[assistant_id ?? ''];
|
||||
const assistantName = (assistant && assistant?.name) || '';
|
||||
const assistantDesc = (assistant && assistant?.description) || '';
|
||||
const avatar = (assistant && (assistant?.metadata?.avatar as string)) || '';
|
||||
|
||||
let className =
|
||||
const containerClassName =
|
||||
'shadow-stroke relative flex h-full items-center justify-center rounded-full bg-white text-black';
|
||||
|
||||
if (assistantName && avatar) {
|
||||
className = 'shadow-stroke overflow-hidden rounded-full';
|
||||
}
|
||||
|
||||
return (
|
||||
<TooltipProvider delayDuration={50}>
|
||||
<Tooltip>
|
||||
<div className="relative h-full">
|
||||
<div className="absolute left-0 right-0">{Header && Header}</div>
|
||||
<div className="flex h-full flex-col items-center justify-center">
|
||||
<div className="relative mb-3 h-[72px] w-[72px]">
|
||||
<div className={className}>
|
||||
{endpoint &&
|
||||
Icon &&
|
||||
Icon({
|
||||
size: 41,
|
||||
context: 'landing',
|
||||
className: 'h-2/3 w-2/3',
|
||||
endpoint: endpoint,
|
||||
iconURL: iconURL,
|
||||
assistantName,
|
||||
avatar,
|
||||
})}
|
||||
</div>
|
||||
<div
|
||||
className={cn(
|
||||
'relative h-[72px] w-[72px]',
|
||||
assistantName && avatar ? 'mb-0' : 'mb-3',
|
||||
)}
|
||||
>
|
||||
<ConvoIcon
|
||||
conversation={conversation}
|
||||
assistantMap={assistantMap}
|
||||
endpointsConfig={endpointsConfig}
|
||||
containerClassName={containerClassName}
|
||||
context="landing"
|
||||
className="h-2/3 w-2/3"
|
||||
size={41}
|
||||
/>
|
||||
<TooltipTrigger>
|
||||
{(startupConfig?.showBirthdayIcon ?? false) && (
|
||||
<BirthdayIcon className="absolute bottom-12 right-5" />
|
||||
@@ -88,8 +83,8 @@ export default function Landing({ Header }: { Header?: ReactNode }) {
|
||||
) : (
|
||||
<div className="mb-5 text-2xl font-medium dark:text-white">
|
||||
{endpoint === EModelEndpoint.assistants
|
||||
? localize('com_nav_welcome_assistant')
|
||||
: localize('com_nav_welcome_message')}
|
||||
? conversation?.greeting ?? localize('com_nav_welcome_assistant')
|
||||
: conversation?.greeting ?? localize('com_nav_welcome_message')}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { EModelEndpoint } from 'librechat-data-provider';
|
||||
import type { IconMapProps } from '~/common';
|
||||
import {
|
||||
MinimalPlugin,
|
||||
GPTIcon,
|
||||
@@ -23,22 +24,12 @@ export const icons = {
|
||||
[EModelEndpoint.google]: GoogleMinimalIcon,
|
||||
[EModelEndpoint.bingAI]: BingAIMinimalIcon,
|
||||
[EModelEndpoint.custom]: CustomMinimalIcon,
|
||||
[EModelEndpoint.assistants]: ({
|
||||
className = '',
|
||||
assistantName,
|
||||
avatar,
|
||||
size,
|
||||
}: {
|
||||
className?: string;
|
||||
assistantName?: string;
|
||||
avatar?: string;
|
||||
size?: number;
|
||||
}) => {
|
||||
[EModelEndpoint.assistants]: ({ className = '', assistantName, avatar, size }: IconMapProps) => {
|
||||
if (assistantName && avatar) {
|
||||
return (
|
||||
<img
|
||||
src={avatar}
|
||||
className="bg-token-surface-secondary dark:bg-token-surface-tertiary h-full w-full"
|
||||
className="bg-token-surface-secondary dark:bg-token-surface-tertiary h-full w-full rounded-full object-cover"
|
||||
alt={assistantName}
|
||||
width="80"
|
||||
height="80"
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import { useState } from 'react';
|
||||
import { Settings } from 'lucide-react';
|
||||
import { useRecoilValue } from 'recoil';
|
||||
import { EModelEndpoint, modularEndpoints } from 'librechat-data-provider';
|
||||
import { EModelEndpoint } from 'librechat-data-provider';
|
||||
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
|
||||
import type { TPreset, TConversation } from 'librechat-data-provider';
|
||||
import type { TConversation } from 'librechat-data-provider';
|
||||
import type { FC } from 'react';
|
||||
import { cn, getConvoSwitchLogic, getEndpointField, getIconKey } from '~/utils';
|
||||
import { useLocalize, useUserKey, useDefaultConvo } from '~/hooks';
|
||||
import { SetKeyDialog } from '~/components/Input/SetKeyDialog';
|
||||
import { cn, getEndpointField } from '~/utils';
|
||||
import { useChatContext } from '~/Providers';
|
||||
import { icons } from './Icons';
|
||||
import store from '~/store';
|
||||
@@ -43,58 +43,44 @@ const MenuItem: FC<MenuItemProps> = ({
|
||||
const onSelectEndpoint = (newEndpoint: EModelEndpoint) => {
|
||||
if (!newEndpoint) {
|
||||
return;
|
||||
} else {
|
||||
if (!expiryTime) {
|
||||
setDialogOpen(true);
|
||||
}
|
||||
|
||||
const currentEndpoint = conversation?.endpoint;
|
||||
const template: Partial<TPreset> = {
|
||||
...conversation,
|
||||
endpoint: newEndpoint,
|
||||
conversationId: 'new',
|
||||
};
|
||||
const isAssistantSwitch =
|
||||
newEndpoint === EModelEndpoint.assistants &&
|
||||
currentEndpoint === EModelEndpoint.assistants &&
|
||||
currentEndpoint === newEndpoint;
|
||||
|
||||
const { conversationId } = conversation ?? {};
|
||||
const isExistingConversation = conversationId && conversationId !== 'new';
|
||||
const currentEndpointType =
|
||||
getEndpointField(endpointsConfig, currentEndpoint, 'type') ?? currentEndpoint;
|
||||
const newEndpointType = getEndpointField(endpointsConfig, newEndpoint, 'type') ?? newEndpoint;
|
||||
|
||||
const hasEndpoint = modularEndpoints.has(currentEndpoint ?? '');
|
||||
const hasCurrentEndpointType = modularEndpoints.has(currentEndpointType ?? '');
|
||||
const isCurrentModular = hasEndpoint || hasCurrentEndpointType || isAssistantSwitch;
|
||||
|
||||
const hasNewEndpoint = modularEndpoints.has(newEndpoint ?? '');
|
||||
const hasNewEndpointType = modularEndpoints.has(newEndpointType ?? '');
|
||||
const isNewModular = hasNewEndpoint || hasNewEndpointType || isAssistantSwitch;
|
||||
|
||||
const endpointsMatch = currentEndpoint === newEndpoint;
|
||||
const shouldSwitch = endpointsMatch || modularChat || isAssistantSwitch;
|
||||
|
||||
if (isExistingConversation && isCurrentModular && isNewModular && shouldSwitch) {
|
||||
template.endpointType = newEndpointType;
|
||||
|
||||
const currentConvo = getDefaultConversation({
|
||||
/* target endpointType is necessary to avoid endpoint mixing */
|
||||
conversation: { ...(conversation ?? {}), endpointType: template.endpointType },
|
||||
preset: template,
|
||||
});
|
||||
|
||||
/* We don't reset the latest message, only when changing settings mid-converstion */
|
||||
newConversation({ template: currentConvo, preset: currentConvo, keepLatestMessage: true });
|
||||
return;
|
||||
}
|
||||
newConversation({ template: { ...(template as Partial<TConversation>) } });
|
||||
}
|
||||
|
||||
if (!expiryTime) {
|
||||
setDialogOpen(true);
|
||||
}
|
||||
|
||||
const {
|
||||
shouldSwitch,
|
||||
isNewModular,
|
||||
isCurrentModular,
|
||||
isExistingConversation,
|
||||
newEndpointType,
|
||||
template,
|
||||
} = getConvoSwitchLogic({
|
||||
newEndpoint,
|
||||
modularChat,
|
||||
conversation,
|
||||
endpointsConfig,
|
||||
});
|
||||
|
||||
if (isExistingConversation && isCurrentModular && isNewModular && shouldSwitch) {
|
||||
template.endpointType = newEndpointType;
|
||||
|
||||
const currentConvo = getDefaultConversation({
|
||||
/* target endpointType is necessary to avoid endpoint mixing */
|
||||
conversation: { ...(conversation ?? {}), endpointType: template.endpointType },
|
||||
preset: template,
|
||||
});
|
||||
|
||||
/* We don't reset the latest message, only when changing settings mid-converstion */
|
||||
newConversation({ template: currentConvo, preset: currentConvo, keepLatestMessage: true });
|
||||
return;
|
||||
}
|
||||
newConversation({ template: { ...(template as Partial<TConversation>) } });
|
||||
};
|
||||
|
||||
const endpointType = getEndpointField(endpointsConfig, endpoint, 'type');
|
||||
const iconKey = endpointType ? 'unknown' : endpoint ?? 'unknown';
|
||||
const iconKey = getIconKey({ endpoint, endpointsConfig, endpointType });
|
||||
const Icon = icons[iconKey];
|
||||
|
||||
return (
|
||||
|
||||
@@ -3,16 +3,18 @@ import { CustomMinimalIcon } from '~/components/svg';
|
||||
import { IconContext } from '~/common';
|
||||
|
||||
const knownEndpointAssets = {
|
||||
[KnownEndpoints.mistral]: '/assets/mistral.png',
|
||||
[KnownEndpoints.openrouter]: '/assets/openrouter.png',
|
||||
[KnownEndpoints.groq]: '/assets/groq.png',
|
||||
[KnownEndpoints.shuttleai]: '/assets/shuttleai.png',
|
||||
[KnownEndpoints.anyscale]: '/assets/anyscale.png',
|
||||
[KnownEndpoints.fireworks]: '/assets/fireworks.png',
|
||||
[KnownEndpoints.ollama]: '/assets/ollama.png',
|
||||
[KnownEndpoints.perplexity]: '/assets/perplexity.png',
|
||||
[KnownEndpoints['together.ai']]: '/assets/together.png',
|
||||
[KnownEndpoints.apipie]: '/assets/apipie.png',
|
||||
[KnownEndpoints.cohere]: '/assets/cohere.png',
|
||||
[KnownEndpoints.fireworks]: '/assets/fireworks.png',
|
||||
[KnownEndpoints.groq]: '/assets/groq.png',
|
||||
[KnownEndpoints.mistral]: '/assets/mistral.png',
|
||||
[KnownEndpoints.mlx]: '/assets/mlx.png',
|
||||
[KnownEndpoints.ollama]: '/assets/ollama.png',
|
||||
[KnownEndpoints.openrouter]: '/assets/openrouter.png',
|
||||
[KnownEndpoints.perplexity]: '/assets/perplexity.png',
|
||||
[KnownEndpoints.shuttleai]: '/assets/shuttleai.png',
|
||||
[KnownEndpoints['together.ai']]: '/assets/together.png',
|
||||
};
|
||||
|
||||
const knownEndpointClasses = {
|
||||
|
||||
49
client/src/components/Chat/Menus/Models/MenuButton.tsx
Normal file
49
client/src/components/Chat/Menus/Models/MenuButton.tsx
Normal file
@@ -0,0 +1,49 @@
|
||||
import { Trigger } from '@radix-ui/react-popover';
|
||||
import type { TModelSpec, TEndpointsConfig } from 'librechat-data-provider';
|
||||
import { useLocalize } from '~/hooks';
|
||||
import SpecIcon from './SpecIcon';
|
||||
|
||||
export default function MenuButton({
|
||||
selected,
|
||||
primaryText = '',
|
||||
secondaryText = '',
|
||||
endpointsConfig,
|
||||
}: {
|
||||
selected?: TModelSpec;
|
||||
primaryText?: string;
|
||||
secondaryText?: string;
|
||||
endpointsConfig: TEndpointsConfig;
|
||||
}) {
|
||||
const localize = useLocalize();
|
||||
return (
|
||||
<Trigger asChild>
|
||||
<div
|
||||
className="group flex cursor-pointer items-center gap-1 rounded-xl px-3 py-2 text-lg font-medium hover:bg-gray-50 radix-state-open:bg-gray-50 dark:hover:bg-gray-700 dark:radix-state-open:bg-gray-700"
|
||||
// type="button"
|
||||
>
|
||||
{selected && selected.showIconInHeader && (
|
||||
<SpecIcon currentSpec={selected} endpointsConfig={endpointsConfig} />
|
||||
)}
|
||||
<div>
|
||||
{!selected ? localize('com_ui_none_selected') : primaryText}{' '}
|
||||
{!!secondaryText && <span className="text-token-text-secondary">{secondaryText}</span>}
|
||||
</div>
|
||||
<svg
|
||||
width="16"
|
||||
height="17"
|
||||
viewBox="0 0 16 17"
|
||||
fill="none"
|
||||
className="text-token-text-tertiary"
|
||||
>
|
||||
<path
|
||||
d="M11.3346 7.83203L8.00131 11.1654L4.66797 7.83203"
|
||||
stroke="currentColor"
|
||||
strokeWidth="2"
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
</Trigger>
|
||||
);
|
||||
}
|
||||
130
client/src/components/Chat/Menus/Models/ModelSpec.tsx
Normal file
130
client/src/components/Chat/Menus/Models/ModelSpec.tsx
Normal file
@@ -0,0 +1,130 @@
|
||||
import { useState, useMemo } from 'react';
|
||||
import { Settings } from 'lucide-react';
|
||||
import type { FC } from 'react';
|
||||
import type { TModelSpec, TEndpointsConfig } from 'librechat-data-provider';
|
||||
import { SetKeyDialog } from '~/components/Input/SetKeyDialog';
|
||||
import { useLocalize, useUserKey } from '~/hooks';
|
||||
import { cn, getEndpointField } from '~/utils';
|
||||
import SpecIcon from './SpecIcon';
|
||||
|
||||
type MenuItemProps = {
|
||||
title: string;
|
||||
spec: TModelSpec;
|
||||
selected: boolean;
|
||||
description?: string;
|
||||
userProvidesKey: boolean;
|
||||
endpointsConfig: TEndpointsConfig;
|
||||
onClick?: () => void;
|
||||
// iconPath: string;
|
||||
// hoverContent?: string;
|
||||
};
|
||||
|
||||
const MenuItem: FC<MenuItemProps> = ({
|
||||
title,
|
||||
spec,
|
||||
selected,
|
||||
description,
|
||||
userProvidesKey,
|
||||
endpointsConfig,
|
||||
onClick,
|
||||
...rest
|
||||
}) => {
|
||||
const { endpoint } = spec.preset;
|
||||
const [isDialogOpen, setDialogOpen] = useState(false);
|
||||
const { getExpiry } = useUserKey(endpoint ?? '');
|
||||
const localize = useLocalize();
|
||||
const expiryTime = getExpiry();
|
||||
|
||||
const clickHandler = () => {
|
||||
if (!expiryTime) {
|
||||
setDialogOpen(true);
|
||||
}
|
||||
if (onClick) {
|
||||
onClick();
|
||||
}
|
||||
};
|
||||
|
||||
const endpointType = useMemo(
|
||||
() => spec.preset.endpointType ?? getEndpointField(endpointsConfig, endpoint, 'type'),
|
||||
[spec, endpointsConfig, endpoint],
|
||||
);
|
||||
|
||||
const { showIconInMenu = true } = spec;
|
||||
|
||||
return (
|
||||
<>
|
||||
<div
|
||||
role="menuitem"
|
||||
className="group m-1.5 flex cursor-pointer gap-2 rounded px-1 py-2.5 !pr-3 text-sm !opacity-100 hover:bg-black/5 focus:ring-0 radix-disabled:pointer-events-none radix-disabled:opacity-50 dark:hover:bg-white/5"
|
||||
tabIndex={-1}
|
||||
{...rest}
|
||||
onClick={clickHandler}
|
||||
>
|
||||
<div className="flex grow items-center justify-between gap-2">
|
||||
<div>
|
||||
<div className="flex items-center gap-2">
|
||||
{showIconInMenu && <SpecIcon currentSpec={spec} endpointsConfig={endpointsConfig} />}
|
||||
<div>
|
||||
{title}
|
||||
<div className="text-token-text-tertiary">{description}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
{userProvidesKey ? (
|
||||
<div className="text-token-text-primary" key={`set-key-${endpoint}`}>
|
||||
<button
|
||||
className={cn(
|
||||
'invisible flex gap-x-1 group-hover:visible',
|
||||
selected ? 'visible' : '',
|
||||
expiryTime
|
||||
? 'w-full rounded-lg p-2 hover:bg-gray-200 dark:hover:bg-gray-900'
|
||||
: '',
|
||||
)}
|
||||
onClick={(e) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
setDialogOpen(true);
|
||||
}}
|
||||
>
|
||||
<div className={cn('invisible group-hover:visible', expiryTime ? 'text-xs' : '')}>
|
||||
{localize('com_endpoint_config_key')}
|
||||
</div>
|
||||
<Settings className={cn(expiryTime ? 'icon-sm' : 'icon-md stroke-1')} />
|
||||
</button>
|
||||
</div>
|
||||
) : null}
|
||||
{selected && (
|
||||
<svg
|
||||
width="24"
|
||||
height="24"
|
||||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
className="icon-md block"
|
||||
// className="icon-md block group-hover:hidden"
|
||||
>
|
||||
<path
|
||||
fillRule="evenodd"
|
||||
clipRule="evenodd"
|
||||
d="M2 12C2 6.47715 6.47715 2 12 2C17.5228 2 22 6.47715 22 12C22 17.5228 17.5228 22 12 22C6.47715 22 2 17.5228 2 12ZM16.0755 7.93219C16.5272 8.25003 16.6356 8.87383 16.3178 9.32549L11.5678 16.0755C11.3931 16.3237 11.1152 16.4792 10.8123 16.4981C10.5093 16.517 10.2142 16.3973 10.0101 16.1727L7.51006 13.4227C7.13855 13.014 7.16867 12.3816 7.57733 12.0101C7.98598 11.6386 8.61843 11.6687 8.98994 12.0773L10.6504 13.9039L14.6822 8.17451C15 7.72284 15.6238 7.61436 16.0755 7.93219Z"
|
||||
fill="currentColor"
|
||||
/>
|
||||
</svg>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{userProvidesKey && (
|
||||
<SetKeyDialog
|
||||
open={isDialogOpen}
|
||||
onOpenChange={setDialogOpen}
|
||||
endpoint={endpoint ?? ''}
|
||||
endpointType={endpointType}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default MenuItem;
|
||||
44
client/src/components/Chat/Menus/Models/ModelSpecs.tsx
Normal file
44
client/src/components/Chat/Menus/Models/ModelSpecs.tsx
Normal file
@@ -0,0 +1,44 @@
|
||||
import type { FC } from 'react';
|
||||
import { Close } from '@radix-ui/react-popover';
|
||||
import { AuthType } from 'librechat-data-provider';
|
||||
import type { TModelSpec, TEndpointsConfig } from 'librechat-data-provider';
|
||||
import MenuSeparator from '~/components/Chat/Menus/UI/MenuSeparator';
|
||||
import ModelSpec from './ModelSpec';
|
||||
|
||||
const ModelSpecs: FC<{
|
||||
specs?: TModelSpec[];
|
||||
selected?: TModelSpec;
|
||||
setSelected?: (spec: TModelSpec) => void;
|
||||
endpointsConfig: TEndpointsConfig;
|
||||
}> = ({ specs = [], selected, setSelected = () => ({}), endpointsConfig }) => {
|
||||
return (
|
||||
<>
|
||||
{specs &&
|
||||
specs.map((spec, i) => {
|
||||
if (!spec) {
|
||||
return null;
|
||||
}
|
||||
return (
|
||||
<Close asChild key={`spec-${spec.name}`}>
|
||||
<div key={`spec-${spec.name}`}>
|
||||
<ModelSpec
|
||||
spec={spec}
|
||||
title={spec.label}
|
||||
key={`spec-item-${spec.name}`}
|
||||
description={spec.description}
|
||||
onClick={() => setSelected(spec)}
|
||||
data-testid={`spec-item-${spec.name}`}
|
||||
selected={selected?.name === spec.name}
|
||||
userProvidesKey={spec.authType === AuthType.USER_PROVIDED}
|
||||
endpointsConfig={endpointsConfig}
|
||||
/>
|
||||
{i !== specs.length - 1 && <MenuSeparator />}
|
||||
</div>
|
||||
</Close>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default ModelSpecs;
|
||||
106
client/src/components/Chat/Menus/Models/ModelSpecsMenu.tsx
Normal file
106
client/src/components/Chat/Menus/Models/ModelSpecsMenu.tsx
Normal file
@@ -0,0 +1,106 @@
|
||||
import { useMemo } from 'react';
|
||||
import { useRecoilValue } from 'recoil';
|
||||
import { EModelEndpoint } from 'librechat-data-provider';
|
||||
import { Content, Portal, Root } from '@radix-ui/react-popover';
|
||||
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
|
||||
import type { TModelSpec, TConversation, TEndpointsConfig } from 'librechat-data-provider';
|
||||
import { getConvoSwitchLogic, getModelSpecIconURL } from '~/utils';
|
||||
import { useDefaultConvo, useNewConvo } from '~/hooks';
|
||||
import { useChatContext } from '~/Providers';
|
||||
import MenuButton from './MenuButton';
|
||||
import ModelSpecs from './ModelSpecs';
|
||||
import store from '~/store';
|
||||
|
||||
export default function ModelSpecsMenu({ modelSpecs }: { modelSpecs: TModelSpec[] }) {
|
||||
const { conversation } = useChatContext();
|
||||
const { newConversation } = useNewConvo();
|
||||
|
||||
const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
|
||||
const modularChat = useRecoilValue(store.modularChat);
|
||||
const getDefaultConversation = useDefaultConvo();
|
||||
|
||||
const onSelectSpec = (spec: TModelSpec) => {
|
||||
const { preset } = spec;
|
||||
preset.iconURL = getModelSpecIconURL(spec);
|
||||
preset.spec = spec.name;
|
||||
const { endpoint: newEndpoint } = preset;
|
||||
if (!newEndpoint) {
|
||||
return;
|
||||
}
|
||||
|
||||
const {
|
||||
shouldSwitch,
|
||||
isNewModular,
|
||||
isCurrentModular,
|
||||
isExistingConversation,
|
||||
newEndpointType,
|
||||
template,
|
||||
} = getConvoSwitchLogic({
|
||||
newEndpoint,
|
||||
modularChat,
|
||||
conversation,
|
||||
endpointsConfig,
|
||||
});
|
||||
|
||||
if (isExistingConversation && isCurrentModular && isNewModular && shouldSwitch) {
|
||||
template.endpointType = newEndpointType as EModelEndpoint | undefined;
|
||||
|
||||
const currentConvo = getDefaultConversation({
|
||||
/* target endpointType is necessary to avoid endpoint mixing */
|
||||
conversation: { ...(conversation ?? {}), endpointType: template.endpointType },
|
||||
preset: template,
|
||||
});
|
||||
|
||||
/* We don't reset the latest message, only when changing settings mid-converstion */
|
||||
newConversation({ template: currentConvo, preset, keepLatestMessage: true });
|
||||
return;
|
||||
}
|
||||
|
||||
newConversation({ template: { ...(template as Partial<TConversation>) }, preset });
|
||||
};
|
||||
|
||||
const selected = useMemo(() => {
|
||||
const spec = modelSpecs?.find((spec) => spec.name === conversation?.spec);
|
||||
if (!spec) {
|
||||
return undefined;
|
||||
}
|
||||
return spec;
|
||||
}, [modelSpecs, conversation?.spec]);
|
||||
|
||||
return (
|
||||
<Root>
|
||||
<MenuButton
|
||||
primaryText={selected?.label ?? ''}
|
||||
selected={selected}
|
||||
endpointsConfig={endpointsConfig}
|
||||
/>
|
||||
<Portal>
|
||||
{modelSpecs && modelSpecs?.length && (
|
||||
<div
|
||||
style={{
|
||||
position: 'fixed',
|
||||
left: '0px',
|
||||
top: '0px',
|
||||
transform: 'translate3d(268px, 50px, 0px)',
|
||||
minWidth: 'max-content',
|
||||
zIndex: 'auto',
|
||||
}}
|
||||
>
|
||||
<Content
|
||||
side="bottom"
|
||||
align="start"
|
||||
className="models-scrollbar mt-2 max-h-[65vh] min-w-[340px] max-w-xs overflow-y-auto rounded-lg border border-gray-100 bg-white shadow-lg dark:border-gray-700 dark:bg-gray-700 dark:text-white lg:max-h-[75vh]"
|
||||
>
|
||||
<ModelSpecs
|
||||
specs={modelSpecs}
|
||||
selected={selected}
|
||||
setSelected={onSelectSpec}
|
||||
endpointsConfig={endpointsConfig}
|
||||
/>
|
||||
</Content>
|
||||
</div>
|
||||
)}
|
||||
</Portal>
|
||||
</Root>
|
||||
);
|
||||
}
|
||||
50
client/src/components/Chat/Menus/Models/SpecIcon.tsx
Normal file
50
client/src/components/Chat/Menus/Models/SpecIcon.tsx
Normal file
@@ -0,0 +1,50 @@
|
||||
import React from 'react';
|
||||
import type { TModelSpec, TEndpointsConfig } from 'librechat-data-provider';
|
||||
import type { IconMapProps } from '~/common';
|
||||
import { getModelSpecIconURL, getIconKey, getEndpointField } from '~/utils';
|
||||
import { icons } from '~/components/Chat/Menus/Endpoints/Icons';
|
||||
|
||||
interface SpecIconProps {
|
||||
currentSpec: TModelSpec;
|
||||
endpointsConfig: TEndpointsConfig;
|
||||
}
|
||||
|
||||
const SpecIcon: React.FC<SpecIconProps> = ({ currentSpec, endpointsConfig }) => {
|
||||
const iconURL = getModelSpecIconURL(currentSpec);
|
||||
const { endpoint } = currentSpec.preset;
|
||||
const endpointIconURL = getEndpointField(endpointsConfig, endpoint, 'iconURL');
|
||||
const iconKey = getIconKey({ endpoint, endpointsConfig, endpointIconURL });
|
||||
let Icon: (props: IconMapProps) => React.JSX.Element;
|
||||
|
||||
if (!iconURL?.includes('http')) {
|
||||
Icon = icons[iconKey] ?? icons.unknown;
|
||||
} else {
|
||||
Icon = iconURL
|
||||
? () => (
|
||||
<div
|
||||
className="icon-xl mr-1 shrink-0 overflow-hidden rounded-full "
|
||||
style={{ width: '20', height: '20' }}
|
||||
>
|
||||
<img
|
||||
src={iconURL}
|
||||
alt={currentSpec.name}
|
||||
style={{ width: '100%', height: '100%' }}
|
||||
className="object-cover"
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
: icons[endpoint ?? ''] ?? icons.unknown;
|
||||
}
|
||||
|
||||
return (
|
||||
<Icon
|
||||
size={20}
|
||||
endpoint={endpoint}
|
||||
context="menu-item"
|
||||
iconURL={endpointIconURL}
|
||||
className="icon-lg mr-1 shrink-0 dark:text-white"
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
export default SpecIcon;
|
||||
46
client/src/components/Chat/Menus/Models/fakeData.ts
Normal file
46
client/src/components/Chat/Menus/Models/fakeData.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
import { EModelEndpoint, AuthType } from 'librechat-data-provider';
|
||||
import type { TModelSpec } from 'librechat-data-provider';
|
||||
|
||||
export const data: TModelSpec[] = [
|
||||
{
|
||||
name: 'commander_01',
|
||||
label: 'Commander in Chief',
|
||||
description:
|
||||
'Salute your president, soldier! Salute your president, soldier! Salute your president, soldier!',
|
||||
iconURL: 'https://i.kym-cdn.com/entries/icons/facebook/000/017/252/2f0.jpg',
|
||||
// iconURL: EModelEndpoint.openAI,
|
||||
preset: {
|
||||
endpoint: 'Ollama',
|
||||
greeting: 'My fellow Americans,',
|
||||
// 'endpointType': EModelEndpoint.custom,
|
||||
frequency_penalty: 0,
|
||||
// 'imageDetail': 'auto',
|
||||
model: 'command-r',
|
||||
presence_penalty: 0,
|
||||
promptPrefix: null,
|
||||
resendFiles: false,
|
||||
temperature: 0.8,
|
||||
top_p: 0.5,
|
||||
},
|
||||
authType: AuthType.SYSTEM_DEFINED,
|
||||
},
|
||||
{
|
||||
name: 'vision_pro',
|
||||
label: 'Vision Pro',
|
||||
description:
|
||||
'Salute your president, soldier! Salute your president, soldier! Salute your president, soldier!',
|
||||
// iconURL: 'https://i.ytimg.com/vi/SaneSRqePVY/maxresdefault.jpg',
|
||||
iconURL: EModelEndpoint.openAI, // Allow using project-included icons
|
||||
preset: {
|
||||
chatGptLabel: 'Vision Helper',
|
||||
greeting: 'What\'s up!!',
|
||||
endpoint: EModelEndpoint.openAI,
|
||||
model: 'gpt-4-turbo',
|
||||
promptPrefix:
|
||||
'Examine images closely to understand its style, colors, composition, and other elements. Then, craft a detailed prompt to that closely resemble the original. Your focus is on accuracy in replicating the style, colors, techniques, and details of the original image in written form. Your prompt must be excruciatingly detailed as it will be given to an image generating AI for image generation. \n',
|
||||
temperature: 0.8,
|
||||
top_p: 1,
|
||||
},
|
||||
authType: AuthType.SYSTEM_DEFINED,
|
||||
},
|
||||
];
|
||||
@@ -105,7 +105,6 @@ const EditPresetDialog = ({
|
||||
conversation={preset}
|
||||
setOption={setOption}
|
||||
isPreset={true}
|
||||
isMultiChat={true}
|
||||
className="h-full md:mb-4 md:h-[440px]"
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -4,11 +4,11 @@ import { Flipper, Flipped } from 'react-flip-toolkit';
|
||||
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
|
||||
import type { FC } from 'react';
|
||||
import type { TPreset } from 'librechat-data-provider';
|
||||
import { getPresetTitle, getEndpointField, getIconKey } from '~/utils';
|
||||
import FileUpload from '~/components/Chat/Input/Files/FileUpload';
|
||||
import { PinIcon, EditIcon, TrashIcon } from '~/components/svg';
|
||||
import { Dialog, DialogTrigger, Label } from '~/components/ui';
|
||||
import DialogTemplate from '~/components/ui/DialogTemplate';
|
||||
import { getPresetTitle, getEndpointField } from '~/utils';
|
||||
import { Dialog, DialogTrigger, Label } from '~/components/ui/';
|
||||
import { MenuSeparator, MenuItem } from '../UI';
|
||||
import { icons } from '../Endpoints/Icons';
|
||||
import { useLocalize } from '~/hooks';
|
||||
@@ -115,9 +115,7 @@ const PresetItems: FC<{
|
||||
return null;
|
||||
}
|
||||
|
||||
const iconKey = getEndpointField(endpointsConfig, preset.endpoint, 'type')
|
||||
? 'unknown'
|
||||
: preset.endpointType ?? preset.endpoint ?? 'unknown';
|
||||
const iconKey = getIconKey({ endpoint: preset.endpoint, endpointsConfig });
|
||||
const Icon = icons[iconKey];
|
||||
|
||||
return (
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
export { default as EndpointsMenu } from './EndpointsMenu';
|
||||
export { default as PresetsMenu } from './PresetsMenu';
|
||||
export { default as EndpointsMenu } from './EndpointsMenu';
|
||||
export { default as HeaderNewChat } from './HeaderNewChat';
|
||||
export { default as ModelSpecsMenu } from './Models/ModelSpecsMenu';
|
||||
|
||||
@@ -3,6 +3,7 @@ import { EModelEndpoint } from 'librechat-data-provider';
|
||||
import type { TConversation, TMessage } from 'librechat-data-provider';
|
||||
import { Clipboard, CheckMark, EditIcon, RegenerateIcon, ContinueIcon } from '~/components/svg';
|
||||
import { useGenerationsByLatest, useLocalize } from '~/hooks';
|
||||
import { Fork } from '~/components/Conversations';
|
||||
import { cn } from '~/utils';
|
||||
|
||||
type THoverButtons = {
|
||||
@@ -34,13 +35,14 @@ export default function HoverButtons({
|
||||
const { endpoint: _endpoint, endpointType } = conversation ?? {};
|
||||
const endpoint = endpointType ?? _endpoint;
|
||||
const [isCopied, setIsCopied] = useState(false);
|
||||
const { hideEditButton, regenerateEnabled, continueSupported } = useGenerationsByLatest({
|
||||
isEditing,
|
||||
isSubmitting,
|
||||
message,
|
||||
endpoint: endpoint ?? '',
|
||||
latestMessage,
|
||||
});
|
||||
const { hideEditButton, regenerateEnabled, continueSupported, forkingSupported } =
|
||||
useGenerationsByLatest({
|
||||
isEditing,
|
||||
isSubmitting,
|
||||
message,
|
||||
endpoint: endpoint ?? '',
|
||||
latestMessage,
|
||||
});
|
||||
if (!conversation) {
|
||||
return null;
|
||||
}
|
||||
@@ -100,6 +102,13 @@ export default function HoverButtons({
|
||||
<RegenerateIcon className="hover:text-gray-700 dark:hover:bg-gray-700 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400" />
|
||||
</button>
|
||||
) : null}
|
||||
<Fork
|
||||
isLast={isLast}
|
||||
messageId={message.messageId}
|
||||
conversationId={conversation.conversationId}
|
||||
forkingSupported={forkingSupported}
|
||||
latestMessage={latestMessage}
|
||||
/>
|
||||
{continueSupported ? (
|
||||
<button
|
||||
className={cn(
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useRecoilValue } from 'recoil';
|
||||
import { useAuthContext, useMessageHelpers, useLocalize } from '~/hooks';
|
||||
import type { TMessageProps } from '~/common';
|
||||
import Icon from '~/components/Chat/Messages/MessageIcon';
|
||||
import { Plugin } from '~/components/Messages/Content';
|
||||
import MessageContent from './Content/MessageContent';
|
||||
import SiblingSwitch from './SiblingSwitch';
|
||||
@@ -18,7 +19,6 @@ export default function Message(props: TMessageProps) {
|
||||
|
||||
const {
|
||||
ask,
|
||||
icon,
|
||||
edit,
|
||||
isLast,
|
||||
enterEdit,
|
||||
@@ -42,7 +42,7 @@ export default function Message(props: TMessageProps) {
|
||||
|
||||
let messageLabel = '';
|
||||
if (isCreatedByUser) {
|
||||
messageLabel = UsernameDisplay ? user?.name : localize('com_user_message');
|
||||
messageLabel = UsernameDisplay ? user?.name || user?.username : localize('com_user_message');
|
||||
} else {
|
||||
messageLabel = message.sender;
|
||||
}
|
||||
@@ -60,11 +60,7 @@ export default function Message(props: TMessageProps) {
|
||||
<div>
|
||||
<div className="pt-0.5">
|
||||
<div className="flex h-6 w-6 items-center justify-center overflow-hidden rounded-full">
|
||||
{typeof icon === 'string' && /[^\\x00-\\x7F]+/.test(icon as string) ? (
|
||||
<span className=" direction-rtl w-40 overflow-x-scroll">{icon}</span>
|
||||
) : (
|
||||
icon
|
||||
)}
|
||||
<Icon message={message} conversation={conversation} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
58
client/src/components/Chat/Messages/MessageIcon.tsx
Normal file
58
client/src/components/Chat/Messages/MessageIcon.tsx
Normal file
@@ -0,0 +1,58 @@
|
||||
import { useMemo } from 'react';
|
||||
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
|
||||
import type { TMessage, TPreset, Assistant } from 'librechat-data-provider';
|
||||
import type { TMessageProps } from '~/common';
|
||||
import ConvoIconURL from '~/components/Endpoints/ConvoIconURL';
|
||||
import { getEndpointField, getIconEndpoint } from '~/utils';
|
||||
import Icon from '~/components/Endpoints/Icon';
|
||||
|
||||
export default function MessageIcon(
|
||||
props: Pick<TMessageProps, 'message' | 'conversation'> & {
|
||||
assistant?: false | Assistant;
|
||||
},
|
||||
) {
|
||||
const { data: endpointsConfig } = useGetEndpointsQuery();
|
||||
const { message, conversation, assistant } = props;
|
||||
|
||||
const assistantName = assistant ? (assistant.name as string | undefined) : '';
|
||||
const assistantAvatar = assistant ? (assistant.metadata?.avatar as string | undefined) : '';
|
||||
|
||||
const messageSettings = useMemo(
|
||||
() => ({
|
||||
...(conversation ?? {}),
|
||||
...({
|
||||
...message,
|
||||
iconURL: message?.iconURL ?? '',
|
||||
} as TMessage),
|
||||
}),
|
||||
[conversation, message],
|
||||
);
|
||||
|
||||
const iconURL = messageSettings?.iconURL;
|
||||
let endpoint = messageSettings?.endpoint;
|
||||
endpoint = getIconEndpoint({ endpointsConfig, iconURL, endpoint });
|
||||
const endpointIconURL = getEndpointField(endpointsConfig, endpoint, 'iconURL');
|
||||
|
||||
if (!message?.isCreatedByUser && iconURL && iconURL.includes('http')) {
|
||||
return (
|
||||
<ConvoIconURL
|
||||
preset={messageSettings as typeof messageSettings & TPreset}
|
||||
context="message"
|
||||
assistantAvatar={assistantAvatar}
|
||||
endpointIconURL={endpointIconURL}
|
||||
assistantName={assistantName}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Icon
|
||||
{...messageSettings}
|
||||
endpoint={endpoint}
|
||||
iconURL={!assistant ? endpointIconURL : assistantAvatar}
|
||||
model={message?.model ?? conversation?.model}
|
||||
assistantName={assistantName}
|
||||
size={28.8}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
import ContentParts from './Content/ContentParts';
|
||||
import type { TMessageProps } from '~/common';
|
||||
import Icon from '~/components/Chat/Messages/MessageIcon';
|
||||
import ContentParts from './Content/ContentParts';
|
||||
import SiblingSwitch from './SiblingSwitch';
|
||||
import { useMessageHelpers } from '~/hooks';
|
||||
// eslint-disable-next-line import/no-cycle
|
||||
@@ -14,7 +15,6 @@ export default function Message(props: TMessageProps) {
|
||||
|
||||
const {
|
||||
ask,
|
||||
icon,
|
||||
edit,
|
||||
isLast,
|
||||
enterEdit,
|
||||
@@ -47,11 +47,7 @@ export default function Message(props: TMessageProps) {
|
||||
<div>
|
||||
<div className="pt-0.5">
|
||||
<div className="shadow-stroke flex h-6 w-6 items-center justify-center overflow-hidden rounded-full">
|
||||
{typeof icon === 'string' && /[^\\x00-\\x7F]+/.test(icon as string) ? (
|
||||
<span className=" direction-rtl w-40 overflow-x-scroll">{icon}</span>
|
||||
) : (
|
||||
icon
|
||||
)}
|
||||
<Icon message={message} conversation={conversation} assistant={assistant} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useEffect } from 'react';
|
||||
import { useRecoilValue } from 'recoil';
|
||||
import { FileSources } from 'librechat-data-provider';
|
||||
import { useEffect, useMemo } from 'react';
|
||||
import { useGetStartupConfig } from 'librechat-data-provider/react-query';
|
||||
import { FileSources, LocalStorageKeys, getConfigDefaults } from 'librechat-data-provider';
|
||||
import type { ExtendedFile } from '~/common';
|
||||
import { useDragHelpers, useSetFilesToDelete } from '~/hooks';
|
||||
import DragDropOverlay from './Input/Files/DragDropOverlay';
|
||||
@@ -8,6 +9,8 @@ import { useDeleteFilesMutation } from '~/data-provider';
|
||||
import { SidePanel } from '~/components/SidePanel';
|
||||
import store from '~/store';
|
||||
|
||||
const defaultInterface = getConfigDefaults().interface;
|
||||
|
||||
export default function Presentation({
|
||||
children,
|
||||
useSidePanel = false,
|
||||
@@ -17,9 +20,16 @@ export default function Presentation({
|
||||
panel?: React.ReactNode;
|
||||
useSidePanel?: boolean;
|
||||
}) {
|
||||
const { data: startupConfig } = useGetStartupConfig();
|
||||
const hideSidePanel = useRecoilValue(store.hideSidePanel);
|
||||
const { isOver, canDrop, drop } = useDragHelpers();
|
||||
const interfaceConfig = useMemo(
|
||||
() => startupConfig?.interface ?? defaultInterface,
|
||||
[startupConfig],
|
||||
);
|
||||
|
||||
const setFilesToDelete = useSetFilesToDelete();
|
||||
const { isOver, canDrop, drop } = useDragHelpers();
|
||||
|
||||
const { mutateAsync } = useDeleteFilesMutation({
|
||||
onSuccess: () => {
|
||||
console.log('Temporary Files deleted');
|
||||
@@ -31,7 +41,7 @@ export default function Presentation({
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
const filesToDelete = localStorage.getItem('filesToDelete');
|
||||
const filesToDelete = localStorage.getItem(LocalStorageKeys.FILES_TO_DELETE);
|
||||
const map = JSON.parse(filesToDelete ?? '{}') as Record<string, ExtendedFile>;
|
||||
const files = Object.values(map)
|
||||
.filter((file) => file.filepath && file.source && !file.embedded && file.temp_file_id)
|
||||
@@ -49,12 +59,16 @@ export default function Presentation({
|
||||
}, [mutateAsync]);
|
||||
|
||||
const isActive = canDrop && isOver;
|
||||
const resizableLayout = localStorage.getItem('react-resizable-panels:layout');
|
||||
const collapsedPanels = localStorage.getItem('react-resizable-panels:collapsed');
|
||||
|
||||
const defaultLayout = resizableLayout ? JSON.parse(resizableLayout) : undefined;
|
||||
const defaultCollapsed = collapsedPanels ? JSON.parse(collapsedPanels) : undefined;
|
||||
const fullCollapse = localStorage.getItem('fullPanelCollapse') === 'true';
|
||||
const defaultLayout = useMemo(() => {
|
||||
const resizableLayout = localStorage.getItem('react-resizable-panels:layout');
|
||||
return resizableLayout ? JSON.parse(resizableLayout) : undefined;
|
||||
}, []);
|
||||
const defaultCollapsed = useMemo(() => {
|
||||
const collapsedPanels = localStorage.getItem('react-resizable-panels:collapsed');
|
||||
return collapsedPanels ? JSON.parse(collapsedPanels) : undefined;
|
||||
}, []);
|
||||
const fullCollapse = useMemo(() => localStorage.getItem('fullPanelCollapse') === 'true', []);
|
||||
|
||||
const layout = () => (
|
||||
<div className="transition-width relative flex h-full w-full flex-1 flex-col items-stretch overflow-hidden bg-white pt-0 dark:bg-gray-800">
|
||||
@@ -65,7 +79,7 @@ export default function Presentation({
|
||||
</div>
|
||||
);
|
||||
|
||||
if (useSidePanel && !hideSidePanel) {
|
||||
if (useSidePanel && !hideSidePanel && interfaceConfig.sidePanel) {
|
||||
return (
|
||||
<div
|
||||
ref={drop}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user