Compare commits
66 Commits
| Author | SHA1 | Date |
|---|---|---|
| | cdab1e9cda | |
| | 3df4fac118 | |
| | 0ae98ff011 | |
| | 4d05e5b79a | |
| | 199f9f32e6 | |
| | f94a782b4f | |
| | 738207de50 | |
| | c96f067689 | |
| | 3bfd185cab | |
| | c937b8cd07 | |
| | 6db91978ca | |
| | 8c22bb1d3d | |
| | 5d642d0187 | |
| | 4196a86fa9 | |
| | e6310c806a | |
| | 3d1dec62a4 | |
| | de3987cbaf | |
| | f406a85633 | |
| | 692ce3b346 | |
| | 26ea990045 | |
| | 265abbc1c8 | |
| | 0b7da72be6 | |
| | 3c184e9410 | |
| | bf4e64ce63 | |
| | 9d854dac07 | |
| | fce7246ac1 | |
| | 2cc580ba52 | |
| | d2d9ac0280 | |
| | f380f261a5 | |
| | 9d137ce42f | |
| | 25f92dd1c3 | |
| | 9277e2a0c5 | |
| | c19dfddd0f | |
| | 0fe47cf1f8 | |
| | 8e5f1ad575 | |
| | f64a2cb0b0 | |
| | e4c07eb895 | |
| | 2240fee44a | |
| | cb64b84846 | |
| | cc71125fa1 | |
| | 6f0eb35365 | |
| | 3411d7a543 | |
| | caabab4489 | |
| | 0b165260f7 | |
| | 334b603247 | |
| | 476767355b | |
| | e80debb704 | |
| | 549026f677 | |
| | f6a84887e1 | |
| | fb80af05be | |
| | cd7f3a51e1 | |
| | daa5f43ac6 | |
| | d0d8e47ec8 | |
| | 09cd1a7e74 | |
| | 94950b6e8b | |
| | e418edd3dc | |
| | e3c236ba3b | |
| | 7bd03a6e70 | |
| | f146db5c59 | |
| | 9922baf7d1 | |
| | 09da05afa1 | |
| | e66aa280c0 | |
| | ed17e17a73 | |
| | 30d084e696 | |
| | 93af814596 | |
| | 1bafe80e78 | |

@@ -78,7 +78,7 @@ PROXY=
#============#

ANTHROPIC_API_KEY=user_provided
# ANTHROPIC_MODELS=claude-3-opus-20240229,claude-3-sonnet-20240229,claude-2.1,claude-2,claude-1.2,claude-1,claude-1-100k,claude-instant-1,claude-instant-1-100k
# ANTHROPIC_MODELS=claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307,claude-2.1,claude-2,claude-1.2,claude-1,claude-1-100k,claude-instant-1,claude-instant-1-100k
# ANTHROPIC_REVERSE_PROXY=

#============#
@@ -148,7 +148,7 @@ ASSISTANTS_API_KEY=user_provided
#============#
# OpenRouter #
#============#

# !!!Warning: Use the variable above instead of this one. Using this one will override the OpenAI endpoint
# OPENROUTER_API_KEY=

#============#
@@ -192,7 +192,7 @@ AZURE_AI_SEARCH_SEARCH_OPTION_SELECT=

# Google
#-----------------
GOOGLE_API_KEY=
GOOGLE_SEARCH_API_KEY=
GOOGLE_CSE_ID=

# SerpAPI
@@ -316,6 +316,9 @@ OPENID_ISSUER=
OPENID_SESSION_SECRET=
OPENID_SCOPE="openid profile email"
OPENID_CALLBACK_URL=/oauth/openid/callback
OPENID_REQUIRED_ROLE=
OPENID_REQUIRED_ROLE_TOKEN_KIND=
OPENID_REQUIRED_ROLE_PARAMETER_PATH=

OPENID_BUTTON_LABEL=
OPENID_IMAGE_URL=

.github/workflows/backend-review.yml (vendored): 5 changes
@@ -51,6 +51,9 @@ jobs:
exit 1
fi

- name: Prepare .env.test file
run: cp api/test/.env.test.example api/test/.env.test

- name: Run unit tests
run: cd api && npm run test:ci

@@ -60,4 +63,4 @@ jobs:
- name: Run linters
uses: wearerequired/lint-action@v2
with:
eslint: true
eslint: true

@@ -1,4 +1,4 @@
# v0.7.0
# v0.7.1

# Base node image
FROM node:18-alpine3.18 AS node
@@ -26,6 +26,10 @@ RUN npm install --no-audit
ENV NODE_OPTIONS="--max-old-space-size=2048"
RUN npm run frontend

# Create directories for the volumes to inherit
# the correct permissions
RUN mkdir -p /app/client/public/images /app/api/logs

# Node API setup
EXPOSE 3080
ENV HOST=0.0.0.0

@@ -1,4 +1,4 @@
# v0.7.0
# v0.7.1

# Build API, Client and Data Provider
FROM node:20-alpine AS base
@@ -13,11 +13,12 @@ RUN npm run build
# React client build
FROM data-provider-build AS client-build
WORKDIR /app/client
COPY ./client/ ./
COPY ./client/package*.json ./
# Copy data-provider to client's node_modules
RUN mkdir -p /app/client/node_modules/librechat-data-provider/
RUN cp -R /app/packages/data-provider/* /app/client/node_modules/librechat-data-provider/
RUN npm install
COPY ./client/ ./
ENV NODE_OPTIONS="--max-old-space-size=2048"
RUN npm run build

@@ -27,7 +27,7 @@
</p>

<p align="center">
<a href="https://railway.app/template/b5k2mn?referralCode=HI9hWz">
<a href="https://railway.app/template/b5k2mn?referralCode=myKrVZ">
<img src="https://railway.app/button.svg" alt="Deploy on Railway" height="30">
</a>
<a href="https://zeabur.com/templates/0X2ZY8">

@@ -1,5 +1,6 @@
require('dotenv').config();
const { KeyvFile } = require('keyv-file');
const { EModelEndpoint } = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { logger } = require('~/config');

@@ -23,10 +24,7 @@ const askBing = async ({

let key = null;
if (expiresAt && isUserProvided) {
checkUserKeyExpiry(
expiresAt,
'Your BingAI Cookies have expired. Please provide your cookies again.',
);
checkUserKeyExpiry(expiresAt, EModelEndpoint.bingAI);
key = await getUserKey({ userId, name: 'bingAI' });
}

@@ -1,6 +1,6 @@
require('dotenv').config();
const { KeyvFile } = require('keyv-file');
const { Constants } = require('librechat-data-provider');
const { Constants, EModelEndpoint } = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('../server/services/UserService');

const browserClient = async ({
@@ -18,10 +18,7 @@ const browserClient = async ({

let key = null;
if (expiresAt && isUserProvided) {
checkUserKeyExpiry(
expiresAt,
'Your ChatGPT Access Token has expired. Please provide your token again.',
);
checkUserKeyExpiry(expiresAt, EModelEndpoint.chatGPTBrowser);
key = await getUserKey({ userId, name: 'chatGPTBrowser' });
}

@@ -23,7 +23,7 @@ class BaseClient {
throw new Error('Method \'setOptions\' must be implemented.');
}

getCompletion() {
async getCompletion() {
throw new Error('Method \'getCompletion\' must be implemented.');
}

@@ -3,10 +3,13 @@ const crypto = require('crypto');
const {
EModelEndpoint,
resolveHeaders,
CohereConstants,
mapModelToAzureConfig,
} = require('librechat-data-provider');
const { CohereClient } = require('cohere-ai');
const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
const { fetchEventSource } = require('@waylaidwanderer/fetch-event-source');
const { createCoherePayload } = require('./llm');
const { Agent, ProxyAgent } = require('undici');
const BaseClient = require('./BaseClient');
const { logger } = require('~/config');
@@ -147,7 +150,8 @@ class ChatGPTClient extends BaseClient {
return tokenizer;
}

async getCompletion(input, onProgress, abortController = null) {
/** @type {getCompletion} */
async getCompletion(input, onProgress, onTokenProgress, abortController = null) {
if (!abortController) {
abortController = new AbortController();
}
@@ -305,6 +309,11 @@ class ChatGPTClient extends BaseClient {
});
}

if (baseURL.startsWith(CohereConstants.API_URL)) {
const payload = createCoherePayload({ modelOptions });
return await this.cohereChatCompletion({ payload, onTokenProgress });
}

if (baseURL.includes('v1') && !baseURL.includes('/completions') && !this.isChatCompletion) {
baseURL = baseURL.split('v1')[0] + 'v1/completions';
} else if (
@@ -408,6 +417,35 @@ class ChatGPTClient extends BaseClient {
return response.json();
}

/** @type {cohereChatCompletion} */
async cohereChatCompletion({ payload, onTokenProgress }) {
const cohere = new CohereClient({
token: this.apiKey,
environment: this.completionsUrl,
});

if (!payload.stream) {
const chatResponse = await cohere.chat(payload);
return chatResponse.text;
}

const chatStream = await cohere.chatStream(payload);
let reply = '';
for await (const message of chatStream) {
if (!message) {
continue;
}

if (message.eventType === 'text-generation' && message.text) {
onTokenProgress(message.text);
} else if (message.eventType === 'stream-end' && message.response) {
reply = message.response.text;
}
}

return reply;
}

async generateTitle(userMessage, botMessage) {
const instructionsPayload = {
role: 'system',

@@ -1,7 +1,9 @@
const { google } = require('googleapis');
const { Agent, ProxyAgent } = require('undici');
const { GoogleVertexAI } = require('langchain/llms/googlevertexai');
const { ChatVertexAI } = require('@langchain/google-vertexai');
const { ChatGoogleGenerativeAI } = require('@langchain/google-genai');
const { GoogleGenerativeAI: GenAI } = require('@google/generative-ai');
const { GoogleVertexAI } = require('@langchain/community/llms/googlevertexai');
const { ChatGoogleVertexAI } = require('langchain/chat_models/googlevertexai');
const { AIMessage, HumanMessage, SystemMessage } = require('langchain/schema');
const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
@@ -10,6 +12,7 @@ const {
getResponseSender,
endpointSettings,
EModelEndpoint,
VisionModes,
AuthKeys,
} = require('librechat-data-provider');
const { encodeAndFormat } = require('~/server/services/Files/images');
@@ -126,7 +129,7 @@ class GoogleClient extends BaseClient {

this.options.attachments?.then((attachments) => this.checkVisionRequest(attachments));

// TODO: as of 12/14/23, only gemini models are "Generative AI" models provided by Google
/** @type {boolean} Whether using a "GenerativeAI" Model */
this.isGenerativeModel = this.modelOptions.model.includes('gemini');
const { isGenerativeModel } = this;
this.isChatModel = !isGenerativeModel && this.modelOptions.model.includes('chat');
@@ -234,7 +237,7 @@ class GoogleClient extends BaseClient {
this.isVisionModel = true;
}

if (this.isVisionModel && !attachments) {
if (this.isVisionModel && !attachments && this.modelOptions.model.includes('gemini-pro')) {
this.modelOptions.model = 'gemini-pro';
this.isVisionModel = false;
}
@@ -247,6 +250,40 @@ class GoogleClient extends BaseClient {
})).bind(this);
}

/**
* Formats messages for generative AI
* @param {TMessage[]} messages
* @returns
*/
async formatGenerativeMessages(messages) {
const formattedMessages = [];
const attachments = await this.options.attachments;
const latestMessage = { ...messages[messages.length - 1] };
const files = await this.addImageURLs(latestMessage, attachments, VisionModes.generative);
this.options.attachments = files;
messages[messages.length - 1] = latestMessage;

for (const _message of messages) {
const role = _message.isCreatedByUser ? this.userLabel : this.modelLabel;
const parts = [];
parts.push({ text: _message.text });
if (!_message.image_urls?.length) {
formattedMessages.push({ role, parts });
continue;
}

for (const images of _message.image_urls) {
if (images.inlineData) {
parts.push({ inlineData: images.inlineData });
}
}

formattedMessages.push({ role, parts });
}

return formattedMessages;
}

/**
*
* Adds image URLs to the message object and returns the files
@@ -255,17 +292,23 @@ class GoogleClient extends BaseClient {
* @param {MongoFile[]} files
* @returns {Promise<MongoFile[]>}
*/
async addImageURLs(message, attachments) {
async addImageURLs(message, attachments, mode = '') {
const { files, image_urls } = await encodeAndFormat(
this.options.req,
attachments,
EModelEndpoint.google,
mode,
);
message.image_urls = image_urls.length ? image_urls : undefined;
return files;
}

async buildVisionMessages(messages = [], parentMessageId) {
/**
* Builds the augmented prompt for attachments
* TODO: Add File API Support
* @param {TMessage[]} messages
*/
async buildAugmentedPrompt(messages = []) {
const attachments = await this.options.attachments;
const latestMessage = { ...messages[messages.length - 1] };
this.contextHandlers = createContextHandlers(this.options.req, latestMessage.text);
@@ -281,6 +324,12 @@ class GoogleClient extends BaseClient {
this.augmentedPrompt = await this.contextHandlers.createContext();
this.options.promptPrefix = this.augmentedPrompt + this.options.promptPrefix;
}
}

async buildVisionMessages(messages = [], parentMessageId) {
const attachments = await this.options.attachments;
const latestMessage = { ...messages[messages.length - 1] };
await this.buildAugmentedPrompt(messages);

const { prompt } = await this.buildMessagesPrompt(messages, parentMessageId);

@@ -301,15 +350,26 @@ class GoogleClient extends BaseClient {
return { prompt: payload };
}

/** @param {TMessage[]} [messages=[]] */
async buildGenerativeMessages(messages = []) {
this.userLabel = 'user';
this.modelLabel = 'model';
const promises = [];
promises.push(await this.formatGenerativeMessages(messages));
promises.push(this.buildAugmentedPrompt(messages));
const [formattedMessages] = await Promise.all(promises);
return { prompt: formattedMessages };
}

async buildMessages(messages = [], parentMessageId) {
if (!this.isGenerativeModel && !this.project_id) {
throw new Error(
'[GoogleClient] a Service Account JSON Key is required for PaLM 2 and Codey models (Vertex AI)',
);
} else if (this.isGenerativeModel && (!this.apiKey || this.apiKey === 'user_provided')) {
throw new Error(
'[GoogleClient] an API Key is required for Gemini models (Generative Language API)',
);
}

if (!this.project_id && this.modelOptions.model.includes('1.5')) {
return await this.buildGenerativeMessages(messages);
}

if (this.options.attachments && this.isGenerativeModel) {
@@ -526,13 +586,24 @@ class GoogleClient extends BaseClient {
}

createLLM(clientOptions) {
if (this.isGenerativeModel) {
return new ChatGoogleGenerativeAI({ ...clientOptions, apiKey: this.apiKey });
const model = clientOptions.modelName ?? clientOptions.model;
if (this.project_id && this.isTextModel) {
return new GoogleVertexAI(clientOptions);
} else if (this.project_id && this.isChatModel) {
return new ChatGoogleVertexAI(clientOptions);
} else if (this.project_id) {
return new ChatVertexAI(clientOptions);
} else if (model.includes('1.5')) {
return new GenAI(this.apiKey).getGenerativeModel(
{
...clientOptions,
model,
},
{ apiVersion: 'v1beta' },
);
}

return this.isTextModel
? new GoogleVertexAI(clientOptions)
: new ChatGoogleVertexAI(clientOptions);
return new ChatGoogleGenerativeAI({ ...clientOptions, apiKey: this.apiKey });
}

async getCompletion(_payload, options = {}) {
@@ -544,7 +615,7 @@ class GoogleClient extends BaseClient {

let clientOptions = { ...parameters, maxRetries: 2 };

if (!this.isGenerativeModel) {
if (this.project_id) {
clientOptions['authOptions'] = {
credentials: {
...this.serviceKey,
@@ -557,7 +628,7 @@ class GoogleClient extends BaseClient {
clientOptions = { ...clientOptions, ...this.modelOptions };
}

if (this.isGenerativeModel) {
if (this.isGenerativeModel && !this.project_id) {
clientOptions.modelName = clientOptions.model;
delete clientOptions.model;
}
@@ -588,16 +659,46 @@ class GoogleClient extends BaseClient {
messages.unshift(new SystemMessage(context));
}

const modelName = clientOptions.modelName ?? clientOptions.model ?? '';
if (modelName?.includes('1.5') && !this.project_id) {
/** @type {GenerativeModel} */
const client = model;
const requestOptions = {
contents: _payload,
};

if (this.options?.promptPrefix?.length) {
requestOptions.systemInstruction = {
parts: [
{
text: this.options.promptPrefix,
},
],
};
}

const result = await client.generateContentStream(requestOptions);
for await (const chunk of result.stream) {
const chunkText = chunk.text();
this.generateTextStream(chunkText, onProgress, {
delay: 12,
});
reply += chunkText;
}
return reply;
}

const stream = await model.stream(messages, {
signal: abortController.signal,
timeout: 7000,
});

for await (const chunk of stream) {
await this.generateTextStream(chunk?.content ?? chunk, onProgress, {
const chunkText = chunk?.content ?? chunk;
this.generateTextStream(chunkText, onProgress, {
delay: this.isGenerativeModel ? 12 : 8,
});
reply += chunk?.content ?? chunk;
reply += chunkText;
}

return reply;

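For orientation, a minimal sketch of the message shape that the new formatGenerativeMessages method builds, inferred from the code above. The values are hypothetical, and the inner fields of inlineData are whatever encodeAndFormat returns (shown here following the @google/generative-ai SDK convention as an assumption):

```js
// Illustrative only; not part of the diff.
const formattedMessages = [
  {
    role: 'user', // this.userLabel for messages created by the user
    parts: [
      { text: 'What is in this image?' },
      { inlineData: { mimeType: 'image/png', data: '<base64-encoded image>' } },
    ],
  },
  {
    role: 'model', // this.modelLabel for assistant messages
    parts: [{ text: 'It appears to be a cat sitting on a windowsill.' }],
  },
];
```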
@@ -5,6 +5,7 @@ const {
EModelEndpoint,
resolveHeaders,
ImageDetailCost,
CohereConstants,
getResponseSender,
validateVisionModel,
mapModelToAzureConfig,
@@ -16,7 +17,13 @@ const {
getModelMaxTokens,
genAzureChatCompletion,
} = require('~/utils');
const { truncateText, formatMessage, createContextHandlers, CUT_OFF_PROMPT } = require('./prompts');
const {
truncateText,
formatMessage,
createContextHandlers,
CUT_OFF_PROMPT,
titleInstruction,
} = require('./prompts');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { handleOpenAIErrors } = require('./tools/util');
const spendTokens = require('~/models/spendTokens');
@@ -39,7 +46,10 @@ class OpenAIClient extends BaseClient {
super(apiKey, options);
this.ChatGPTClient = new ChatGPTClient();
this.buildPrompt = this.ChatGPTClient.buildPrompt.bind(this);
/** @type {getCompletion} */
this.getCompletion = this.ChatGPTClient.getCompletion.bind(this);
/** @type {cohereChatCompletion} */
this.cohereChatCompletion = this.ChatGPTClient.cohereChatCompletion.bind(this);
this.contextStrategy = options.contextStrategy
? options.contextStrategy.toLowerCase()
: 'discard';
@@ -48,6 +58,9 @@ class OpenAIClient extends BaseClient {
this.azure = options.azure || false;
this.setOptions(options);
this.metadata = {};

/** @type {string | undefined} - The API Completions URL */
this.completionsUrl;
}

// TODO: PluginsClient calls this 3x, unneeded
@@ -533,6 +546,7 @@ class OpenAIClient extends BaseClient {
return result;
}

/** @type {sendCompletion} */
async sendCompletion(payload, opts = {}) {
let reply = '';
let result = null;
@@ -541,7 +555,7 @@ class OpenAIClient extends BaseClient {
const invalidBaseUrl = this.completionsUrl && extractBaseURL(this.completionsUrl) === null;
const useOldMethod = !!(invalidBaseUrl || !this.isChatCompletion || typeof Bun !== 'undefined');
if (typeof opts.onProgress === 'function' && useOldMethod) {
await this.getCompletion(
const completionResult = await this.getCompletion(
payload,
(progressMessage) => {
if (progressMessage === '[DONE]') {
@@ -574,8 +588,13 @@ class OpenAIClient extends BaseClient {
opts.onProgress(token);
reply += token;
},
opts.onProgress,
opts.abortController || new AbortController(),
);

if (completionResult && typeof completionResult === 'string') {
reply = completionResult;
}
} else if (typeof opts.onProgress === 'function' || this.options.useChatCompletion) {
reply = await this.chatCompletion({
payload,
@@ -586,9 +605,14 @@ class OpenAIClient extends BaseClient {
result = await this.getCompletion(
payload,
null,
opts.onProgress,
opts.abortController || new AbortController(),
);

if (result && typeof result === 'string') {
return result.trim();
}

logger.debug('[OpenAIClient] sendCompletion: result', result);

if (this.isChatCompletion) {
@@ -760,8 +784,7 @@ class OpenAIClient extends BaseClient {
const instructionsPayload = [
{
role: 'system',
content: `Detect user language and write in the same language an extremely concise title for this conversation, which you must accurately detect.
Write in the detected language. Title in 5 Words or Less. No Punctuation or Quotation. Do not mention the language. All first letters of every word should be capitalized and write the title in User Language only.
content: `Please generate ${titleInstruction}

${convo}

@@ -769,10 +792,18 @@ ${convo}
},
];

const promptTokens = this.getTokenCountForMessage(instructionsPayload[0]);

try {
let useChatCompletion = true;
if (this.options.reverseProxyUrl === CohereConstants.API_URL) {
useChatCompletion = false;
}
title = (
await this.sendPayload(instructionsPayload, { modelOptions, useChatCompletion: true })
await this.sendPayload(instructionsPayload, { modelOptions, useChatCompletion })
).replaceAll('"', '');
const completionTokens = this.getTokenCount(title);
this.recordTokenUsage({ promptTokens, completionTokens, context: 'title' });
} catch (e) {
logger.error(
'[OpenAIClient] There was an issue generating the title with the completion method',
@@ -924,12 +955,12 @@ ${convo}
}
}

async recordTokenUsage({ promptTokens, completionTokens }) {
async recordTokenUsage({ promptTokens, completionTokens, context = 'message' }) {
await spendTokens(
{
context,
user: this.user,
model: this.modelOptions.model,
context: 'message',
conversationId: this.conversationId,
endpointTokenConfig: this.options.endpointTokenConfig,
},

@@ -244,7 +244,7 @@ class PluginsClient extends OpenAIClient {
this.setOptions(opts);
return super.sendMessage(message, opts);
}
logger.debug('[PluginsClient] sendMessage', { message, opts });
logger.debug('[PluginsClient] sendMessage', { userMessageText: message, opts });
const {
user,
isEdited,

api/app/clients/llm/createCoherePayload.js (new file): 85 changes
@@ -0,0 +1,85 @@
const { CohereConstants } = require('librechat-data-provider');
const { titleInstruction } = require('../prompts/titlePrompts');

// Mapping OpenAI roles to Cohere roles
const roleMap = {
user: CohereConstants.ROLE_USER,
assistant: CohereConstants.ROLE_CHATBOT,
system: CohereConstants.ROLE_SYSTEM, // Recognize and map the system role explicitly
};

/**
* Adjusts an OpenAI ChatCompletionPayload to conform with Cohere's expected chat payload format.
* Now includes handling for "system" roles explicitly mentioned.
*
* @param {Object} options - Object containing the model options.
* @param {ChatCompletionPayload} options.modelOptions - The OpenAI model payload options.
* @returns {CohereChatStreamRequest} Cohere-compatible chat API payload.
*/
function createCoherePayload({ modelOptions }) {
/** @type {string | undefined} */
let preamble;
let latestUserMessageContent = '';
const {
stream,
stop,
top_p,
temperature,
frequency_penalty,
presence_penalty,
max_tokens,
messages,
model,
...rest
} = modelOptions;

// Filter out the latest user message and transform remaining messages to Cohere's chat_history format
let chatHistory = messages.reduce((acc, message, index, arr) => {
const isLastUserMessage = index === arr.length - 1 && message.role === 'user';

const messageContent =
typeof message.content === 'string'
? message.content
: message.content.map((part) => (part.type === 'text' ? part.text : '')).join(' ');

if (isLastUserMessage) {
latestUserMessageContent = messageContent;
} else {
acc.push({
role: roleMap[message.role] || CohereConstants.ROLE_USER,
message: messageContent,
});
}

return acc;
}, []);

if (
chatHistory.length === 1 &&
chatHistory[0].role === CohereConstants.ROLE_SYSTEM &&
!latestUserMessageContent.length
) {
const message = chatHistory[0].message;
latestUserMessageContent = message.includes(titleInstruction)
? CohereConstants.TITLE_MESSAGE
: '.';
preamble = message;
}

return {
message: latestUserMessageContent,
model: model,
chatHistory,
stream: stream ?? false,
temperature: temperature,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
maxTokens: max_tokens,
stopSequences: stop,
preamble,
p: top_p,
...rest,
};
}

module.exports = createCoherePayload;

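A small usage sketch of the new helper (not part of the diff; the require path assumes this repo's `~` module alias, and the resolved role strings depend on CohereConstants):

```js
const { createCoherePayload } = require('~/app/clients/llm');

const payload = createCoherePayload({
  modelOptions: {
    model: 'command-r',
    stream: true,
    temperature: 0.7,
    messages: [
      { role: 'system', content: 'You are a helpful assistant.' },
      { role: 'user', content: 'Hello!' },
      { role: 'assistant', content: 'Hi! How can I help?' },
      { role: 'user', content: 'Summarize our chat so far.' },
    ],
  },
});

// Expected shape:
// {
//   message: 'Summarize our chat so far.',   // latest user message
//   model: 'command-r',
//   chatHistory: [
//     { role: CohereConstants.ROLE_SYSTEM, message: 'You are a helpful assistant.' },
//     { role: CohereConstants.ROLE_USER, message: 'Hello!' },
//     { role: CohereConstants.ROLE_CHATBOT, message: 'Hi! How can I help?' },
//   ],
//   stream: true,
//   temperature: 0.7,
//   ...                                       // remaining options map through
// }
```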
@@ -1,7 +1,9 @@
const createLLM = require('./createLLM');
const RunManager = require('./RunManager');
const createCoherePayload = require('./createCoherePayload');

module.exports = {
createLLM,
RunManager,
createCoherePayload,
};

@@ -13,7 +13,7 @@ module.exports = {
...handleInputs,
...instructions,
...titlePrompts,
truncateText,
...truncateText,
createVisionPrompt,
createContextHandlers,
};

@@ -27,6 +27,8 @@ ${convo}`,
return titlePrompt;
};

const titleInstruction =
'a concise, 5-word-or-less title for the conversation, using its same language, with no punctuation. Apply title case conventions appropriate for the language. For English, use AP Stylebook Title Case. Never directly mention the language name or the word "title"';
const titleFunctionPrompt = `In this environment you have access to a set of tools you can use to generate the conversation title.

You may call them like this:
@@ -51,7 +53,7 @@ Submit a brief title in the conversation's language, following the parameter des
<parameter>
<name>title</name>
<type>string</type>
<description>A concise, 5-word-or-less title for the conversation, using its same language, with no punctuation. Apply title case conventions appropriate for the language. For English, use AP Stylebook Title Case. Never directly mention the language name or the word "title"</description>
<description>${titleInstruction}</description>
</parameter>
</parameters>
</tool_description>
@@ -80,6 +82,7 @@ function parseTitleFromPrompt(prompt) {

module.exports = {
langPrompt,
titleInstruction,
createTitlePrompt,
titleFunctionPrompt,
parseTitleFromPrompt,

@@ -1,10 +1,40 @@
const MAX_CHAR = 255;

function truncateText(text) {
if (text.length > MAX_CHAR) {
return `${text.slice(0, MAX_CHAR)}... [text truncated for brevity]`;
/**
* Truncates a given text to a specified maximum length, appending ellipsis and a notification
* if the original text exceeds the maximum length.
*
* @param {string} text - The text to be truncated.
* @param {number} [maxLength=MAX_CHAR] - The maximum length of the text after truncation. Defaults to MAX_CHAR.
* @returns {string} The truncated text if the original text length exceeds maxLength, otherwise returns the original text.
*/
function truncateText(text, maxLength = MAX_CHAR) {
if (text.length > maxLength) {
return `${text.slice(0, maxLength)}... [text truncated for brevity]`;
}
return text;
}

module.exports = truncateText;
/**
* Truncates a given text to a specified maximum length by showing the first half and the last half of the text,
* separated by ellipsis. This method ensures the output does not exceed the maximum length, including the addition
* of ellipsis and notification if the original text exceeds the maximum length.
*
* @param {string} text - The text to be truncated.
* @param {number} [maxLength=MAX_CHAR] - The maximum length of the output text after truncation. Defaults to MAX_CHAR.
* @returns {string} The truncated text showing the first half and the last half, or the original text if it does not exceed maxLength.
*/
function smartTruncateText(text, maxLength = MAX_CHAR) {
const ellipsis = '...';
const notification = ' [text truncated for brevity]';
const halfMaxLength = Math.floor((maxLength - ellipsis.length - notification.length) / 2);

if (text.length > maxLength) {
const startLastHalf = text.length - halfMaxLength;
return `${text.slice(0, halfMaxLength)}${ellipsis}${text.slice(startLastHalf)}${notification}`;
}

return text;
}

module.exports = { truncateText, smartTruncateText };

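A brief illustration of how the two truncation helpers differ (a sketch; the import assumes the prompts barrel file shown earlier, which re-exports both functions via `...truncateText`):

```js
const { truncateText, smartTruncateText } = require('~/app/clients/prompts');

const longText = 'a'.repeat(300) + 'END';

// truncateText keeps only the head of the string:
truncateText(longText, 255);
// -> first 255 characters + '... [text truncated for brevity]'

// smartTruncateText keeps the head and the tail, so the ending survives:
smartTruncateText(longText, 255);
// -> first 111 characters + '...' + last 111 characters (ending in 'END') + ' [text truncated for brevity]'
```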
@@ -24,7 +24,7 @@
"description": "This is your Google Custom Search Engine ID. For instructions on how to obtain this, see <a href='https://github.com/danny-avila/LibreChat/blob/main/docs/features/plugins/google_search.md'>Our Docs</a>."
},
{
"authField": "GOOGLE_API_KEY",
"authField": "GOOGLE_SEARCH_API_KEY",
"label": "Google API Key",
"description": "This is your Google Custom Search API Key. For instructions on how to obtain this, see <a href='https://github.com/danny-avila/LibreChat/blob/main/docs/features/plugins/google_search.md'>Our Docs</a>."
}
@@ -60,7 +60,7 @@
"name": "CodeSherpa",
"pluginKey": "codesherpa_tools",
"description": "[Experimental] A REPL for your chat. Requires https://github.com/iamgreggarcia/codesherpa",
"icon": "https://github.com/iamgreggarcia/codesherpa/blob/main/localserver/_logo.png",
"icon": "https://raw.githubusercontent.com/iamgreggarcia/codesherpa/main/localserver/_logo.png",
"authConfig": [
{
"authField": "CODESHERPA_SERVER_URL",

@@ -9,7 +9,7 @@ class GoogleSearchResults extends Tool {

constructor(fields = {}) {
super(fields);
this.envVarApiKey = 'GOOGLE_API_KEY';
this.envVarApiKey = 'GOOGLE_SEARCH_API_KEY';
this.envVarSearchEngineId = 'GOOGLE_CSE_ID';
this.override = fields.override ?? false;
this.apiKey = fields.apiKey ?? getEnvironmentVariable(this.envVarApiKey);

api/cache/banViolation.js (vendored): 7 changes
@@ -1,6 +1,7 @@
const Session = require('~/models/Session');
const getLogStores = require('./getLogStores');
const { ViolationTypes } = require('librechat-data-provider');
const { isEnabled, math, removePorts } = require('~/server/utils');
const getLogStores = require('./getLogStores');
const Session = require('~/models/Session');
const { logger } = require('~/config');

const { BAN_VIOLATIONS, BAN_INTERVAL } = process.env ?? {};
@@ -48,7 +49,7 @@ const banViolation = async (req, res, errorMessage) => {
await Session.deleteAllUserSessions(user_id);
res.clearCookie('refreshToken');

const banLogs = getLogStores('ban');
const banLogs = getLogStores(ViolationTypes.BAN);
const duration = errorMessage.duration || banLogs.opts.ttl;

if (duration <= 0) {

api/cache/banViolation.spec.js (vendored): 3 changes
@@ -6,6 +6,7 @@ jest.mock('../models/Session');
jest.mock('./getLogStores', () => {
return jest.fn().mockImplementation(() => {
const EventEmitter = require('events');
const { CacheKeys } = require('librechat-data-provider');
const math = require('../server/utils/math');
const mockGet = jest.fn();
const mockSet = jest.fn();
@@ -33,7 +34,7 @@ jest.mock('./getLogStores', () => {
}

return new KeyvMongo('', {
namespace: 'bans',
namespace: CacheKeys.BANS,
ttl: math(process.env.BAN_DURATION, 7200000),
});
});

api/cache/getLogStores.js (vendored): 12 changes
@@ -6,6 +6,7 @@ const keyvRedis = require('./keyvRedis');
const keyvMongo = require('./keyvMongo');

const { BAN_DURATION, USE_REDIS } = process.env ?? {};
const THIRTY_MINUTES = 1800000;

const duration = math(BAN_DURATION, 7200000);

@@ -24,8 +25,8 @@ const config = isEnabled(USE_REDIS)
: new Keyv({ namespace: CacheKeys.CONFIG_STORE });

const tokenConfig = isEnabled(USE_REDIS) // ttl: 30 minutes
? new Keyv({ store: keyvRedis, ttl: 1800000 })
: new Keyv({ namespace: CacheKeys.TOKEN_CONFIG, ttl: 1800000 });
? new Keyv({ store: keyvRedis, ttl: THIRTY_MINUTES })
: new Keyv({ namespace: CacheKeys.TOKEN_CONFIG, ttl: THIRTY_MINUTES });

const genTitle = isEnabled(USE_REDIS) // ttl: 2 minutes
? new Keyv({ store: keyvRedis, ttl: 120000 })
@@ -42,7 +43,12 @@ const abortKeys = isEnabled(USE_REDIS)
const namespaces = {
[CacheKeys.CONFIG_STORE]: config,
pending_req,
ban: new Keyv({ store: keyvMongo, namespace: 'bans', ttl: duration }),
[ViolationTypes.BAN]: new Keyv({ store: keyvMongo, namespace: CacheKeys.BANS, ttl: duration }),
[CacheKeys.ENCODED_DOMAINS]: new Keyv({
store: keyvMongo,
namespace: CacheKeys.ENCODED_DOMAINS,
ttl: 0,
}),
general: new Keyv({ store: logFile, namespace: 'violations' }),
concurrent: createViolationInstance('concurrent'),
non_browser: createViolationInstance('non_browser'),

@@ -6,6 +6,8 @@ module.exports = {
clientPath: path.resolve(__dirname, '..', '..', 'client'),
dist: path.resolve(__dirname, '..', '..', 'client', 'dist'),
publicPath: path.resolve(__dirname, '..', '..', 'client', 'public'),
fonts: path.resolve(__dirname, '..', '..', 'client', 'public', 'fonts'),
assets: path.resolve(__dirname, '..', '..', 'client', 'public', 'assets'),
imageOutput: path.resolve(__dirname, '..', '..', 'client', 'public', 'images'),
structuredTools: path.resolve(__dirname, '..', 'app', 'clients', 'tools', 'structured'),
pluginManifest: path.resolve(__dirname, '..', 'app', 'clients', 'tools', 'manifest.json'),

@@ -5,19 +5,18 @@ const Action = mongoose.model('action', actionSchema);

/**
* Update an action with new data without overwriting existing properties,
* or create a new action if it doesn't exist.
* or create a new action if it doesn't exist, within a transaction session if provided.
*
* @param {Object} searchParams - The search parameters to find the action to update.
* @param {string} searchParams.action_id - The ID of the action to update.
* @param {string} searchParams.user - The user ID of the action's author.
* @param {Object} updateData - An object containing the properties to update.
* @param {mongoose.ClientSession} [session] - The transaction session to use.
* @returns {Promise<Object>} The updated or newly created action document as a plain object.
*/
const updateAction = async (searchParams, updateData) => {
return await Action.findOneAndUpdate(searchParams, updateData, {
new: true,
upsert: true,
}).lean();
const updateAction = async (searchParams, updateData, session = null) => {
const options = { new: true, upsert: true, session };
return await Action.findOneAndUpdate(searchParams, updateData, options).lean();
};

/**
@@ -50,15 +49,17 @@ const getActions = async (searchParams, includeSensitive = false) => {
};

/**
* Deletes an action by its ID.
* Deletes an action by params, within a transaction session if provided.
*
* @param {Object} searchParams - The search parameters to find the action to update.
* @param {string} searchParams.action_id - The ID of the action to update.
* @param {Object} searchParams - The search parameters to find the action to delete.
* @param {string} searchParams.action_id - The ID of the action to delete.
* @param {string} searchParams.user - The user ID of the action's author.
* @param {mongoose.ClientSession} [session] - The transaction session to use (optional).
* @returns {Promise<Object>} A promise that resolves to the deleted action document as a plain object, or null if no document was found.
*/
const deleteAction = async (searchParams) => {
return await Action.findOneAndDelete(searchParams).lean();
const deleteAction = async (searchParams, session = null) => {
const options = session ? { session } : {};
return await Action.findOneAndDelete(searchParams, options).lean();
};

module.exports = {

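The new optional session argument lets these model helpers take part in a MongoDB transaction. A hedged sketch of calling them with mongoose's transaction API directly (the route changes later in this diff go through a withSession helper instead, whose implementation is not shown; identifiers below are placeholders):

```js
const mongoose = require('mongoose');
const { updateAction, deleteAction } = require('~/models/Action');

async function replaceAction({ action_id, oldActionId, user, metadata }) {
  const session = await mongoose.startSession();
  try {
    await session.withTransaction(async () => {
      // Both writes commit or roll back together.
      await updateAction({ action_id, user }, { metadata }, session);
      await deleteAction({ action_id: oldActionId, user }, session);
    });
  } finally {
    await session.endSession();
  }
}
```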
@@ -5,19 +5,18 @@ const Assistant = mongoose.model('assistant', assistantSchema);

/**
* Update an assistant with new data without overwriting existing properties,
* or create a new assistant if it doesn't exist.
* or create a new assistant if it doesn't exist, within a transaction session if provided.
*
* @param {Object} searchParams - The search parameters to find the assistant to update.
* @param {string} searchParams.assistant_id - The ID of the assistant to update.
* @param {string} searchParams.user - The user ID of the assistant's author.
* @param {Object} updateData - An object containing the properties to update.
* @param {mongoose.ClientSession} [session] - The transaction session to use (optional).
* @returns {Promise<Object>} The updated or newly created assistant document as a plain object.
*/
const updateAssistant = async (searchParams, updateData) => {
return await Assistant.findOneAndUpdate(searchParams, updateData, {
new: true,
upsert: true,
}).lean();
const updateAssistant = async (searchParams, updateData, session = null) => {
const options = { new: true, upsert: true, session };
return await Assistant.findOneAndUpdate(searchParams, updateData, options).lean();
};

/**

@@ -12,7 +12,7 @@ transactionSchema.methods.calculateTokenValue = function () {
this.tokenValue = this.rawAmount;
}
const { valueKey, tokenType, model, endpointTokenConfig } = this;
const multiplier = getMultiplier({ valueKey, tokenType, model, endpointTokenConfig });
const multiplier = Math.abs(getMultiplier({ valueKey, tokenType, model, endpointTokenConfig }));
this.rate = multiplier;
this.tokenValue = this.rawAmount * multiplier;
if (this.context && this.tokenType === 'completion' && this.context === 'incomplete') {
@@ -36,18 +36,24 @@ transactionSchema.statics.create = async function (transactionData) {
return;
}

// Adjust the user's balance
const updatedBalance = await Balance.findOneAndUpdate(
let balance = await Balance.findOne({ user: transaction.user }).lean();
let incrementValue = transaction.tokenValue;

if (balance && balance?.tokenCredits + incrementValue < 0) {
incrementValue = -balance.tokenCredits;
}

balance = await Balance.findOneAndUpdate(
{ user: transaction.user },
{ $inc: { tokenCredits: transaction.tokenValue } },
{ $inc: { tokenCredits: incrementValue } },
{ upsert: true, new: true },
).lean();

return {
rate: transaction.rate,
user: transaction.user.toString(),
balance: updatedBalance.tokenCredits,
[transaction.tokenType]: transaction.tokenValue,
balance: balance.tokenCredits,
[transaction.tokenType]: incrementValue,
};
};

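To make the intent of the new balance handling concrete, a short worked example with hypothetical numbers:

```js
// A user has 1,000 tokenCredits and a completion computes tokenValue = -1,500.
const balance = { tokenCredits: 1000 };
let incrementValue = -1500;

// The new clamp never debits more than the remaining balance.
if (balance && balance.tokenCredits + incrementValue < 0) {
  incrementValue = -balance.tokenCredits; // -1000
}
// The $inc then applies -1000, leaving tokenCredits at 0 rather than -500.
```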
@@ -99,4 +99,6 @@ const fileSchema = mongoose.Schema(
},
);

fileSchema.index({ createdAt: 1, updatedAt: 1 });

module.exports = fileSchema;

@@ -54,7 +54,7 @@ const spendTokens = async (txData, tokenUsage) => {
prompt &&
completion &&
logger.debug('[spendTokens] Transaction data record against balance:', {
user: prompt.user,
user: txData.user,
prompt: prompt.prompt,
promptRate: prompt.rate,
completion: completion.completion,

@@ -3,6 +3,7 @@ const defaultRate = 6;

/**
* Mapping of model token sizes to their respective multipliers for prompt and completion.
* The rates are 1 USD per 1M tokens.
* @type {Object.<string, {prompt: number, completion: number}>}
*/
const tokenValues = {
@@ -19,6 +20,15 @@ const tokenValues = {
'claude-2.1': { prompt: 8, completion: 24 },
'claude-2': { prompt: 8, completion: 24 },
'claude-': { prompt: 0.8, completion: 2.4 },
'command-r-plus': { prompt: 3, completion: 15 },
'command-r': { prompt: 0.5, completion: 1.5 },
/* cohere doesn't have rates for the older command models,
so this was from https://artificialanalysis.ai/models/command-light/providers */
command: { prompt: 0.38, completion: 0.38 },
// 'gemini-1.5': { prompt: 7, completion: 21 }, // May 2nd, 2024 pricing
// 'gemini': { prompt: 0.5, completion: 1.5 }, // May 2nd, 2024 pricing
'gemini-1.5': { prompt: 0, completion: 0 }, // currently free
gemini: { prompt: 0, completion: 0 }, // currently free
};

/**
@@ -42,6 +52,8 @@ const getValueKey = (model, endpoint) => {
return 'gpt-3.5-turbo-1106';
} else if (modelName.includes('gpt-3.5')) {
return '4k';
} else if (modelName.includes('gpt-4-vision')) {
return 'gpt-4-1106';
} else if (modelName.includes('gpt-4-1106')) {
return 'gpt-4-1106';
} else if (modelName.includes('gpt-4-0125')) {

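Given the added comment that rates are expressed in USD per 1M tokens, a quick worked cost example using the new command-r-plus entry (illustrative arithmetic only; the credit accounting itself lives in the transaction code above):

```js
const rate = { prompt: 3, completion: 15 }; // USD per 1M tokens (command-r-plus)

const promptTokens = 2000;
const completionTokens = 500;

const costUSD =
  (promptTokens / 1e6) * rate.prompt + (completionTokens / 1e6) * rate.completion;
// 0.006 + 0.0075 = 0.0135 USD
```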
@@ -34,6 +34,13 @@ describe('getValueKey', () => {
expect(getValueKey('openai/gpt-4-1106')).toBe('gpt-4-1106');
expect(getValueKey('gpt-4-1106/openai/')).toBe('gpt-4-1106');
});

it('should return "gpt-4-1106" for model type of "gpt-4-1106"', () => {
expect(getValueKey('gpt-4-vision-preview')).toBe('gpt-4-1106');
expect(getValueKey('openai/gpt-4-1106')).toBe('gpt-4-1106');
expect(getValueKey('gpt-4-turbo')).toBe('gpt-4-1106');
expect(getValueKey('gpt-4-0125')).toBe('gpt-4-1106');
});
});

describe('getMultiplier', () => {

@@ -1,6 +1,6 @@
{
"name": "@librechat/backend",
"version": "0.7.0",
"version": "0.7.1",
"description": "",
"scripts": {
"start": "echo 'please run this from the root directory'",
@@ -35,14 +35,16 @@
"dependencies": {
"@anthropic-ai/sdk": "^0.16.1",
"@azure/search-documents": "^12.0.0",
"@google/generative-ai": "^0.5.0",
"@keyv/mongo": "^2.1.8",
"@keyv/redis": "^2.8.1",
"@langchain/community": "^0.0.17",
"@langchain/google-genai": "^0.0.8",
"@langchain/community": "^0.0.46",
"@langchain/google-genai": "^0.0.11",
"@langchain/google-vertexai": "^0.0.5",
"axios": "^1.3.4",
"bcryptjs": "^2.4.3",
"cheerio": "^1.0.0-rc.12",
"cohere-ai": "^6.0.0",
"cohere-ai": "^7.9.1",
"connect-redis": "^7.1.0",
"cookie": "^0.5.0",
"cors": "^2.8.5",
@@ -52,7 +54,7 @@
"express-rate-limit": "^6.9.0",
"express-session": "^1.17.3",
"file-type": "^18.7.0",
"firebase": "^10.8.0",
"firebase": "^10.6.0",
"googleapis": "^126.0.1",
"handlebars": "^4.7.7",
"html": "^1.0.0",
@@ -72,7 +74,7 @@
"multer": "^1.4.5-lts.1",
"nodejs-gpt": "^1.37.4",
"nodemailer": "^6.9.4",
"openai": "^4.29.0",
"openai": "4.36.0",
"openai-chat-tokens": "^0.2.8",
"openid-client": "^5.4.2",
"passport": "^0.6.0",

@@ -1,5 +1,5 @@
const throttle = require('lodash/throttle');
const { getResponseSender, Constants } = require('librechat-data-provider');
const { getResponseSender, Constants, EModelEndpoint } = require('librechat-data-provider');
const { createAbortController, handleAbortError } = require('~/server/middleware');
const { sendMessage, createOnProgress } = require('~/server/utils');
const { saveMessage, getConvo } = require('~/models');
@@ -48,7 +48,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {

try {
const { client } = await initializeClient({ req, res, endpointOption });

const unfinished = endpointOption.endpoint === EModelEndpoint.google ? false : true;
const { onProgress: progressCallback, getPartialText } = createOnProgress({
onProgress: throttle(
({ text: partialText }) => {
@@ -59,7 +59,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
parentMessageId: overrideParentMessageId ?? userMessageId,
text: partialText,
model: client.modelOptions.model,
unfinished: true,
unfinished,
error: false,
user,
});

@@ -76,14 +76,14 @@ const refreshController = async (req, res) => {
}

try {
let payload;
payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
const userId = payload.id;
const user = await User.findOne({ _id: userId });
const payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
const user = await User.findOne({ _id: payload.id });
if (!user) {
return res.status(401).redirect('/login');
}

const userId = payload.id;

if (process.env.NODE_ENV === 'CI') {
const token = await setAuthTokens(userId, res);
const userObj = user.toJSON();
@@ -118,6 +118,6 @@ module.exports = {
getUserController,
refreshController,
registrationController,
resetPasswordRequestController,
resetPasswordController,
resetPasswordRequestController,
};

@@ -1,5 +1,5 @@
const throttle = require('lodash/throttle');
const { getResponseSender } = require('librechat-data-provider');
const { getResponseSender, EModelEndpoint } = require('librechat-data-provider');
const { createAbortController, handleAbortError } = require('~/server/middleware');
const { sendMessage, createOnProgress } = require('~/server/utils');
const { saveMessage, getConvo } = require('~/models');
@@ -48,6 +48,7 @@ const EditController = async (req, res, next, initializeClient) => {
}
};

const unfinished = endpointOption.endpoint === EModelEndpoint.google ? false : true;
const { onProgress: progressCallback, getPartialText } = createOnProgress({
generation,
onProgress: throttle(
@@ -59,7 +60,7 @@ const EditController = async (req, res, next, initializeClient) => {
parentMessageId: overrideParentMessageId ?? userMessageId,
text: partialText,
model: endpointOption.modelOptions.model,
unfinished: true,
unfinished,
isEdited: true,
error: false,
user,

@@ -6,6 +6,7 @@ const axios = require('axios');
const express = require('express');
const passport = require('passport');
const mongoSanitize = require('express-mongo-sanitize');
const validateImageRequest = require('./middleware/validateImageRequest');
const errorController = require('./controllers/ErrorController');
const { jwtLogin, passportLogin } = require('~/strategies');
const configureSocialLogins = require('./socialLogins');
@@ -43,7 +44,8 @@ const startServer = async () => {
app.use(mongoSanitize());
app.use(express.urlencoded({ extended: true, limit: '3mb' }));
app.use(express.static(app.locals.paths.dist));
app.use(express.static(app.locals.paths.publicPath));
app.use(express.static(app.locals.paths.fonts));
app.use(express.static(app.locals.paths.assets));
app.set('trust proxy', 1); // trust first proxy
app.use(cors());

@@ -82,6 +84,7 @@ const startServer = async () => {
app.use('/api/config', routes.config);
app.use('/api/assistants', routes.assistants);
app.use('/api/files', await routes.files.initialize());
app.use('/images/', validateImageRequest, routes.staticRoute);

app.use((req, res) => {
res.status(404).sendFile(path.join(app.locals.paths.dist, 'index.html'));

@@ -1,9 +1,9 @@
const { EModelEndpoint } = require('librechat-data-provider');
const { sendMessage, sendError, countTokens, isEnabled } = require('~/server/utils');
const { truncateText, smartTruncateText } = require('~/app/clients/prompts');
const { saveMessage, getConvo, getConvoTitle } = require('~/models');
const clearPendingReq = require('~/cache/clearPendingReq');
const abortControllers = require('./abortControllers');
const { redactMessage } = require('~/config/parsers');
const spendTokens = require('~/models/spendTokens');
const { abortRun } = require('./abortRun');
const { logger } = require('~/config');
@@ -100,7 +100,15 @@ const createAbortController = (req, res, getAbortData) => {
};

const handleAbortError = async (res, req, error, data) => {
logger.error('[handleAbortError] AI response error; aborting request:', error);
if (error?.message?.includes('base64')) {
logger.error('[handleAbortError] Error in base64 encoding', {
...error,
stack: smartTruncateText(error?.stack, 1000),
message: truncateText(error.message, 350),
});
} else {
logger.error('[handleAbortError] AI response error; aborting request:', error);
}
const { sender, conversationId, messageId, parentMessageId, partialText } = data;

if (error.stack && error.stack.includes('google')) {
@@ -109,13 +117,17 @@ const handleAbortError = async (res, req, error, data) => {
);
}

const errorText = error?.message?.includes('"type"')
? error.message
: 'An error occurred while processing your request. Please contact the Admin.';

const respondWithError = async (partialText) => {
let options = {
sender,
messageId,
conversationId,
parentMessageId,
text: redactMessage(error.message),
text: errorText,
shouldSaveMessage: true,
user: req.user.id,
};

@@ -75,7 +75,6 @@ async function abortRun(req, res) {
});

const finalEvent = {
title: 'New Chat',
final: true,
conversation,
runMessages,

@@ -1,14 +1,15 @@
const Keyv = require('keyv');
const uap = require('ua-parser-js');
const denyRequest = require('./denyRequest');
const { getLogStores } = require('../../cache');
const { ViolationTypes } = require('librechat-data-provider');
const { isEnabled, removePorts } = require('../utils');
const keyvRedis = require('../../cache/keyvRedis');
const User = require('../../models/User');
const keyvRedis = require('~/cache/keyvRedis');
const denyRequest = require('./denyRequest');
const { getLogStores } = require('~/cache');
const User = require('~/models/User');

const banCache = isEnabled(process.env.USE_REDIS)
? new Keyv({ store: keyvRedis })
: new Keyv({ namespace: 'bans', ttl: 0 });
: new Keyv({ namespace: ViolationTypes.BAN, ttl: 0 });
const message = 'Your account has been temporarily banned due to violations of our service.';

/**
@@ -28,7 +29,7 @@ const banResponse = async (req, res) => {
if (!ua.browser.name) {
return res.status(403).json({ message });
} else if (baseUrl === '/api/ask' || baseUrl === '/api/edit') {
return await denyRequest(req, res, { type: 'ban' });
return await denyRequest(req, res, { type: ViolationTypes.BAN });
}

return res.status(403).json({ message });
@@ -87,7 +88,7 @@ const checkBan = async (req, res, next = () => {}) => {
return await banResponse(req, res);
}

const banLogs = getLogStores('ban');
const banLogs = getLogStores(ViolationTypes.BAN);
const duration = banLogs.opts.ttl;

if (duration <= 0) {

@@ -14,6 +14,7 @@ const concurrentLimiter = require('./concurrentLimiter');
const validateMessageReq = require('./validateMessageReq');
const buildEndpointOption = require('./buildEndpointOption');
const validateRegistration = require('./validateRegistration');
const validateImageRequest = require('./validateImageRequest');
const moderateText = require('./moderateText');
const noIndex = require('./noIndex');

@@ -33,6 +34,7 @@ module.exports = {
validateMessageReq,
buildEndpointOption,
validateRegistration,
validateImageRequest,
validateModel,
moderateText,
noIndex,

@@ -1,4 +1,5 @@
const axios = require('axios');
const { ErrorTypes } = require('librechat-data-provider');
const denyRequest = require('./denyRequest');
const { logger } = require('~/config');

@@ -24,7 +25,7 @@ async function moderateText(req, res, next) {
const flagged = results.some((result) => result.flagged);

if (flagged) {
const type = 'moderation';
const type = ErrorTypes.MODERATION;
const errorMessage = { type };
return await denyRequest(req, res, errorMessage);
}

42 api/server/middleware/validateImageRequest.js Normal file
@@ -0,0 +1,42 @@
const cookies = require('cookie');
const jwt = require('jsonwebtoken');
const { logger } = require('~/config');

/**
 * Middleware to validate image request.
 * Must be set by `secureImageLinks` via custom config file.
 */
function validateImageRequest(req, res, next) {
  if (!req.app.locals.secureImageLinks) {
    return next();
  }

  const refreshToken = req.headers.cookie ? cookies.parse(req.headers.cookie).refreshToken : null;
  if (!refreshToken) {
    logger.warn('[validateImageRequest] Refresh token not provided');
    return res.status(401).send('Unauthorized');
  }

  let payload;
  try {
    payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
  } catch (err) {
    logger.warn('[validateImageRequest]', err);
    return res.status(403).send('Access Denied');
  }

  const currentTimeInSeconds = Math.floor(Date.now() / 1000);
  if (payload.exp < currentTimeInSeconds) {
    logger.warn('[validateImageRequest] Refresh token expired');
    return res.status(403).send('Access Denied');
  }

  if (req.path.includes(payload.id)) {
    logger.debug('[validateImageRequest] Image request validated');
    next();
  } else {
    res.status(403).send('Access Denied');
  }
}

module.exports = validateImageRequest;
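The middleware only engages when `secureImageLinks` is enabled in the custom config. A minimal wiring sketch (illustrative, not taken verbatim from these commits) showing how it can sit in front of the static image router added later in this diff; the mount path and the repo's `~` path alias are assumptions here:

const express = require('express');
const validateImageRequest = require('~/server/middleware/validateImageRequest');
const staticRoute = require('~/server/routes/static');

const app = express();
app.locals.secureImageLinks = true; // normally populated by AppService from the custom config file
// Image requests must carry a valid refreshToken cookie whose user id appears in
// the requested path; anything else is answered with 401/403 by the middleware.
app.use('/images', validateImageRequest, staticRoute);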
@@ -1,10 +1,11 @@
|
||||
const { v4 } = require('uuid');
|
||||
const express = require('express');
|
||||
const { actionDelimiter } = require('librechat-data-provider');
|
||||
const { initializeClient } = require('~/server/services/Endpoints/assistants');
|
||||
const { encryptMetadata, domainParser } = require('~/server/services/ActionService');
|
||||
const { actionDelimiter, EModelEndpoint } = require('librechat-data-provider');
|
||||
const { initializeClient } = require('~/server/services/Endpoints/assistants');
|
||||
const { updateAction, getActions, deleteAction } = require('~/models/Action');
|
||||
const { updateAssistant, getAssistant } = require('~/models/Assistant');
|
||||
const { withSession } = require('~/server/utils');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const router = express.Router();
|
||||
@@ -46,7 +47,7 @@ router.post('/:assistant_id', async (req, res) => {
|
||||
|
||||
let { domain } = metadata;
|
||||
/* Azure doesn't support periods in function names */
|
||||
domain = domainParser(req, domain, true);
|
||||
domain = await domainParser(req, domain, true);
|
||||
|
||||
if (!domain) {
|
||||
return res.status(400).json({ message: 'No domain provided' });
|
||||
@@ -110,7 +111,8 @@ router.post('/:assistant_id', async (req, res) => {
|
||||
|
||||
const promises = [];
|
||||
promises.push(
|
||||
updateAssistant(
|
||||
withSession(
|
||||
updateAssistant,
|
||||
{ assistant_id },
|
||||
{
|
||||
actions,
|
||||
@@ -119,7 +121,9 @@ router.post('/:assistant_id', async (req, res) => {
|
||||
),
|
||||
);
|
||||
promises.push(openai.beta.assistants.update(assistant_id, { tools }));
|
||||
promises.push(updateAction({ action_id }, { metadata, assistant_id, user: req.user.id }));
|
||||
promises.push(
|
||||
withSession(updateAction, { action_id }, { metadata, assistant_id, user: req.user.id }),
|
||||
);
|
||||
|
||||
/** @type {[AssistantDocument, Assistant, Action]} */
|
||||
const resolved = await Promise.all(promises);
|
||||
@@ -129,6 +133,15 @@ router.post('/:assistant_id', async (req, res) => {
|
||||
delete resolved[2].metadata[field];
|
||||
}
|
||||
}
|
||||
|
||||
/* Map Azure OpenAI model to the assistant as defined by config */
|
||||
if (req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) {
|
||||
resolved[1] = {
|
||||
...resolved[1],
|
||||
model: req.body.model,
|
||||
};
|
||||
}
|
||||
|
||||
res.json(resolved);
|
||||
} catch (error) {
|
||||
const message = 'Trouble updating the Assistant Action';
|
||||
@@ -171,7 +184,7 @@ router.delete('/:assistant_id/:action_id/:model', async (req, res) => {
|
||||
return true;
|
||||
});
|
||||
|
||||
domain = domainParser(req, domain, true);
|
||||
domain = await domainParser(req, domain, true);
|
||||
|
||||
const updatedTools = tools.filter(
|
||||
(tool) => !(tool.function && tool.function.name.includes(domain)),
|
||||
@@ -179,7 +192,8 @@ router.delete('/:assistant_id/:action_id/:model', async (req, res) => {
|
||||
|
||||
const promises = [];
|
||||
promises.push(
|
||||
updateAssistant(
|
||||
withSession(
|
||||
updateAssistant,
|
||||
{ assistant_id },
|
||||
{
|
||||
actions: updatedActions,
|
||||
@@ -188,7 +202,7 @@ router.delete('/:assistant_id/:action_id/:model', async (req, res) => {
|
||||
),
|
||||
);
|
||||
promises.push(openai.beta.assistants.update(assistant_id, { tools: updatedTools }));
|
||||
promises.push(deleteAction({ action_id }));
|
||||
promises.push(withSession(deleteAction, { action_id }));
|
||||
|
||||
await Promise.all(promises);
|
||||
res.status(200).json({ message: 'Action deleted successfully' });
|
||||
|
||||
@@ -213,7 +213,13 @@ router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) =>
|
||||
/** @type {{ openai: OpenAI }} */
|
||||
const { openai } = await initializeClient({ req, res });
|
||||
|
||||
const image = await uploadImageBuffer({ req, context: FileContext.avatar });
|
||||
const image = await uploadImageBuffer({
|
||||
req,
|
||||
context: FileContext.avatar,
|
||||
metadata: {
|
||||
buffer: req.file.buffer,
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
_metadata = JSON.parse(_metadata);
|
||||
|
||||
@@ -247,7 +247,6 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||
}
|
||||
|
||||
finalEvent = {
|
||||
title: 'New Chat',
|
||||
final: true,
|
||||
conversation: await getConvo(req.user.id, conversationId),
|
||||
runMessages,
|
||||
@@ -477,7 +476,6 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||
|
||||
conversation = {
|
||||
conversationId,
|
||||
title: 'New Chat',
|
||||
endpoint: EModelEndpoint.assistants,
|
||||
promptPrefix: promptPrefix,
|
||||
instructions: instructions,
|
||||
@@ -607,7 +605,6 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||
};
|
||||
|
||||
sendMessage(res, {
|
||||
title: 'New Chat',
|
||||
final: true,
|
||||
conversation,
|
||||
requestMessage: {
|
||||
|
||||
@@ -18,13 +18,15 @@ router.post('/', upload.single('input'), async (req, res) => {
|
||||
}
|
||||
|
||||
const fileStrategy = req.app.locals.fileStrategy;
|
||||
const webPBuffer = await resizeAvatar({
|
||||
const desiredFormat = req.app.locals.imageOutputType;
|
||||
const resizedBuffer = await resizeAvatar({
|
||||
userId,
|
||||
input,
|
||||
desiredFormat,
|
||||
});
|
||||
|
||||
const { processAvatar } = getStrategyFunctions(fileStrategy);
|
||||
const url = await processAvatar({ buffer: webPBuffer, userId, manual });
|
||||
const url = await processAvatar({ buffer: resizedBuffer, userId, manual });
|
||||
|
||||
res.json({ url });
|
||||
} catch (error) {
|
||||
|
||||
@@ -66,17 +66,16 @@ router.delete('/', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
router.get('/download/:userId/:filepath', async (req, res) => {
|
||||
router.get('/download/:userId/:file_id', async (req, res) => {
|
||||
try {
|
||||
const { userId, filepath } = req.params;
|
||||
const { userId, file_id } = req.params;
|
||||
logger.debug(`File download requested by user ${userId}: ${file_id}`);
|
||||
|
||||
if (userId !== req.user.id) {
|
||||
logger.warn(`${errorPrefix} forbidden: ${file_id}`);
|
||||
return res.status(403).send('Forbidden');
|
||||
}
|
||||
|
||||
const parts = filepath.split('/');
|
||||
const file_id = parts[2];
|
||||
const [file] = await getFiles({ file_id });
|
||||
const errorPrefix = `File download requested by user ${userId}`;
|
||||
|
||||
@@ -114,8 +113,10 @@ router.get('/download/:userId/:filepath', async (req, res) => {
|
||||
if (file.source === FileSources.openai) {
|
||||
req.body = { model: file.model };
|
||||
const { openai } = await initializeClient({ req, res });
|
||||
logger.debug(`Downloading file ${file_id} from OpenAI`);
|
||||
passThrough = await getDownloadStream(file_id, openai);
|
||||
setHeaders();
|
||||
logger.debug(`File ${file_id} downloaded from OpenAI`);
|
||||
passThrough.body.pipe(res);
|
||||
} else {
|
||||
fileStream = getDownloadStream(file_id);
|
||||
|
||||
@@ -17,6 +17,7 @@ const user = require('./user');
|
||||
const config = require('./config');
|
||||
const assistants = require('./assistants');
|
||||
const files = require('./files');
|
||||
const staticRoute = require('./static');
|
||||
|
||||
module.exports = {
|
||||
search,
|
||||
@@ -38,4 +39,5 @@ module.exports = {
|
||||
config,
|
||||
assistants,
|
||||
files,
|
||||
staticRoute,
|
||||
};
|
||||
|
||||
7 api/server/routes/static.js Normal file
@@ -0,0 +1,7 @@
|
||||
const express = require('express');
|
||||
const paths = require('~/config/paths');
|
||||
|
||||
const router = express.Router();
|
||||
router.use(express.static(paths.imageOutput));
|
||||
|
||||
module.exports = router;
|
||||
@@ -1,20 +1,27 @@
const { AuthTypeEnum, EModelEndpoint, actionDomainSeparator } = require('librechat-data-provider');
const {
  AuthTypeEnum,
  EModelEndpoint,
  actionDomainSeparator,
  CacheKeys,
  Constants,
} = require('librechat-data-provider');
const { encryptV2, decryptV2 } = require('~/server/utils/crypto');
const { getActions } = require('~/models/Action');
const { getLogStores } = require('~/cache');
const { logger } = require('~/config');

/**
 * Parses the domain for an action.
 * Encodes or decodes a domain name to/from base64, or replaces periods with a custom separator.
 *
 * Azure OpenAI Assistants API doesn't support periods in function
 * names due to `[a-zA-Z0-9_-]*` Regex Validation.
 * Necessary because the Azure OpenAI Assistants API doesn't support periods in function
 * names due to `[a-zA-Z0-9_-]*` Regex Validation, limited to a 64-character maximum.
 *
 * @param {Express.Request} req - Express Request object
 * @param {string} domain - The domain for the action
 * @param {boolean} inverse - If true, replaces periods with `actionDomainSeparator`
 * @returns {string} The parsed domain
 * @param {Express.Request} req - The Express Request object.
 * @param {string} domain - The domain name to encode/decode.
 * @param {boolean} inverse - False to decode from base64, true to encode to base64.
 * @returns {Promise<string>} Encoded or decoded domain string.
 */
function domainParser(req, domain, inverse = false) {
async function domainParser(req, domain, inverse = false) {
  if (!domain) {
    return;
  }
@@ -23,11 +30,35 @@ function domainParser(req, domain, inverse = false) {
    return domain;
  }

  if (inverse) {
  const domainsCache = getLogStores(CacheKeys.ENCODED_DOMAINS);
  const cachedDomain = await domainsCache.get(domain);
  if (inverse && cachedDomain) {
    return domain;
  }

  if (inverse && domain.length <= Constants.ENCODED_DOMAIN_LENGTH) {
    return domain.replace(/\./g, actionDomainSeparator);
  }

  return domain.replace(actionDomainSeparator, '.');
  if (inverse) {
    const modifiedDomain = Buffer.from(domain).toString('base64');
    const key = modifiedDomain.substring(0, Constants.ENCODED_DOMAIN_LENGTH);
    await domainsCache.set(key, modifiedDomain);
    return key;
  }

  const replaceSeparatorRegex = new RegExp(actionDomainSeparator, 'g');

  if (!cachedDomain) {
    return domain.replace(replaceSeparatorRegex, '.');
  }

  try {
    return Buffer.from(cachedDomain, 'base64').toString('utf-8');
  } catch (error) {
    logger.error(`Failed to parse domain (possibly not base64): ${domain}`, error);
    return domain;
  }
}
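In effect, the encoded key is what Azure sees in the function name, and the cache lets the decode path restore the full hostname. A round-trip sketch, using only inputs that appear in the spec file below and assuming an `azureOpenAI` assistants request object plus the repo's `~` path alias:

const { Constants, actionDomainSeparator } = require('librechat-data-provider');
const { domainParser } = require('~/server/services/ActionService');

async function roundTrip(req) {
  // Domains at or under ENCODED_DOMAIN_LENGTH only have their periods swapped
  // for the separator (the spec treats 'examp.com' as under the threshold).
  const short = await domainParser(req, 'examp.com', true);

  // Longer domains are base64-encoded, truncated to ENCODED_DOMAIN_LENGTH and
  // cached, so the full value can be restored on decode.
  const longDomain = 'longdomainname.com';
  const encoded = await domainParser(req, longDomain, true);
  const decoded = await domainParser(req, encoded, false);
  return { short, encoded, decoded }; // decoded === longDomain
}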
|
||||
|
||||
/**
|
||||
|
||||
196 api/server/services/ActionService.spec.js Normal file
@@ -0,0 +1,196 @@
|
||||
const { Constants, EModelEndpoint, actionDomainSeparator } = require('librechat-data-provider');
|
||||
const { domainParser } = require('./ActionService');
|
||||
|
||||
jest.mock('keyv');
|
||||
|
||||
const globalCache = {};
|
||||
jest.mock('~/cache/getLogStores', () => {
|
||||
return jest.fn().mockImplementation(() => {
|
||||
const EventEmitter = require('events');
|
||||
const { CacheKeys } = require('librechat-data-provider');
|
||||
|
||||
class KeyvMongo extends EventEmitter {
|
||||
constructor(url = 'mongodb://127.0.0.1:27017', options) {
|
||||
super();
|
||||
this.ttlSupport = false;
|
||||
url = url ?? {};
|
||||
if (typeof url === 'string') {
|
||||
url = { url };
|
||||
}
|
||||
if (url.uri) {
|
||||
url = { url: url.uri, ...url };
|
||||
}
|
||||
this.opts = {
|
||||
url,
|
||||
collection: 'keyv',
|
||||
...url,
|
||||
...options,
|
||||
};
|
||||
}
|
||||
|
||||
get = async (key) => {
|
||||
return new Promise((resolve) => {
|
||||
resolve(globalCache[key] || null);
|
||||
});
|
||||
};
|
||||
|
||||
set = async (key, value) => {
|
||||
return new Promise((resolve) => {
|
||||
globalCache[key] = value;
|
||||
resolve(true);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
return new KeyvMongo('', {
|
||||
namespace: CacheKeys.ENCODED_DOMAINS,
|
||||
ttl: 0,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('domainParser', () => {
|
||||
const req = {
|
||||
app: {
|
||||
locals: {
|
||||
[EModelEndpoint.azureOpenAI]: {
|
||||
assistants: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const reqNoAzure = {
|
||||
app: {
|
||||
locals: {
|
||||
[EModelEndpoint.azureOpenAI]: {
|
||||
assistants: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const TLD = '.com';
|
||||
|
||||
// Non-azure request
|
||||
it('returns domain as is if not azure', async () => {
|
||||
const domain = `example.com${actionDomainSeparator}test${actionDomainSeparator}`;
|
||||
const result1 = await domainParser(reqNoAzure, domain, false);
|
||||
const result2 = await domainParser(reqNoAzure, domain, true);
|
||||
expect(result1).toEqual(domain);
|
||||
expect(result2).toEqual(domain);
|
||||
});
|
||||
|
||||
// Test for Empty or Null Inputs
|
||||
it('returns undefined for null domain input', async () => {
|
||||
const result = await domainParser(req, null, true);
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it('returns undefined for empty domain input', async () => {
|
||||
const result = await domainParser(req, '', true);
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
// Verify Correct Caching Behavior
|
||||
it('caches encoded domain correctly', async () => {
|
||||
const domain = 'longdomainname.com';
|
||||
const encodedDomain = Buffer.from(domain)
|
||||
.toString('base64')
|
||||
.substring(0, Constants.ENCODED_DOMAIN_LENGTH);
|
||||
|
||||
await domainParser(req, domain, true);
|
||||
|
||||
const cachedValue = await globalCache[encodedDomain];
|
||||
expect(cachedValue).toEqual(Buffer.from(domain).toString('base64'));
|
||||
});
|
||||
|
||||
// Test for Edge Cases Around Length Threshold
|
||||
it('encodes domain exactly at threshold without modification', async () => {
|
||||
const domain = 'a'.repeat(Constants.ENCODED_DOMAIN_LENGTH - TLD.length) + TLD;
|
||||
const expected = domain.replace(/\./g, actionDomainSeparator);
|
||||
const result = await domainParser(req, domain, true);
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
|
||||
it('encodes domain just below threshold without modification', async () => {
|
||||
const domain = 'a'.repeat(Constants.ENCODED_DOMAIN_LENGTH - 1 - TLD.length) + TLD;
|
||||
const expected = domain.replace(/\./g, actionDomainSeparator);
|
||||
const result = await domainParser(req, domain, true);
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
|
||||
// Test for Unicode Domain Names
|
||||
it('handles unicode characters in domain names correctly when encoding', async () => {
|
||||
const unicodeDomain = 'täst.example.com';
|
||||
const encodedDomain = Buffer.from(unicodeDomain)
|
||||
.toString('base64')
|
||||
.substring(0, Constants.ENCODED_DOMAIN_LENGTH);
|
||||
const result = await domainParser(req, unicodeDomain, true);
|
||||
expect(result).toEqual(encodedDomain);
|
||||
});
|
||||
|
||||
it('decodes unicode domain names correctly', async () => {
|
||||
const unicodeDomain = 'täst.example.com';
|
||||
const encodedDomain = Buffer.from(unicodeDomain).toString('base64');
|
||||
globalCache[encodedDomain.substring(0, Constants.ENCODED_DOMAIN_LENGTH)] = encodedDomain; // Simulate caching
|
||||
|
||||
const result = await domainParser(
|
||||
req,
|
||||
encodedDomain.substring(0, Constants.ENCODED_DOMAIN_LENGTH),
|
||||
false,
|
||||
);
|
||||
expect(result).toEqual(unicodeDomain);
|
||||
});
|
||||
|
||||
// Core Functionality Tests
|
||||
it('returns domain with replaced separators if no cached domain exists', async () => {
|
||||
const domain = 'example.com';
|
||||
const withSeparator = domain.replace(/\./g, actionDomainSeparator);
|
||||
const result = await domainParser(req, withSeparator, false);
|
||||
expect(result).toEqual(domain);
|
||||
});
|
||||
|
||||
it('returns domain with replaced separators when inverse is false and under encoding length', async () => {
|
||||
const domain = 'examp.com';
|
||||
const withSeparator = domain.replace(/\./g, actionDomainSeparator);
|
||||
const result = await domainParser(req, withSeparator, false);
|
||||
expect(result).toEqual(domain);
|
||||
});
|
||||
|
||||
it('replaces periods with actionDomainSeparator when inverse is true and under encoding length', async () => {
|
||||
const domain = 'examp.com';
|
||||
const expected = domain.replace(/\./g, actionDomainSeparator);
|
||||
const result = await domainParser(req, domain, true);
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
|
||||
it('encodes domain when length is above threshold and inverse is true', async () => {
|
||||
const domain = 'a'.repeat(Constants.ENCODED_DOMAIN_LENGTH + 1).concat('.com');
|
||||
const result = await domainParser(req, domain, true);
|
||||
expect(result).not.toEqual(domain);
|
||||
expect(result.length).toBeLessThanOrEqual(Constants.ENCODED_DOMAIN_LENGTH);
|
||||
});
|
||||
|
||||
it('returns encoded value if no encoded value is cached, and inverse is false', async () => {
|
||||
const originalDomain = 'example.com';
|
||||
const encodedDomain = Buffer.from(
|
||||
originalDomain.replace(/\./g, actionDomainSeparator),
|
||||
).toString('base64');
|
||||
const result = await domainParser(req, encodedDomain, false);
|
||||
expect(result).toEqual(encodedDomain);
|
||||
});
|
||||
|
||||
it('decodes encoded value if cached and encoded value is provided, and inverse is false', async () => {
|
||||
const originalDomain = 'example.com';
|
||||
const encodedDomain = await domainParser(req, originalDomain, true);
|
||||
const result = await domainParser(req, encodedDomain, false);
|
||||
expect(result).toEqual(originalDomain);
|
||||
});
|
||||
|
||||
it('handles invalid base64 encoded values gracefully', async () => {
|
||||
const invalidBase64Domain = 'not_base64_encoded';
|
||||
const result = await domainParser(req, invalidBase64Domain, false);
|
||||
expect(result).toEqual(invalidBase64Domain);
|
||||
});
|
||||
});
|
||||
@@ -1,21 +1,17 @@
|
||||
const {
|
||||
Constants,
|
||||
FileSources,
|
||||
Capabilities,
|
||||
EModelEndpoint,
|
||||
EImageOutputType,
|
||||
defaultSocialLogins,
|
||||
validateAzureGroups,
|
||||
mapModelToAzureConfig,
|
||||
assistantEndpointSchema,
|
||||
deprecatedAzureVariables,
|
||||
conflictingAzureVariables,
|
||||
} = require('librechat-data-provider');
|
||||
const { checkVariables, checkHealth, checkConfig, checkAzureVariables } = require('./start/checks');
|
||||
const { azureAssistantsDefaults, assistantsConfigSetup } = require('./start/assistants');
|
||||
const { initializeFirebase } = require('./Files/Firebase/initialize');
|
||||
const loadCustomConfig = require('./Config/loadCustomConfig');
|
||||
const handleRateLimits = require('./Config/handleRateLimits');
|
||||
const { azureConfigSetup } = require('./start/azureOpenAI');
|
||||
const { loadAndFormatTools } = require('./ToolService');
|
||||
const paths = require('~/config/paths');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -28,8 +24,12 @@ const AppService = async (app) => {
|
||||
const config = (await loadCustomConfig()) ?? {};
|
||||
|
||||
const fileStrategy = config.fileStrategy ?? FileSources.local;
|
||||
const imageOutputType = config?.imageOutputType ?? EImageOutputType.PNG;
|
||||
process.env.CDN_PROVIDER = fileStrategy;
|
||||
|
||||
checkVariables();
|
||||
await checkHealth();
|
||||
|
||||
if (fileStrategy === FileSources.firebase) {
|
||||
initializeFirebase();
|
||||
}
|
||||
@@ -50,130 +50,46 @@ const AppService = async (app) => {
|
||||
|
||||
if (!Object.keys(config).length) {
|
||||
app.locals = {
|
||||
availableTools,
|
||||
paths,
|
||||
fileStrategy,
|
||||
socialLogins,
|
||||
paths,
|
||||
availableTools,
|
||||
imageOutputType,
|
||||
};
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (config.version !== Constants.CONFIG_VERSION) {
|
||||
logger.info(
|
||||
`\nOutdated Config version: ${config.version}. Current version: ${Constants.CONFIG_VERSION}\n\nCheck out the latest config file guide for new options and features.\nhttps://docs.librechat.ai/install/configuration/custom_config.html\n\n`,
|
||||
);
|
||||
}
|
||||
|
||||
checkConfig(config);
|
||||
handleRateLimits(config?.rateLimits);
|
||||
|
||||
const endpointLocals = {};
|
||||
|
||||
if (config?.endpoints?.[EModelEndpoint.azureOpenAI]) {
|
||||
const { groups, ...azureConfiguration } = config.endpoints[EModelEndpoint.azureOpenAI];
|
||||
const { isValid, modelNames, modelGroupMap, groupMap, errors } = validateAzureGroups(groups);
|
||||
endpointLocals[EModelEndpoint.azureOpenAI] = azureConfigSetup(config);
|
||||
checkAzureVariables();
|
||||
}
|
||||
|
||||
if (!isValid) {
|
||||
const errorString = errors.join('\n');
|
||||
const errorMessage = 'Invalid Azure OpenAI configuration:\n' + errorString;
|
||||
logger.error(errorMessage);
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
const assistantModels = [];
|
||||
const assistantGroups = new Set();
|
||||
for (const modelName of modelNames) {
|
||||
mapModelToAzureConfig({ modelName, modelGroupMap, groupMap });
|
||||
const groupName = modelGroupMap?.[modelName]?.group;
|
||||
const modelGroup = groupMap?.[groupName];
|
||||
let supportsAssistants = modelGroup?.assistants || modelGroup?.[modelName]?.assistants;
|
||||
if (supportsAssistants) {
|
||||
assistantModels.push(modelName);
|
||||
!assistantGroups.has(groupName) && assistantGroups.add(groupName);
|
||||
}
|
||||
}
|
||||
|
||||
if (azureConfiguration.assistants && assistantModels.length === 0) {
|
||||
throw new Error(
|
||||
'No Azure models are configured to support assistants. Please remove the `assistants` field or configure at least one model to support assistants.',
|
||||
);
|
||||
}
|
||||
|
||||
endpointLocals[EModelEndpoint.azureOpenAI] = {
|
||||
modelNames,
|
||||
modelGroupMap,
|
||||
groupMap,
|
||||
assistantModels,
|
||||
assistantGroups: Array.from(assistantGroups),
|
||||
...azureConfiguration,
|
||||
};
|
||||
|
||||
deprecatedAzureVariables.forEach(({ key, description }) => {
|
||||
if (process.env[key]) {
|
||||
logger.warn(
|
||||
`The \`${key}\` environment variable (related to ${description}) should not be used in combination with the \`azureOpenAI\` endpoint configuration, as you will experience conflicts and errors.`,
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
  conflictingAzureVariables.forEach(({ key }) => {
    if (process.env[key]) {
      logger.warn(
        `The \`${key}\` environment variable should not be used in combination with the \`azureOpenAI\` endpoint configuration, as you may experience conflicts with the defined placeholders for mapping to the current model grouping using the same name.`,
      );
    }
  });

if (azureConfiguration.assistants) {
|
||||
endpointLocals[EModelEndpoint.assistants] = {
|
||||
// Note: may need to add retrieval models here in the future
|
||||
capabilities: [Capabilities.tools, Capabilities.actions, Capabilities.code_interpreter],
|
||||
};
|
||||
}
|
||||
if (config?.endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) {
|
||||
endpointLocals[EModelEndpoint.assistants] = azureAssistantsDefaults();
|
||||
}
|
||||
|
||||
if (config?.endpoints?.[EModelEndpoint.assistants]) {
|
||||
const assistantsConfig = config.endpoints[EModelEndpoint.assistants];
|
||||
const parsedConfig = assistantEndpointSchema.parse(assistantsConfig);
|
||||
if (assistantsConfig.supportedIds?.length && assistantsConfig.excludedIds?.length) {
|
||||
logger.warn(
|
||||
`Both \`supportedIds\` and \`excludedIds\` are defined for the ${EModelEndpoint.assistants} endpoint; \`excludedIds\` field will be ignored.`,
|
||||
);
|
||||
}
|
||||
|
||||
const prevConfig = endpointLocals[EModelEndpoint.assistants] ?? {};
|
||||
|
||||
/** @type {Partial<TAssistantEndpoint>} */
|
||||
endpointLocals[EModelEndpoint.assistants] = {
|
||||
...prevConfig,
|
||||
retrievalModels: parsedConfig.retrievalModels,
|
||||
disableBuilder: parsedConfig.disableBuilder,
|
||||
pollIntervalMs: parsedConfig.pollIntervalMs,
|
||||
supportedIds: parsedConfig.supportedIds,
|
||||
capabilities: parsedConfig.capabilities,
|
||||
excludedIds: parsedConfig.excludedIds,
|
||||
timeoutMs: parsedConfig.timeoutMs,
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(`${process.env.RAG_API_URL}/health`);
|
||||
if (response?.ok && response?.status === 200) {
|
||||
logger.info(`RAG API is running and reachable at ${process.env.RAG_API_URL}.`);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(
|
||||
`RAG API is either not running or not reachable at ${process.env.RAG_API_URL}, you may experience errors with file uploads.`,
|
||||
endpointLocals[EModelEndpoint.assistants] = assistantsConfigSetup(
|
||||
config,
|
||||
endpointLocals[EModelEndpoint.assistants],
|
||||
);
|
||||
}
|
||||
|
||||
app.locals = {
|
||||
socialLogins,
|
||||
availableTools,
|
||||
fileStrategy,
|
||||
fileConfig: config?.fileConfig,
|
||||
interface: config?.interface,
|
||||
paths,
|
||||
socialLogins,
|
||||
fileStrategy,
|
||||
availableTools,
|
||||
imageOutputType,
|
||||
interface: config?.interface,
|
||||
fileConfig: config?.fileConfig,
|
||||
secureImageLinks: config?.secureImageLinks,
|
||||
...endpointLocals,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const {
|
||||
FileSources,
|
||||
EModelEndpoint,
|
||||
EImageOutputType,
|
||||
defaultSocialLogins,
|
||||
validateAzureGroups,
|
||||
deprecatedAzureVariables,
|
||||
@@ -107,6 +108,10 @@ describe('AppService', () => {
|
||||
},
|
||||
},
|
||||
paths: expect.anything(),
|
||||
imageOutputType: expect.any(String),
|
||||
interface: undefined,
|
||||
fileConfig: undefined,
|
||||
secureImageLinks: undefined,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -125,6 +130,36 @@ describe('AppService', () => {
|
||||
expect(logger.info).toHaveBeenCalledWith(expect.stringContaining('Outdated Config version'));
|
||||
});
|
||||
|
||||
it('should change the `imageOutputType` based on config value', async () => {
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
version: '0.10.0',
|
||||
imageOutputType: EImageOutputType.WEBP,
|
||||
}),
|
||||
);
|
||||
|
||||
await AppService(app);
|
||||
expect(app.locals.imageOutputType).toEqual(EImageOutputType.WEBP);
|
||||
});
|
||||
|
||||
it('should default to `PNG` `imageOutputType` with no provided type', async () => {
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
version: '0.10.0',
|
||||
}),
|
||||
);
|
||||
|
||||
await AppService(app);
|
||||
expect(app.locals.imageOutputType).toEqual(EImageOutputType.PNG);
|
||||
});
|
||||
|
||||
it('should default to `PNG` `imageOutputType` with no provided config', async () => {
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve(undefined));
|
||||
|
||||
await AppService(app);
|
||||
expect(app.locals.imageOutputType).toEqual(EImageOutputType.PNG);
|
||||
});
|
||||
|
||||
it('should initialize Firebase when fileStrategy is firebase', async () => {
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
@@ -193,6 +228,27 @@ describe('AppService', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should correctly configure minimum Azure OpenAI Assistant values', async () => {
|
||||
const assistantGroups = [azureGroups[0], { ...azureGroups[1], assistants: true }];
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
endpoints: {
|
||||
[EModelEndpoint.azureOpenAI]: {
|
||||
groups: assistantGroups,
|
||||
assistants: true,
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
process.env.WESTUS_API_KEY = 'westus-key';
|
||||
process.env.EASTUS_API_KEY = 'eastus-key';
|
||||
|
||||
await AppService(app);
|
||||
expect(app.locals).toHaveProperty(EModelEndpoint.assistants);
|
||||
expect(app.locals[EModelEndpoint.assistants].capabilities.length).toEqual(3);
|
||||
});
|
||||
|
||||
it('should correctly configure Azure OpenAI endpoint based on custom config', async () => {
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const path = require('path');
|
||||
const { CacheKeys, configSchema } = require('librechat-data-provider');
|
||||
const { CacheKeys, configSchema, EImageOutputType } = require('librechat-data-provider');
|
||||
const getLogStores = require('~/cache/getLogStores');
|
||||
const loadYaml = require('~/utils/loadYaml');
|
||||
const { logger } = require('~/config');
|
||||
@@ -55,6 +55,20 @@ async function loadCustomConfig() {
  }

  const result = configSchema.strict().safeParse(customConfig);
  if (result?.error?.errors?.some((err) => err?.path && err.path?.includes('imageOutputType'))) {
    throw new Error(
      `
Please specify a correct \`imageOutputType\` value (case-sensitive).

The available options are:
- ${EImageOutputType.JPEG}
- ${EImageOutputType.PNG}
- ${EImageOutputType.WEBP}

Refer to the latest config file guide for more information:
https://docs.librechat.ai/install/configuration/custom_config.html`,
    );
  }
  if (!result.success) {
    i === 0 && logger.error(`Invalid custom config file at ${configPath}`, result.error);
    i === 0 && i++;

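The value is case-sensitive because, once validated, it is handed directly to sharp's toFormat() and reused as the output file extension by the upload strategies later in this diff. A minimal sketch of that downstream path, assuming `req.app.locals.imageOutputType` was populated by AppService (the function name here is illustrative):

const sharp = require('sharp');

// Convert an already-resized buffer to the configured output type.
async function toConfiguredFormat(req, resizedBuffer, fileName) {
  const imageOutputType = req.app.locals.imageOutputType; // e.g. 'png', 'jpeg' or 'webp'
  const targetExtension = `.${imageOutputType}`;
  const data = await sharp(resizedBuffer).toFormat(imageOutputType).toBuffer();
  // Swap the original extension for the configured one, mirroring the strategies below.
  return { data, fileName: fileName.replace(/\.[^.]+$/, targetExtension) };
}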
@@ -1,5 +1,6 @@
const { AnthropicClient } = require('~/app');
const { EModelEndpoint } = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { AnthropicClient } = require('~/app');

const initializeClient = async ({ req, res, endpointOption }) => {
  const { ANTHROPIC_API_KEY, ANTHROPIC_REVERSE_PROXY, PROXY } = process.env;
@@ -7,14 +8,15 @@ const initializeClient = async ({ req, res, endpointOption }) => {
  const isUserProvided = ANTHROPIC_API_KEY === 'user_provided';

  const anthropicApiKey = isUserProvided
    ? await getAnthropicUserKey(req.user.id)
    ? await getUserKey({ userId: req.user.id, name: EModelEndpoint.anthropic })
    : ANTHROPIC_API_KEY;

  if (!anthropicApiKey) {
    throw new Error('Anthropic API key not provided. Please provide it again.');
  }

  if (expiresAt && isUserProvided) {
    checkUserKeyExpiry(
      expiresAt,
      'Your ANTHROPIC_API_KEY has expired. Please provide your API key again.',
    );
    checkUserKeyExpiry(expiresAt, EModelEndpoint.anthropic);
  }

  const client = new AnthropicClient(anthropicApiKey, {
@@ -31,8 +33,4 @@ const initializeClient = async ({ req, res, endpointOption }) => {
  };
};

const getAnthropicUserKey = async (userId) => {
  return await getUserKey({ userId, name: 'anthropic' });
};

module.exports = initializeClient;

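checkUserKeyExpiry now receives the endpoint name instead of a prose message, and the updated specs further down assert on /expired_user_key/ and /invalid_user_key/, which suggests UserService throws JSON-stringified error types in the same shape as the NO_USER_KEY guards added in this diff. A hedged sketch of how a caller might unpack such errors; the expired-key member of ErrorTypes is inferred from the spec regex and the synchronous throw mirrors the old Google spec mock, neither is shown directly here:

const { ErrorTypes, EModelEndpoint } = require('librechat-data-provider');
const { checkUserKeyExpiry } = require('~/server/services/UserService');

function describeKeyError(err) {
  try {
    // Guards in this diff throw `new Error(JSON.stringify({ type }))`.
    const { type } = JSON.parse(err.message);
    if (type === ErrorTypes.NO_USER_KEY) {
      return 'No user-provided key found; ask the user to supply one.';
    }
    if (type === ErrorTypes.INVALID_USER_KEY) {
      return 'Stored key values are not valid JSON; ask the user to re-enter them.';
    }
    return `User key error: ${type}`; // e.g. the expired-key type matched by /expired_user_key/
  } catch {
    return err.message; // plain-text errors pass through unchanged
  }
}

// Usage sketch: an already-expired timestamp should surface a structured error.
try {
  checkUserKeyExpiry(new Date(Date.now() - 60000).toISOString(), EModelEndpoint.anthropic);
} catch (err) {
  console.warn(describeKeyError(err));
}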
@@ -1,12 +1,13 @@
|
||||
const OpenAI = require('openai');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const {
|
||||
ErrorTypes,
|
||||
EModelEndpoint,
|
||||
resolveHeaders,
|
||||
mapModelToAzureConfig,
|
||||
} = require('librechat-data-provider');
|
||||
const {
|
||||
getUserKey,
|
||||
getUserKeyValues,
|
||||
getUserKeyExpiry,
|
||||
checkUserKeyExpiry,
|
||||
} = require('~/server/services/UserService');
|
||||
@@ -26,18 +27,8 @@ const initializeClient = async ({ req, res, endpointOption, initAppClient = fals
|
||||
userId: req.user.id,
|
||||
name: EModelEndpoint.assistants,
|
||||
});
|
||||
checkUserKeyExpiry(
|
||||
expiresAt,
|
||||
'Your Assistants API key has expired. Please provide your API key again.',
|
||||
);
|
||||
userValues = await getUserKey({ userId: req.user.id, name: EModelEndpoint.assistants });
|
||||
try {
|
||||
userValues = JSON.parse(userValues);
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
'Invalid JSON provided for Assistants API user values. Please provide them again.',
|
||||
);
|
||||
}
|
||||
checkUserKeyExpiry(expiresAt, EModelEndpoint.assistants);
|
||||
userValues = await getUserKeyValues({ userId: req.user.id, name: EModelEndpoint.assistants });
|
||||
}
|
||||
|
||||
let apiKey = userProvidesKey ? userValues.apiKey : ASSISTANTS_API_KEY;
|
||||
@@ -101,6 +92,14 @@ const initializeClient = async ({ req, res, endpointOption, initAppClient = fals
|
||||
}
|
||||
}
|
||||
|
||||
  if (userProvidesKey && !apiKey) {
|
||||
throw new Error(
|
||||
JSON.stringify({
|
||||
type: ErrorTypes.NO_USER_KEY,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (!apiKey) {
|
||||
throw new Error('Assistants API key not provided. Please provide it again.');
|
||||
}
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
// const OpenAI = require('openai');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { getUserKey, getUserKeyExpiry } = require('~/server/services/UserService');
|
||||
const { ErrorTypes } = require('librechat-data-provider');
|
||||
const { getUserKey, getUserKeyExpiry, getUserKeyValues } = require('~/server/services/UserService');
|
||||
const initializeClient = require('./initializeClient');
|
||||
// const { OpenAIClient } = require('~/app');
|
||||
|
||||
jest.mock('~/server/services/UserService', () => ({
|
||||
getUserKey: jest.fn(),
|
||||
getUserKeyExpiry: jest.fn(),
|
||||
getUserKeyValues: jest.fn(),
|
||||
checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
|
||||
}));
|
||||
|
||||
@@ -52,9 +54,7 @@ describe('initializeClient', () => {
|
||||
process.env.ASSISTANTS_API_KEY = 'user_provided';
|
||||
process.env.ASSISTANTS_BASE_URL = 'user_provided';
|
||||
|
||||
getUserKey.mockResolvedValue(
|
||||
JSON.stringify({ apiKey: 'user-api-key', baseURL: 'https://user.api.url' }),
|
||||
);
|
||||
getUserKeyValues.mockResolvedValue({ apiKey: 'user-api-key', baseURL: 'https://user.api.url' });
|
||||
getUserKeyExpiry.mockResolvedValue(isoString);
|
||||
|
||||
const req = { user: { id: 'user123' }, app };
|
||||
@@ -70,11 +70,24 @@ describe('initializeClient', () => {
|
||||
process.env.ASSISTANTS_API_KEY = 'user_provided';
|
||||
getUserKey.mockResolvedValue('invalid-json');
|
||||
getUserKeyExpiry.mockResolvedValue(isoString);
|
||||
getUserKeyValues.mockImplementation(() => {
|
||||
let userValues = getUserKey();
|
||||
try {
|
||||
userValues = JSON.parse(userValues);
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
JSON.stringify({
|
||||
type: ErrorTypes.INVALID_USER_KEY,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return userValues;
|
||||
});
|
||||
|
||||
const req = { user: { id: 'user123' } };
|
||||
const res = {};
|
||||
|
||||
await expect(initializeClient({ req, res })).rejects.toThrow(/Invalid JSON/);
|
||||
await expect(initializeClient({ req, res })).rejects.toThrow(/invalid_user_key/);
|
||||
});
|
||||
|
||||
test('throws error if API key is not provided', async () => {
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
const {
|
||||
CacheKeys,
|
||||
ErrorTypes,
|
||||
envVarRegex,
|
||||
EModelEndpoint,
|
||||
FetchTokenConfig,
|
||||
extractEnvVariable,
|
||||
} = require('librechat-data-provider');
|
||||
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
|
||||
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
|
||||
const getCustomConfig = require('~/server/services/Config/getCustomConfig');
|
||||
const { fetchModels } = require('~/server/services/ModelService');
|
||||
const getLogStores = require('~/cache/getLogStores');
|
||||
@@ -48,21 +49,29 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
|
||||
let userValues = null;
|
||||
if (expiresAt && (userProvidesKey || userProvidesURL)) {
|
||||
checkUserKeyExpiry(
|
||||
expiresAt,
|
||||
`Your API values for ${endpoint} have expired. Please configure them again.`,
|
||||
);
|
||||
userValues = await getUserKey({ userId: req.user.id, name: endpoint });
|
||||
try {
|
||||
userValues = JSON.parse(userValues);
|
||||
} catch (e) {
|
||||
throw new Error(`Invalid JSON provided for ${endpoint} user values.`);
|
||||
}
|
||||
checkUserKeyExpiry(expiresAt, endpoint);
|
||||
userValues = await getUserKeyValues({ userId: req.user.id, name: endpoint });
|
||||
}
|
||||
|
||||
let apiKey = userProvidesKey ? userValues?.apiKey : CUSTOM_API_KEY;
|
||||
let baseURL = userProvidesURL ? userValues?.baseURL : CUSTOM_BASE_URL;
|
||||
|
||||
  if (userProvidesKey && !apiKey) {
|
||||
throw new Error(
|
||||
JSON.stringify({
|
||||
type: ErrorTypes.NO_USER_KEY,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (userProvidesURL && !baseURL) {
|
||||
throw new Error(
|
||||
JSON.stringify({
|
||||
type: ErrorTypes.NO_BASE_URL,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (!apiKey) {
|
||||
throw new Error(`${endpoint} API key not provided.`);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
const { GoogleClient } = require('~/app');
|
||||
const { EModelEndpoint, AuthKeys } = require('librechat-data-provider');
|
||||
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
|
||||
const { GoogleClient } = require('~/app');
|
||||
|
||||
const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
const { GOOGLE_KEY, GOOGLE_REVERSE_PROXY, PROXY } = process.env;
|
||||
@@ -9,10 +9,7 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
|
||||
let userKey = null;
|
||||
if (expiresAt && isUserProvided) {
|
||||
checkUserKeyExpiry(
|
||||
expiresAt,
|
||||
'Your Google Credentials have expired. Please provide your Service Account JSON Key or Generative Language API Key again.',
|
||||
);
|
||||
checkUserKeyExpiry(expiresAt, EModelEndpoint.google);
|
||||
userKey = await getUserKey({ userId: req.user.id, name: EModelEndpoint.google });
|
||||
}
|
||||
|
||||
|
||||
@@ -1,15 +1,10 @@
|
||||
// file deepcode ignore HardcodedNonCryptoSecret: No hardcoded secrets
|
||||
|
||||
const { getUserKey } = require('~/server/services/UserService');
|
||||
const initializeClient = require('./initializeClient');
|
||||
const { GoogleClient } = require('~/app');
|
||||
const { checkUserKeyExpiry, getUserKey } = require('../../UserService');
|
||||
|
||||
jest.mock('../../UserService', () => ({
|
||||
checkUserKeyExpiry: jest.fn().mockImplementation((expiresAt, errorMessage) => {
|
||||
if (new Date(expiresAt) < new Date()) {
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
}),
|
||||
jest.mock('~/server/services/UserService', () => ({
|
||||
checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
|
||||
getUserKey: jest.fn().mockImplementation(() => ({})),
|
||||
}));
|
||||
|
||||
@@ -74,13 +69,8 @@ describe('google/initializeClient', () => {
|
||||
};
|
||||
const res = {};
|
||||
const endpointOption = { modelOptions: { model: 'default-model' } };
|
||||
|
||||
checkUserKeyExpiry.mockImplementation((expiresAt, errorMessage) => {
|
||||
throw new Error(errorMessage);
|
||||
});
|
||||
|
||||
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
|
||||
/Your Google Credentials have expired/,
|
||||
/expired_user_key/,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,7 +3,7 @@ const {
|
||||
mapModelToAzureConfig,
|
||||
resolveHeaders,
|
||||
} = require('librechat-data-provider');
|
||||
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
|
||||
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
|
||||
const { isEnabled, isUserProvided } = require('~/server/utils');
|
||||
const { getAzureCredentials } = require('~/utils');
|
||||
const { PluginsClient } = require('~/app');
|
||||
@@ -49,18 +49,8 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
|
||||
let userValues = null;
|
||||
if (expiresAt && (userProvidesKey || userProvidesURL)) {
|
||||
checkUserKeyExpiry(
|
||||
expiresAt,
|
||||
'Your OpenAI API values have expired. Please provide them again.',
|
||||
);
|
||||
userValues = await getUserKey({ userId: req.user.id, name: endpoint });
|
||||
try {
|
||||
userValues = JSON.parse(userValues);
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
`Invalid JSON provided for ${endpoint} user values. Please provide them again.`,
|
||||
);
|
||||
}
|
||||
checkUserKeyExpiry(expiresAt, endpoint);
|
||||
userValues = await getUserKeyValues({ userId: req.user.id, name: endpoint });
|
||||
}
|
||||
|
||||
let apiKey = userProvidesKey ? userValues?.apiKey : credentials[endpoint];
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
// gptPlugins/initializeClient.spec.js
|
||||
const { EModelEndpoint, validateAzureGroups } = require('librechat-data-provider');
|
||||
const { getUserKey } = require('~/server/services/UserService');
|
||||
const { EModelEndpoint, ErrorTypes, validateAzureGroups } = require('librechat-data-provider');
|
||||
const { getUserKey, getUserKeyValues } = require('~/server/services/UserService');
|
||||
const initializeClient = require('./initializeClient');
|
||||
const { PluginsClient } = require('~/app');
|
||||
|
||||
// Mock getUserKey since it's the only function we want to mock
|
||||
jest.mock('~/server/services/UserService', () => ({
|
||||
getUserKey: jest.fn(),
|
||||
getUserKeyValues: jest.fn(),
|
||||
checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
|
||||
}));
|
||||
|
||||
@@ -205,7 +206,7 @@ describe('gptPlugins/initializeClient', () => {
|
||||
const res = {};
|
||||
const endpointOption = { modelOptions: { model: 'default-model' } };
|
||||
|
||||
getUserKey.mockResolvedValue(JSON.stringify({ apiKey: 'test-user-provided-openai-api-key' }));
|
||||
getUserKeyValues.mockResolvedValue({ apiKey: 'test-user-provided-openai-api-key' });
|
||||
|
||||
const { openAIApiKey } = await initializeClient({ req, res, endpointOption });
|
||||
|
||||
@@ -225,14 +226,12 @@ describe('gptPlugins/initializeClient', () => {
|
||||
const res = {};
|
||||
const endpointOption = { modelOptions: { model: 'test-model' } };
|
||||
|
||||
getUserKey.mockResolvedValue(
|
||||
JSON.stringify({
|
||||
apiKey: JSON.stringify({
|
||||
azureOpenAIApiKey: 'test-user-provided-azure-api-key',
|
||||
azureOpenAIApiDeploymentName: 'test-deployment',
|
||||
}),
|
||||
getUserKeyValues.mockResolvedValue({
|
||||
apiKey: JSON.stringify({
|
||||
azureOpenAIApiKey: 'test-user-provided-azure-api-key',
|
||||
azureOpenAIApiDeploymentName: 'test-deployment',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
const { azure } = await initializeClient({ req, res, endpointOption });
|
||||
|
||||
@@ -251,7 +250,9 @@ describe('gptPlugins/initializeClient', () => {
|
||||
const res = {};
|
||||
const endpointOption = { modelOptions: { model: 'default-model' } };
|
||||
|
||||
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(/Your OpenAI API/);
|
||||
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
|
||||
/expired_user_key/,
|
||||
);
|
||||
});
|
||||
|
||||
test('should throw an error if the user-provided Azure key is invalid JSON', async () => {
|
||||
@@ -268,9 +269,22 @@ describe('gptPlugins/initializeClient', () => {
|
||||
|
||||
// Simulate an invalid JSON string returned from getUserKey
|
||||
getUserKey.mockResolvedValue('invalid-json');
|
||||
getUserKeyValues.mockImplementation(() => {
|
||||
let userValues = getUserKey();
|
||||
try {
|
||||
userValues = JSON.parse(userValues);
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
JSON.stringify({
|
||||
type: ErrorTypes.INVALID_USER_KEY,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return userValues;
|
||||
});
|
||||
|
||||
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
|
||||
/Invalid JSON provided/,
|
||||
/invalid_user_key/,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -305,9 +319,22 @@ describe('gptPlugins/initializeClient', () => {
|
||||
|
||||
// Mock getUserKey to return a non-JSON string
|
||||
getUserKey.mockResolvedValue('not-a-json');
|
||||
getUserKeyValues.mockImplementation(() => {
|
||||
let userValues = getUserKey();
|
||||
try {
|
||||
userValues = JSON.parse(userValues);
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
JSON.stringify({
|
||||
type: ErrorTypes.INVALID_USER_KEY,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return userValues;
|
||||
});
|
||||
|
||||
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
|
||||
/Invalid JSON provided for openAI user values/,
|
||||
/invalid_user_key/,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -369,9 +396,10 @@ describe('gptPlugins/initializeClient', () => {
|
||||
const res = {};
|
||||
const endpointOption = {};
|
||||
|
||||
getUserKey.mockResolvedValue(
|
||||
JSON.stringify({ apiKey: 'test', baseURL: 'https://user-provided-url.com' }),
|
||||
);
|
||||
getUserKeyValues.mockResolvedValue({
|
||||
apiKey: 'test',
|
||||
baseURL: 'https://user-provided-url.com',
|
||||
});
|
||||
|
||||
const result = await initializeClient({ req, res, endpointOption });
|
||||
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
const {
|
||||
ErrorTypes,
|
||||
EModelEndpoint,
|
||||
resolveHeaders,
|
||||
mapModelToAzureConfig,
|
||||
} = require('librechat-data-provider');
|
||||
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
|
||||
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
|
||||
const { isEnabled, isUserProvided } = require('~/server/utils');
|
||||
const { getAzureCredentials } = require('~/utils');
|
||||
const { OpenAIClient } = require('~/app');
|
||||
@@ -36,18 +37,8 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
|
||||
let userValues = null;
|
||||
if (expiresAt && (userProvidesKey || userProvidesURL)) {
|
||||
checkUserKeyExpiry(
|
||||
expiresAt,
|
||||
'Your OpenAI API values have expired. Please provide them again.',
|
||||
);
|
||||
userValues = await getUserKey({ userId: req.user.id, name: endpoint });
|
||||
try {
|
||||
userValues = JSON.parse(userValues);
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
`Invalid JSON provided for ${endpoint} user values. Please provide them again.`,
|
||||
);
|
||||
}
|
||||
checkUserKeyExpiry(expiresAt, endpoint);
|
||||
userValues = await getUserKeyValues({ userId: req.user.id, name: endpoint });
|
||||
}
|
||||
|
||||
let apiKey = userProvidesKey ? userValues?.apiKey : credentials[endpoint];
|
||||
@@ -99,8 +90,16 @@ const initializeClient = async ({ req, res, endpointOption }) => {
|
||||
apiKey = clientOptions.azure.azureOpenAIApiKey;
|
||||
}
|
||||
|
||||
  if (userProvidesKey && !apiKey) {
|
||||
throw new Error(
|
||||
JSON.stringify({
|
||||
type: ErrorTypes.NO_USER_KEY,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (!apiKey) {
|
||||
throw new Error(`${endpoint} API key not provided. Please provide it again.`);
|
||||
throw new Error(`${endpoint} API Key not provided.`);
|
||||
}
|
||||
|
||||
const client = new OpenAIClient(apiKey, clientOptions);
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
const { EModelEndpoint, validateAzureGroups } = require('librechat-data-provider');
|
||||
const { getUserKey } = require('~/server/services/UserService');
|
||||
const { EModelEndpoint, ErrorTypes, validateAzureGroups } = require('librechat-data-provider');
|
||||
const { getUserKey, getUserKeyValues } = require('~/server/services/UserService');
|
||||
const initializeClient = require('./initializeClient');
|
||||
const { OpenAIClient } = require('~/app');
|
||||
|
||||
// Mock getUserKey since it's the only function we want to mock
|
||||
jest.mock('~/server/services/UserService', () => ({
|
||||
getUserKey: jest.fn(),
|
||||
getUserKeyValues: jest.fn(),
|
||||
checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
|
||||
}));
|
||||
|
||||
@@ -200,7 +201,9 @@ describe('initializeClient', () => {
|
||||
const res = {};
|
||||
const endpointOption = {};
|
||||
|
||||
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(/Your OpenAI API/);
|
||||
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
|
||||
/expired_user_key/,
|
||||
);
|
||||
});
|
||||
|
||||
test('should throw an error if no API keys are provided in the environment', async () => {
|
||||
@@ -217,7 +220,7 @@ describe('initializeClient', () => {
|
||||
const endpointOption = {};
|
||||
|
||||
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
|
||||
`${EModelEndpoint.openAI} API key not provided.`,
|
||||
`${EModelEndpoint.openAI} API Key not provided.`,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -241,7 +244,7 @@ describe('initializeClient', () => {
|
||||
process.env.OPENAI_API_KEY = 'user_provided';
|
||||
|
||||
// Mock getUserKey to return the expected key
|
||||
getUserKey.mockResolvedValue(JSON.stringify({ apiKey: 'test-user-provided-openai-api-key' }));
|
||||
getUserKeyValues.mockResolvedValue({ apiKey: 'test-user-provided-openai-api-key' });
|
||||
|
||||
// Call the initializeClient function
|
||||
const result = await initializeClient({ req, res, endpointOption });
|
||||
@@ -266,7 +269,9 @@ describe('initializeClient', () => {
|
||||
// Mock getUserKey to return an invalid key
|
||||
getUserKey.mockResolvedValue(invalidKey);
|
||||
|
||||
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(/Your OpenAI API/);
|
||||
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
|
||||
/expired_user_key/,
|
||||
);
|
||||
});
|
||||
|
||||
test('should throw an error when user-provided values are not valid JSON', async () => {
|
||||
@@ -281,9 +286,22 @@ describe('initializeClient', () => {
|
||||
|
||||
// Mock getUserKey to return a non-JSON string
|
||||
getUserKey.mockResolvedValue('not-a-json');
|
||||
getUserKeyValues.mockImplementation(() => {
|
||||
let userValues = getUserKey();
|
||||
try {
|
||||
userValues = JSON.parse(userValues);
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
JSON.stringify({
|
||||
type: ErrorTypes.INVALID_USER_KEY,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return userValues;
|
||||
});
|
||||
|
||||
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
|
||||
/Invalid JSON provided for openAI user values/,
|
||||
/invalid_user_key/,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -347,9 +365,10 @@ describe('initializeClient', () => {
|
||||
const res = {};
|
||||
const endpointOption = {};
|
||||
|
||||
getUserKey.mockResolvedValue(
|
||||
JSON.stringify({ apiKey: 'test', baseURL: 'https://user-provided-url.com' }),
|
||||
);
|
||||
getUserKeyValues.mockResolvedValue({
|
||||
apiKey: 'test',
|
||||
baseURL: 'https://user-provided-url.com',
|
||||
});
|
||||
|
||||
const result = await initializeClient({ req, res, endpointOption });
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ const { updateFile } = require('~/models/File');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
* Converts an image file to the WebP format. The function first resizes the image based on the specified
|
||||
* Converts an image file to the target format. The function first resizes the image based on the specified
|
||||
* resolution.
|
||||
*
|
||||
* @param {Object} params - The params object.
|
||||
@@ -21,7 +21,7 @@ const { logger } = require('~/config');
|
||||
*
|
||||
* @returns {Promise<{ filepath: string, bytes: number, width: number, height: number}>}
|
||||
* A promise that resolves to an object containing:
|
||||
* - filepath: The path where the converted WebP image is saved.
|
||||
* - filepath: The path where the converted image is saved.
|
||||
* - bytes: The size of the converted image in bytes.
|
||||
* - width: The width of the converted image.
|
||||
* - height: The height of the converted image.
|
||||
@@ -39,15 +39,16 @@ async function uploadImageToFirebase({ req, file, file_id, endpoint, resolution
|
||||
|
||||
let webPBuffer;
|
||||
let fileName = `${file_id}__${path.basename(inputFilePath)}`;
|
||||
if (extension.toLowerCase() === '.webp') {
|
||||
const targetExtension = `.${req.app.locals.imageOutputType}`;
|
||||
if (extension.toLowerCase() === targetExtension) {
|
||||
webPBuffer = resizedBuffer;
|
||||
} else {
|
||||
webPBuffer = await sharp(resizedBuffer).toFormat('webp').toBuffer();
|
||||
webPBuffer = await sharp(resizedBuffer).toFormat(req.app.locals.imageOutputType).toBuffer();
|
||||
// Replace or append the correct extension
|
||||
const extRegExp = new RegExp(path.extname(fileName) + '$');
|
||||
fileName = fileName.replace(extRegExp, '.webp');
|
||||
fileName = fileName.replace(extRegExp, targetExtension);
|
||||
if (!path.extname(fileName)) {
|
||||
fileName += '.webp';
|
||||
fileName += targetExtension;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -79,7 +80,7 @@ async function prepareImageURL(req, file) {
|
||||
* If the 'manual' flag is set to 'true', it also updates the user's avatar URL in the database.
|
||||
*
|
||||
* @param {object} params - The parameters object.
|
||||
* @param {Buffer} params.buffer - The Buffer containing the avatar image in WebP format.
|
||||
* @param {Buffer} params.buffer - The Buffer containing the avatar image.
|
||||
* @param {string} params.userId - The user ID.
|
||||
* @param {string} params.manual - A string flag indicating whether the update is manual ('true' or 'false').
|
||||
* @returns {Promise<string>} - A promise that resolves with the URL of the uploaded avatar.
|
||||
|
||||
@@ -6,11 +6,11 @@ const { updateUser } = require('~/models/userMethods');
|
||||
const { updateFile } = require('~/models/File');
|
||||
|
||||
/**
|
||||
* Converts an image file to the WebP format. The function first resizes the image based on the specified
|
||||
* Converts an image file to the target format. The function first resizes the image based on the specified
|
||||
* resolution.
|
||||
*
|
||||
 * If the original image is already in WebP format, it writes the resized image back. Otherwise,
 * it converts the image to WebP format before saving.
 * If the original image is already in the target format, it writes the resized image back. Otherwise,
 * it converts the image to the target format before saving.
|
||||
*
|
||||
* The original image is deleted after conversion.
|
||||
* @param {Object} params - The params object.
|
||||
@@ -24,7 +24,7 @@ const { updateFile } = require('~/models/File');
|
||||
*
|
||||
* @returns {Promise<{ filepath: string, bytes: number, width: number, height: number}>}
|
||||
* A promise that resolves to an object containing:
|
||||
* - filepath: The path where the converted WebP image is saved.
|
||||
* - filepath: The path where the converted image is saved.
|
||||
* - bytes: The size of the converted image in bytes.
|
||||
* - width: The width of the converted image.
|
||||
* - height: The height of the converted image.
|
||||
@@ -48,16 +48,17 @@ async function uploadLocalImage({ req, file, file_id, endpoint, resolution = 'hi
|
||||
|
||||
const fileName = `${file_id}__${path.basename(inputFilePath)}`;
|
||||
const newPath = path.join(userPath, fileName);
|
||||
const targetExtension = `.${req.app.locals.imageOutputType}`;
|
||||
|
||||
if (extension.toLowerCase() === '.webp') {
|
||||
if (extension.toLowerCase() === targetExtension) {
|
||||
const bytes = Buffer.byteLength(resizedBuffer);
|
||||
await fs.promises.writeFile(newPath, resizedBuffer);
|
||||
const filepath = path.posix.join('/', 'images', req.user.id, path.basename(newPath));
|
||||
return { filepath, bytes, width, height };
|
||||
}
|
||||
|
||||
const outputFilePath = newPath.replace(extension, '.webp');
|
||||
const data = await sharp(resizedBuffer).toFormat('webp').toBuffer();
|
||||
const outputFilePath = newPath.replace(extension, targetExtension);
|
||||
const data = await sharp(resizedBuffer).toFormat(req.app.locals.imageOutputType).toBuffer();
|
||||
await fs.promises.writeFile(outputFilePath, data);
|
||||
const bytes = Buffer.byteLength(data);
|
||||
const filepath = path.posix.join('/', 'images', req.user.id, path.basename(outputFilePath));
|
||||
@@ -109,7 +110,7 @@ async function prepareImagesLocal(req, file) {
|
||||
* If the 'manual' flag is set to 'true', it also updates the user's avatar URL in the database.
|
||||
*
|
||||
* @param {object} params - The parameters object.
|
||||
* @param {Buffer} params.buffer - The Buffer containing the avatar image in WebP format.
|
||||
* @param {Buffer} params.buffer - The Buffer containing the avatar image.
|
||||
* @param {string} params.userId - The user ID.
|
||||
* @param {string} params.manual - A string flag indicating whether the update is manual ('true' or 'false').
|
||||
* @returns {Promise<string>} - A promise that resolves with the URL of the uploaded avatar.
|
||||
|
||||
@@ -18,9 +18,12 @@ const { logger } = require('~/config');
|
||||
* file path is invalid or if there is an error in deletion.
|
||||
*/
|
||||
const deleteVectors = async (req, file) => {
|
||||
if (file.embedded && process.env.RAG_API_URL) {
|
||||
if (!file.embedded || !process.env.RAG_API_URL) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const jwtToken = req.headers.authorization.split(' ')[1];
|
||||
axios.delete(`${process.env.RAG_API_URL}/documents`, {
|
||||
return await axios.delete(`${process.env.RAG_API_URL}/documents`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${jwtToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
@@ -28,6 +31,9 @@ const deleteVectors = async (req, file) => {
|
||||
},
|
||||
data: [file.file_id],
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Error deleting vectors', error);
|
||||
throw new Error(error.message || 'An error occurred during file deletion.');
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
const sharp = require('sharp');
|
||||
const fs = require('fs').promises;
|
||||
const fetch = require('node-fetch');
|
||||
const { EImageOutputType } = require('librechat-data-provider');
|
||||
const { resizeAndConvert } = require('./resize');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
* Uploads an avatar image for a user. This function can handle various types of input (URL, Buffer, or File object),
|
||||
* processes the image to a square format, converts it to WebP format, and returns the resized buffer.
|
||||
* processes the image to a square format, converts it to target format, and returns the resized buffer.
|
||||
*
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {string} params.userId - The unique identifier of the user for whom the avatar is being uploaded.
|
||||
 * @param {string} params.desiredFormat - The desired output format of the image.
|
||||
* @param {(string|Buffer|File)} params.input - The input representing the avatar image. Can be a URL (string),
|
||||
* a Buffer, or a File object.
|
||||
*
|
||||
@@ -19,7 +21,7 @@ const { logger } = require('~/config');
|
||||
* @throws {Error} Throws an error if the user ID is undefined, the input type is invalid, the image fetching fails,
|
||||
* or any other error occurs during the processing.
|
||||
*/
|
||||
async function resizeAvatar({ userId, input }) {
|
||||
async function resizeAvatar({ userId, input, desiredFormat = EImageOutputType.PNG }) {
|
||||
try {
|
||||
if (userId === undefined) {
|
||||
throw new Error('User ID is undefined');
|
||||
@@ -53,7 +55,10 @@ async function resizeAvatar({ userId, input }) {
|
||||
})
|
||||
.toBuffer();
|
||||
|
||||
const { buffer } = await resizeAndConvert(squaredBuffer);
|
||||
const { buffer } = await resizeAndConvert({
|
||||
inputBuffer: squaredBuffer,
|
||||
desiredFormat,
|
||||
});
|
||||
return buffer;
|
||||
} catch (error) {
|
||||
logger.error('Error uploading the avatar:', error);
|
||||
|
||||
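A hedged usage sketch of the new `resizeAvatar` signature (the import path is an assumption):

const { resizeAvatar } = require('~/server/services/Files/images/avatar'); // assumed path

async function buildAvatarBuffer(req, userId, pictureUrl) {
  // desiredFormat defaults to EImageOutputType.PNG when omitted; here it follows
  // the server-wide output type, mirroring the other call sites in this diff.
  return await resizeAvatar({
    userId,
    input: pictureUrl,
    desiredFormat: req.app.locals.imageOutputType,
  });
}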
@@ -6,7 +6,7 @@ const { getStrategyFunctions } = require('../strategies');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
* Converts an image file or buffer to WebP format with specified resolution.
|
||||
* Converts an image file or buffer to target output type with specified resolution.
|
||||
*
|
||||
* @param {Express.Request} req - The request object, containing user and app configuration data.
|
||||
* @param {Buffer | Express.Multer.File} file - The file object, containing either a path or a buffer.
|
||||
@@ -15,7 +15,7 @@ const { logger } = require('~/config');
|
||||
* @returns {Promise<{filepath: string, bytes: number, width: number, height: number}>} An object containing the path, size, and dimensions of the converted image.
|
||||
* @throws Throws an error if there is an issue during the conversion process.
|
||||
*/
|
||||
async function convertToWebP(req, file, resolution = 'high', basename = '') {
|
||||
async function convertImage(req, file, resolution = 'high', basename = '') {
|
||||
try {
|
||||
let inputBuffer;
|
||||
let outputBuffer;
|
||||
@@ -38,13 +38,13 @@ async function convertToWebP(req, file, resolution = 'high', basename = '') {
|
||||
height,
|
||||
} = await resizeImageBuffer(inputBuffer, resolution);
|
||||
|
||||
// Check if the file is already in WebP format
|
||||
// If it isn't, convert it:
|
||||
if (extension === '.webp') {
|
||||
// Check if the file is already in target format; if it isn't, convert it:
|
||||
const targetExtension = `.${req.app.locals.imageOutputType}`;
|
||||
if (extension === targetExtension) {
|
||||
outputBuffer = resizedBuffer;
|
||||
} else {
|
||||
outputBuffer = await sharp(resizedBuffer).toFormat('webp').toBuffer();
|
||||
extension = '.webp';
|
||||
outputBuffer = await sharp(resizedBuffer).toFormat(req.app.locals.imageOutputType).toBuffer();
|
||||
extension = targetExtension;
|
||||
}
|
||||
|
||||
// Generate a new filename for the output file
|
||||
@@ -67,4 +67,4 @@ async function convertToWebP(req, file, resolution = 'high', basename = '') {
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { convertToWebP };
|
||||
module.exports = { convertImage };
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const axios = require('axios');
|
||||
const { EModelEndpoint, FileSources } = require('librechat-data-provider');
|
||||
const { EModelEndpoint, FileSources, VisionModes } = require('librechat-data-provider');
|
||||
const { getStrategyFunctions } = require('../strategies');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
@@ -30,11 +30,20 @@ const base64Only = new Set([EModelEndpoint.google, EModelEndpoint.anthropic]);
|
||||
* @param {Express.Request} req - The request object.
|
||||
* @param {Array<MongoFile>} files - The array of files to encode and format.
|
||||
* @param {EModelEndpoint} [endpoint] - Optional: The endpoint for the image.
|
||||
* @param {string} [mode] - Optional: The endpoint mode for the image.
|
||||
* @returns {Promise<Object>} - A promise that resolves to the result object containing the encoded images and file details.
|
||||
*/
|
||||
async function encodeAndFormat(req, files, endpoint) {
|
||||
async function encodeAndFormat(req, files, endpoint, mode) {
|
||||
const promises = [];
|
||||
const encodingMethods = {};
|
||||
const result = {
|
||||
files: [],
|
||||
image_urls: [],
|
||||
};
|
||||
|
||||
if (!files || !files.length) {
|
||||
return result;
|
||||
}
|
||||
|
||||
for (let file of files) {
|
||||
const source = file.source ?? FileSources.local;
|
||||
@@ -69,11 +78,6 @@ async function encodeAndFormat(req, files, endpoint) {
|
||||
/** @type {Array<[MongoFile, string]>} */
|
||||
const formattedImages = await Promise.all(promises);
|
||||
|
||||
const result = {
|
||||
files: [],
|
||||
image_urls: [],
|
||||
};
|
||||
|
||||
for (const [file, imageContent] of formattedImages) {
|
||||
const fileMetadata = {
|
||||
type: file.type,
|
||||
@@ -98,12 +102,18 @@ async function encodeAndFormat(req, files, endpoint) {
|
||||
image_url: {
|
||||
url: imageContent.startsWith('http')
|
||||
? imageContent
|
||||
: `data:image/webp;base64,${imageContent}`,
|
||||
: `data:${file.type};base64,${imageContent}`,
|
||||
detail,
|
||||
},
|
||||
};
|
||||
|
||||
if (endpoint && endpoint === EModelEndpoint.google) {
|
||||
if (endpoint && endpoint === EModelEndpoint.google && mode === VisionModes.generative) {
|
||||
delete imagePart.image_url;
|
||||
imagePart.inlineData = {
|
||||
mimeType: file.type,
|
||||
data: imageContent,
|
||||
};
|
||||
} else if (endpoint && endpoint === EModelEndpoint.google) {
|
||||
imagePart.image_url = imagePart.image_url.url;
|
||||
} else if (endpoint && endpoint === EModelEndpoint.anthropic) {
|
||||
imagePart.type = 'image';
|
||||
|
||||
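For reference, the per-endpoint image payload shapes this hunk produces look roughly like the following sketch (field names follow the diff; the sample values are invented):

// Sample inputs (invented for illustration):
const file = { type: 'image/png' };
const imageContent = 'iVBORw0KGgo...'; // base64 payload
const detail = 'auto';

// Default (OpenAI-style) endpoints: an image_url content part with a data URI.
const defaultPart = {
  type: 'image_url',
  image_url: { url: `data:${file.type};base64,${imageContent}`, detail },
};

// Google with VisionModes.generative: inlineData replaces image_url entirely.
const generativePart = { inlineData: { mimeType: file.type, data: imageContent } };

// Google without the generative mode: image_url is collapsed to the bare string.
const googlePart = { type: 'image_url', image_url: defaultPart.image_url.url };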
@@ -62,14 +62,20 @@ async function resizeImageBuffer(inputBuffer, resolution, endpoint) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Resizes an image buffer to webp format as well as reduces by specified or default 150 px width.
|
||||
* Resizes an image buffer to a specified format and width.
|
||||
*
|
||||
* @param {Buffer} inputBuffer - The buffer of the image to be resized.
|
||||
* @returns {Promise<{ buffer: Buffer, width: number, height: number, bytes: number }>} An object containing the resized image buffer, its size and dimensions.
|
||||
* @throws Will throw an error if the resolution parameter is invalid.
|
||||
* @param {Object} options - The options for resizing and converting the image.
|
||||
* @param {Buffer} options.inputBuffer - The buffer of the image to be resized.
|
||||
* @param {string} options.desiredFormat - The desired output format of the image.
|
||||
* @param {number} [options.width=150] - The desired width of the image. Defaults to 150 pixels.
|
||||
* @returns {Promise<{ buffer: Buffer, width: number, height: number, bytes: number }>} An object containing the resized image buffer, its size, and dimensions.
|
||||
* @throws Will throw an error if the resolution or format parameters are invalid.
|
||||
*/
|
||||
async function resizeAndConvert(inputBuffer, width = 150) {
|
||||
const resizedBuffer = await sharp(inputBuffer).resize({ width }).toFormat('webp').toBuffer();
|
||||
async function resizeAndConvert({ inputBuffer, desiredFormat, width = 150 }) {
|
||||
const resizedBuffer = await sharp(inputBuffer)
|
||||
.resize({ width })
|
||||
.toFormat(desiredFormat)
|
||||
.toBuffer();
|
||||
const resizedMetadata = await sharp(resizedBuffer).metadata();
|
||||
return {
|
||||
buffer: resizedBuffer,
|
||||
|
||||
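A short usage sketch of the reworked `resizeAndConvert` signature (the relative import path is assumed):

const { resizeAndConvert } = require('./resize'); // assumed relative path

async function makeThumbnail(inputBuffer, imageOutputType) {
  // width defaults to 150 when omitted; desiredFormat is any format sharp accepts ('png', 'webp', ...).
  const { buffer, width, height, bytes } = await resizeAndConvert({
    inputBuffer,
    desiredFormat: imageOutputType,
  });
  return { buffer, width, height, bytes };
}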
@@ -12,7 +12,7 @@ const {
|
||||
hostImageIdSuffix,
|
||||
hostImageNamePrefix,
|
||||
} = require('librechat-data-provider');
|
||||
const { convertToWebP, resizeAndConvert } = require('~/server/services/Files/images');
|
||||
const { convertImage, resizeAndConvert } = require('~/server/services/Files/images');
|
||||
const { initializeClient } = require('~/server/services/Endpoints/assistants');
|
||||
const { createFile, updateFileUsage, deleteFiles } = require('~/models/File');
|
||||
const { LB_QueueAsyncCall } = require('~/server/utils/queue');
|
||||
@@ -207,7 +207,7 @@ const processImageFile = async ({ req, res, file, metadata }) => {
|
||||
filename: file.originalname,
|
||||
context: FileContext.message_attachment,
|
||||
source,
|
||||
type: 'image/webp',
|
||||
type: `image/${req.app.locals.imageOutputType}`,
|
||||
width,
|
||||
height,
|
||||
},
|
||||
@@ -223,9 +223,9 @@ const processImageFile = async ({ req, res, file, metadata }) => {
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {Express.Request} params.req - The Express request object.
|
||||
* @param {FileContext} params.context - The context of the file (e.g., 'avatar', 'image_generation', etc.)
|
||||
* @param {boolean} [params.resize=true] - Whether to resize and convert the image to WebP. Default is `true`.
|
||||
* @param {boolean} [params.resize=true] - Whether to resize and convert the image to target format. Default is `true`.
|
||||
* @param {{ buffer: Buffer, width: number, height: number, bytes: number, filename: string, type: string, file_id: string }} [params.metadata] - Required metadata for the file if resize is false.
|
||||
* @returns {Promise<{ filepath: string, filename: string, source: string, type: 'image/webp'}>}
|
||||
* @returns {Promise<{ filepath: string, filename: string, source: string, type: string}>}
|
||||
*/
|
||||
const uploadImageBuffer = async ({ req, context, metadata = {}, resize = true }) => {
|
||||
const source = req.app.locals.fileStrategy;
|
||||
@@ -233,9 +233,14 @@ const uploadImageBuffer = async ({ req, context, metadata = {}, resize = true })
|
||||
let { buffer, width, height, bytes, filename, file_id, type } = metadata;
|
||||
if (resize) {
|
||||
file_id = v4();
|
||||
type = 'image/webp';
|
||||
({ buffer, width, height, bytes } = await resizeAndConvert(req.file.buffer));
|
||||
filename = path.basename(req.file.originalname, path.extname(req.file.originalname)) + '.webp';
|
||||
type = `image/${req.app.locals.imageOutputType}`;
|
||||
({ buffer, width, height, bytes } = await resizeAndConvert({
|
||||
inputBuffer: buffer,
|
||||
desiredFormat: req.app.locals.imageOutputType,
|
||||
}));
|
||||
filename = `${path.basename(req.file.originalname, path.extname(req.file.originalname))}.${
|
||||
req.app.locals.imageOutputType
|
||||
}`;
|
||||
}
|
||||
|
||||
const filepath = await saveBuffer({ userId: req.user.id, fileName: filename, buffer });
|
||||
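The MIME type and filename above are both derived from the configured output type; a self-contained sketch of that derivation (the helper name is illustrative):

const path = require('path');

function deriveImageMeta(originalname, imageOutputType) {
  const type = `image/${imageOutputType}`;
  const base = path.basename(originalname, path.extname(originalname));
  return { type, filename: `${base}.${imageOutputType}` };
}

// deriveImageMeta('photo.JPG', 'webp') -> { type: 'image/webp', filename: 'photo.webp' }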
@@ -363,7 +368,7 @@ const processOpenAIFile = async ({
|
||||
};
|
||||
|
||||
/**
|
||||
* Process OpenAI image files, convert to webp, save and return file metadata.
|
||||
* Process OpenAI image files, convert to target format, save and return file metadata.
|
||||
* @param {object} params - The params object.
|
||||
* @param {Express.Request} params.req - The Express request object.
|
||||
* @param {Buffer} params.buffer - The image buffer.
|
||||
@@ -375,12 +380,12 @@ const processOpenAIFile = async ({
|
||||
const processOpenAIImageOutput = async ({ req, buffer, file_id, filename, fileExt }) => {
|
||||
const currentDate = new Date();
|
||||
const formattedDate = currentDate.toISOString();
|
||||
const _file = await convertToWebP(req, buffer, 'high', `${file_id}${fileExt}`);
|
||||
const _file = await convertImage(req, buffer, 'high', `${file_id}${fileExt}`);
|
||||
const file = {
|
||||
..._file,
|
||||
usage: 1,
|
||||
user: req.user.id,
|
||||
type: 'image/webp',
|
||||
type: `image/${req.app.locals.imageOutputType}`,
|
||||
createdAt: formattedDate,
|
||||
updatedAt: formattedDate,
|
||||
source: req.app.locals.fileStrategy,
|
||||
|
||||
@@ -270,14 +270,20 @@ async function processRequiredActions(client, requiredActions) {
|
||||
if (!actionSets.length) {
|
||||
actionSets =
|
||||
(await loadActionSets({
|
||||
user: client.req.user.id,
|
||||
assistant_id: client.req.body.assistant_id,
|
||||
})) ?? [];
|
||||
}
|
||||
|
||||
const actionSet = actionSets.find((action) =>
|
||||
currentAction.tool.includes(domainParser(client.req, action.metadata.domain, true)),
|
||||
);
|
||||
let actionSet = null;
|
||||
let currentDomain = '';
|
||||
for (let action of actionSets) {
|
||||
const domain = await domainParser(client.req, action.metadata.domain, true);
|
||||
if (currentAction.tool.includes(domain)) {
|
||||
currentDomain = domain;
|
||||
actionSet = action;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!actionSet) {
|
||||
// TODO: try `function` if no action set is found
|
||||
@@ -299,10 +305,8 @@ async function processRequiredActions(client, requiredActions) {
|
||||
builders = requestBuilders;
|
||||
}
|
||||
|
||||
const functionName = currentAction.tool.replace(
|
||||
`${actionDelimiter}${domainParser(client.req, actionSet.metadata.domain, true)}`,
|
||||
'',
|
||||
);
|
||||
const functionName = currentAction.tool.replace(`${actionDelimiter}${currentDomain}`, '');
|
||||
|
||||
const requestBuilder = builders[functionName];
|
||||
|
||||
if (!requestBuilder) {
|
||||
|
||||
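The switch from `Array.prototype.find` to an explicit loop above is needed because `domainParser` is now awaited: `find` does not await an async predicate, so the returned Promise is always truthy and the first action set would always match. A self-contained illustration of the pitfall and the fix:

(async () => {
  const items = ['alpha', 'beta'];
  const asyncMatch = async (item) => item === 'beta';

  const wrong = items.find(asyncMatch); // 'alpha': the pending Promise is truthy

  let right = null;
  for (const item of items) {
    if (await asyncMatch(item)) {       // awaited sequentially, as in the loop above
      right = item;
      break;
    }
  }
  console.log(wrong, right);            // 'alpha' 'beta'
})();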
@@ -1,7 +1,19 @@
|
||||
const { User, Key } = require('~/models');
|
||||
const { ErrorTypes } = require('librechat-data-provider');
|
||||
const { encrypt, decrypt } = require('~/server/utils');
|
||||
const { User, Key } = require('~/models');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
* Updates the plugins for a user based on the action specified (install/uninstall).
|
||||
* @async
|
||||
* @param {Object} user - The user whose plugins are to be updated.
|
||||
* @param {string} pluginKey - The key of the plugin to install or uninstall.
|
||||
* @param {'install' | 'uninstall'} action - The action to perform, 'install' or 'uninstall'.
|
||||
* @returns {Promise<Object>} The result of the update operation.
|
||||
* @throws Logs the error internally if the update operation fails.
|
||||
* @description This function updates the plugin array of a user document based on the specified action.
|
||||
* It adds a plugin key to the plugins array for an 'install' action, and removes it for an 'uninstall' action.
|
||||
*/
|
||||
const updateUserPluginsService = async (user, pluginKey, action) => {
|
||||
try {
|
||||
if (action === 'install') {
|
||||
@@ -21,14 +33,64 @@ const updateUserPluginsService = async (user, pluginKey, action) => {
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieves and decrypts the key value for a given user identified by userId and identifier name.
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {string} params.userId - The unique identifier for the user.
|
||||
* @param {string} params.name - The name associated with the key.
|
||||
* @returns {Promise<string>} The decrypted key value.
|
||||
* @throws {Error} Throws an error if the key is not found or if there is a problem during key retrieval.
|
||||
* @description This function searches for a user's key in the database using their userId and name.
|
||||
* If found, it decrypts the value of the key and returns it. If no key is found, it throws
|
||||
* an error indicating that there is no user key available.
|
||||
*/
|
||||
const getUserKey = async ({ userId, name }) => {
|
||||
const keyValue = await Key.findOne({ userId, name }).lean();
|
||||
if (!keyValue) {
|
||||
throw new Error('User-provided key not found');
|
||||
throw new Error(
|
||||
JSON.stringify({
|
||||
type: ErrorTypes.NO_USER_KEY,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return decrypt(keyValue.value);
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieves, decrypts, and parses the key values for a given user identified by userId and name.
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {string} params.userId - The unique identifier for the user.
|
||||
* @param {string} params.name - The name associated with the key.
|
||||
* @returns {Promise<Record<string,string>>} The decrypted and parsed key values.
|
||||
* @throws {Error} Throws an error if the key is invalid or if there is a problem during key value parsing.
|
||||
* @description This function retrieves a user's encrypted key using their userId and name, decrypts it,
|
||||
* and then attempts to parse the decrypted string into a JSON object. If the parsing fails,
|
||||
* it throws an error indicating that the user key is invalid.
|
||||
*/
|
||||
const getUserKeyValues = async ({ userId, name }) => {
|
||||
let userValues = await getUserKey({ userId, name });
|
||||
try {
|
||||
userValues = JSON.parse(userValues);
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
JSON.stringify({
|
||||
type: ErrorTypes.INVALID_USER_KEY,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return userValues;
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieves the expiry information of a user's key identified by userId and name.
|
||||
* @async
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {string} params.userId - The unique identifier for the user.
|
||||
* @param {string} params.name - The name associated with the key.
|
||||
* @returns {Promise<{expiresAt: Date | null}>} The expiry date of the key or null if the key doesn't exist.
|
||||
* @description This function fetches a user's key from the database using their userId and name and
|
||||
* returns its expiry date. If the key is not found, it returns null for the expiry date.
|
||||
*/
|
||||
const getUserKeyExpiry = async ({ userId, name }) => {
|
||||
const keyValue = await Key.findOne({ userId, name }).lean();
|
||||
if (!keyValue) {
|
||||
@@ -37,6 +99,18 @@ const getUserKeyExpiry = async ({ userId, name }) => {
|
||||
return { expiresAt: keyValue.expiresAt };
|
||||
};
|
||||
|
||||
/**
|
||||
* Updates or inserts a new key for a given user identified by userId and name, with a specified value and expiry date.
|
||||
* @async
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {string} params.userId - The unique identifier for the user.
|
||||
* @param {string} params.name - The name associated with the key.
|
||||
* @param {string} params.value - The value to be encrypted and stored as the key's value.
|
||||
* @param {Date} params.expiresAt - The expiry date for the key.
|
||||
* @returns {Promise<Object>} The updated or newly inserted key document.
|
||||
* @description This function either updates an existing user key or inserts a new one into the database,
|
||||
* after encrypting the provided value. It sets the provided expiry date for the key.
|
||||
*/
|
||||
const updateUserKey = async ({ userId, name, value, expiresAt }) => {
|
||||
const encryptedValue = encrypt(value);
|
||||
return await Key.findOneAndUpdate(
|
||||
@@ -51,6 +125,18 @@ const updateUserKey = async ({ userId, name, value, expiresAt }) => {
|
||||
).lean();
|
||||
};
|
||||
|
||||
/**
|
||||
* Deletes a key or all keys for a given user identified by userId, optionally based on a specified name.
|
||||
* @async
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {string} params.userId - The unique identifier for the user.
|
||||
* @param {string} [params.name] - The name associated with the key to delete. If not provided and all is true, deletes all keys.
|
||||
* @param {boolean} [params.all=false] - Whether to delete all keys for the user.
|
||||
* @returns {Promise<Object>} The result of the deletion operation.
|
||||
* @description This function deletes a specific key or all keys for a user from the database.
|
||||
* If a name is provided and all is false, it deletes only the key with that name.
|
||||
* If all is true, it ignores the name and deletes all keys for the user.
|
||||
*/
|
||||
const deleteUserKey = async ({ userId, name, all = false }) => {
|
||||
if (all) {
|
||||
return await Key.deleteMany({ userId });
|
||||
@@ -59,11 +145,23 @@ const deleteUserKey = async ({ userId, name, all = false }) => {
|
||||
await Key.findOneAndDelete({ userId, name }).lean();
|
||||
};
|
||||
|
||||
const checkUserKeyExpiry = (expiresAt, message) => {
|
||||
/**
|
||||
* Checks if a user key has expired based on the provided expiration date and endpoint.
|
||||
* If the key has expired, it throws an Error with details including the type of error, the expiration date, and the endpoint.
|
||||
*
|
||||
* @param {string} expiresAt - The expiration date of the user key in a format that can be parsed by the Date constructor.
|
||||
* @param {string} endpoint - The endpoint associated with the user key to be checked.
|
||||
* @throws {Error} Throws an error if the user key has expired. The error message is a stringified JSON object
|
||||
* containing the type of error (`ErrorTypes.EXPIRED_USER_KEY`), the expiration date in the local string format, and the endpoint.
|
||||
*/
|
||||
const checkUserKeyExpiry = (expiresAt, endpoint) => {
|
||||
const expiresAtDate = new Date(expiresAt);
|
||||
if (expiresAtDate < new Date()) {
|
||||
const expiryStr = `User-provided key expired at ${expiresAtDate.toLocaleString()}`;
|
||||
const errorMessage = message ? `${message}\n${expiryStr}` : expiryStr;
|
||||
const errorMessage = JSON.stringify({
|
||||
type: ErrorTypes.EXPIRED_USER_KEY,
|
||||
expiredAt: expiresAtDate.toLocaleString(),
|
||||
endpoint,
|
||||
});
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
};
|
||||
@@ -71,6 +169,7 @@ const checkUserKeyExpiry = (expiresAt, message) => {
|
||||
module.exports = {
|
||||
updateUserPluginsService,
|
||||
getUserKey,
|
||||
getUserKeyValues,
|
||||
getUserKeyExpiry,
|
||||
updateUserKey,
|
||||
deleteUserKey,
|
||||
|
||||
api/server/services/start/assistants.js (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
const {
|
||||
Capabilities,
|
||||
EModelEndpoint,
|
||||
assistantEndpointSchema,
|
||||
} = require('librechat-data-provider');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
* Sets up the minimum, default Assistants configuration if Azure OpenAI Assistants option is enabled.
|
||||
* @returns {Partial<TAssistantEndpoint>} The Assistants endpoint configuration.
|
||||
*/
|
||||
function azureAssistantsDefaults() {
|
||||
return {
|
||||
capabilities: [Capabilities.tools, Capabilities.actions, Capabilities.code_interpreter],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up the Assistants configuration from the config (`librechat.yaml`) file.
|
||||
* @param {TCustomConfig} config - The loaded custom configuration.
|
||||
* @param {Partial<TAssistantEndpoint>} [prevConfig]
|
||||
* - The previously loaded assistants configuration from Azure OpenAI Assistants option.
|
||||
* @returns {Partial<TAssistantEndpoint>} The Assistants endpoint configuration.
|
||||
*/
|
||||
function assistantsConfigSetup(config, prevConfig = {}) {
|
||||
const assistantsConfig = config.endpoints[EModelEndpoint.assistants];
|
||||
const parsedConfig = assistantEndpointSchema.parse(assistantsConfig);
|
||||
if (assistantsConfig.supportedIds?.length && assistantsConfig.excludedIds?.length) {
|
||||
logger.warn(
|
||||
`Both \`supportedIds\` and \`excludedIds\` are defined for the ${EModelEndpoint.assistants} endpoint; \`excludedIds\` field will be ignored.`,
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
...prevConfig,
|
||||
retrievalModels: parsedConfig.retrievalModels,
|
||||
disableBuilder: parsedConfig.disableBuilder,
|
||||
pollIntervalMs: parsedConfig.pollIntervalMs,
|
||||
supportedIds: parsedConfig.supportedIds,
|
||||
capabilities: parsedConfig.capabilities,
|
||||
excludedIds: parsedConfig.excludedIds,
|
||||
timeoutMs: parsedConfig.timeoutMs,
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = { azureAssistantsDefaults, assistantsConfigSetup };
|
||||
api/server/services/start/azureOpenAI.js (new file, 54 lines)
@@ -0,0 +1,54 @@
|
||||
const {
|
||||
EModelEndpoint,
|
||||
validateAzureGroups,
|
||||
mapModelToAzureConfig,
|
||||
} = require('librechat-data-provider');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
* Sets up the Azure OpenAI configuration from the config (`librechat.yaml`) file.
|
||||
* @param {TCustomConfig} config - The loaded custom configuration.
|
||||
* @returns {TAzureConfig} The Azure OpenAI configuration.
|
||||
*/
|
||||
function azureConfigSetup(config) {
|
||||
const { groups, ...azureConfiguration } = config.endpoints[EModelEndpoint.azureOpenAI];
|
||||
/** @type {TAzureConfigValidationResult} */
|
||||
const { isValid, modelNames, modelGroupMap, groupMap, errors } = validateAzureGroups(groups);
|
||||
|
||||
if (!isValid) {
|
||||
const errorString = errors.join('\n');
|
||||
const errorMessage = 'Invalid Azure OpenAI configuration:\n' + errorString;
|
||||
logger.error(errorMessage);
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
const assistantModels = [];
|
||||
const assistantGroups = new Set();
|
||||
for (const modelName of modelNames) {
|
||||
mapModelToAzureConfig({ modelName, modelGroupMap, groupMap });
|
||||
const groupName = modelGroupMap?.[modelName]?.group;
|
||||
const modelGroup = groupMap?.[groupName];
|
||||
let supportsAssistants = modelGroup?.assistants || modelGroup?.[modelName]?.assistants;
|
||||
if (supportsAssistants) {
|
||||
assistantModels.push(modelName);
|
||||
!assistantGroups.has(groupName) && assistantGroups.add(groupName);
|
||||
}
|
||||
}
|
||||
|
||||
if (azureConfiguration.assistants && assistantModels.length === 0) {
|
||||
throw new Error(
|
||||
'No Azure models are configured to support assistants. Please remove the `assistants` field or configure at least one model to support assistants.',
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
modelNames,
|
||||
modelGroupMap,
|
||||
groupMap,
|
||||
assistantModels,
|
||||
assistantGroups: Array.from(assistantGroups),
|
||||
...azureConfiguration,
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = { azureConfigSetup };
|
||||
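Roughly, the object `azureConfigSetup` returns looks like the sketch below (field names come from the code above; the concrete group and deployment names are invented for illustration):

const exampleAzureConfig = {
  modelNames: ['gpt-4-turbo', 'gpt-35-turbo'],
  modelGroupMap: { 'gpt-4-turbo': { group: 'eastus' } },
  groupMap: { eastus: { apiKey: '${EASTUS_API_KEY}', assistants: true } },
  assistantModels: ['gpt-4-turbo'],  // models whose group (or model entry) sets assistants: true
  assistantGroups: ['eastus'],
  // ...plus the remaining azureOpenAI fields carried over from librechat.yaml
};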
api/server/services/start/checks.js (new file, 107 lines)
@@ -0,0 +1,107 @@
|
||||
const {
|
||||
Constants,
|
||||
deprecatedAzureVariables,
|
||||
conflictingAzureVariables,
|
||||
} = require('librechat-data-provider');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const secretDefaults = {
|
||||
CREDS_KEY: 'f34be427ebb29de8d88c107a71546019685ed8b241d8f2ed00c3df97ad2566f0',
|
||||
CREDS_IV: 'e2341419ec3dd3d19b13a1a87fafcbfb',
|
||||
JWT_SECRET: '16f8c0ef4a5d391b26034086c628469d3f9f497f08163ab9b40137092f2909ef',
|
||||
JWT_REFRESH_SECRET: 'eaa5191f2914e30b9387fd84e254e4ba6fc51b4654968a9b0803b456a54b8418',
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks environment variables for default secrets and deprecated variables.
|
||||
* Logs warnings for any default secret values being used and for usage of deprecated `GOOGLE_API_KEY`.
|
||||
* Advises on replacing default secrets and updating deprecated variables.
|
||||
*/
|
||||
function checkVariables() {
|
||||
let hasDefaultSecrets = false;
|
||||
for (const [key, value] of Object.entries(secretDefaults)) {
|
||||
if (process.env[key] === value) {
|
||||
logger.warn(`Default value for ${key} is being used.`);
|
||||
!hasDefaultSecrets && (hasDefaultSecrets = true);
|
||||
}
|
||||
}
|
||||
|
||||
if (hasDefaultSecrets) {
|
||||
logger.info(
|
||||
`Please replace any default secret values.
|
||||
|
||||
For your convenience, fork & run this replit to generate your own secret values:
|
||||
|
||||
https://replit.com/@daavila/crypto#index.js
|
||||
|
||||
`,
|
||||
);
|
||||
}
|
||||
|
||||
if (process.env.GOOGLE_API_KEY) {
|
||||
logger.warn(
|
||||
'The `GOOGLE_API_KEY` environment variable is deprecated.\nPlease use the `GOOGLE_SEARCH_API_KEY` environment variable instead.',
|
||||
);
|
||||
}
|
||||
|
||||
if (process.env.OPENROUTER_API_KEY) {
|
||||
logger.warn(
|
||||
`The \`OPENROUTER_API_KEY\` environment variable is deprecated and its functionality will be removed soon.
|
||||
Use of this environment variable is highly discouraged as it can lead to unexpected errors when using custom endpoints.
|
||||
Please use the config (\`librechat.yaml\`) file for setting up OpenRouter, and use \`OPENROUTER_KEY\` or another environment variable instead.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
 * Checks the health of auxiliary APIs by attempting a fetch request to their respective `/health` endpoints.
|
||||
* Logs information or warning based on the API's availability and response.
|
||||
*/
|
||||
async function checkHealth() {
|
||||
try {
|
||||
const response = await fetch(`${process.env.RAG_API_URL}/health`);
|
||||
if (response?.ok && response?.status === 200) {
|
||||
logger.info(`RAG API is running and reachable at ${process.env.RAG_API_URL}.`);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(
|
||||
`RAG API is either not running or not reachable at ${process.env.RAG_API_URL}; you may experience errors with file uploads.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks for the usage of deprecated and conflicting Azure variables.
|
||||
* Logs warnings for any deprecated or conflicting environment variables found, indicating potential issues with `azureOpenAI` endpoint configuration.
|
||||
*/
|
||||
function checkAzureVariables() {
|
||||
deprecatedAzureVariables.forEach(({ key, description }) => {
|
||||
if (process.env[key]) {
|
||||
logger.warn(
|
||||
`The \`${key}\` environment variable (related to ${description}) should not be used in combination with the \`azureOpenAI\` endpoint configuration, as you will experience conflicts and errors.`,
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
conflictingAzureVariables.forEach(({ key }) => {
|
||||
if (process.env[key]) {
|
||||
logger.warn(
|
||||
`The \`${key}\` environment variable should not be used in combination with the \`azureOpenAI\` endpoint configuration, as you may experience conflicts with the defined placeholders when mapping to the current model grouping of the same name.`,
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs basic checks on the loaded config object.
|
||||
* @param {TCustomConfig} config - The loaded custom configuration.
|
||||
*/
|
||||
function checkConfig(config) {
|
||||
if (config.version !== Constants.CONFIG_VERSION) {
|
||||
logger.info(
|
||||
`\nOutdated Config version: ${config.version}. Current version: ${Constants.CONFIG_VERSION}\n\nCheck out the latest config file guide for new options and features.\nhttps://docs.librechat.ai/install/configuration/custom_config.html\n\n`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { checkVariables, checkHealth, checkConfig, checkAzureVariables };
|
||||
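A hedged sketch of how these checks might be wired together at startup (the actual call site is outside this diff):

const { checkVariables, checkHealth, checkConfig, checkAzureVariables } = require('./checks');

async function runStartupChecks(customConfig) {
  checkVariables();            // warn on default secrets and deprecated env vars
  await checkHealth();         // probe the RAG API /health endpoint, warn if unreachable
  if (customConfig) {
    checkConfig(customConfig); // notify when the config version is outdated
  }
  checkAzureVariables();       // warn on deprecated or conflicting Azure env vars
}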
@@ -5,6 +5,7 @@ const handleText = require('./handleText');
|
||||
const cryptoUtils = require('./crypto');
|
||||
const citations = require('./citations');
|
||||
const sendEmail = require('./sendEmail');
|
||||
const mongoose = require('./mongoose');
|
||||
const queue = require('./queue');
|
||||
const files = require('./files');
|
||||
const math = require('./math');
|
||||
@@ -14,6 +15,7 @@ module.exports = {
|
||||
...cryptoUtils,
|
||||
...handleText,
|
||||
...citations,
|
||||
...mongoose,
|
||||
countTokens,
|
||||
removePorts,
|
||||
sendEmail,
|
||||
|
||||
api/server/utils/mongoose.js (new file, 25 lines)
@@ -0,0 +1,25 @@
|
||||
const mongoose = require('mongoose');
|
||||
/**
|
||||
* Executes a database operation within a session.
|
||||
 * @param {() => Promise<any>} method - The method to execute. This method must accept a session as its final argument.
|
||||
* @param {...any} args - Additional arguments to pass to the method.
|
||||
* @returns {Promise<any>} - The result of the executed method.
|
||||
*/
|
||||
async function withSession(method, ...args) {
|
||||
const session = await mongoose.startSession();
|
||||
session.startTransaction();
|
||||
try {
|
||||
const result = await method(...args, session);
|
||||
await session.commitTransaction();
|
||||
return result;
|
||||
} catch (error) {
|
||||
if (session.inTransaction()) {
|
||||
await session.abortTransaction();
|
||||
}
|
||||
throw error;
|
||||
} finally {
|
||||
await session.endSession();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { withSession };
|
||||
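A usage sketch for `withSession`; note that the session is appended as the wrapped method's final argument (`method(...args, session)`). `Account` and its import path are hypothetical, used only for illustration:

const { withSession } = require('~/server/utils/mongoose'); // assumed import path
const Account = require('~/models/Account');                // hypothetical model

async function transferCredits(fromId, toId, amount, session) {
  await Account.updateOne({ _id: fromId }, { $inc: { credits: -amount } }, { session });
  await Account.updateOne({ _id: toId }, { $inc: { credits: amount } }, { session });
}

// Both updates commit together or are rolled back together:
// await withSession(transferCredits, fromId, toId, amount);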
@@ -1,8 +1,7 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const axios = require('axios');
|
||||
const passport = require('passport');
|
||||
const jwtDecode = require('jsonwebtoken/decode');
|
||||
const { Issuer, Strategy: OpenIDStrategy } = require('openid-client');
|
||||
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
||||
const { logger } = require('~/config');
|
||||
const User = require('~/models/User');
|
||||
|
||||
@@ -12,22 +11,31 @@ try {
|
||||
} catch (err) {
|
||||
logger.error('[openidStrategy] crypto support is disabled!', err);
|
||||
}
|
||||
/**
|
||||
* Downloads an image from a URL using an access token.
|
||||
* @param {string} url
|
||||
* @param {string} accessToken
|
||||
* @returns {Promise<Buffer>}
|
||||
*/
|
||||
const downloadImage = async (url, accessToken) => {
|
||||
if (!url) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const downloadImage = async (url, imagePath, accessToken) => {
|
||||
try {
|
||||
const response = await axios.get(url, {
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
responseType: 'arraybuffer',
|
||||
});
|
||||
|
||||
fs.mkdirSync(path.dirname(imagePath), { recursive: true });
|
||||
fs.writeFileSync(imagePath, response.data);
|
||||
|
||||
const fileName = path.basename(imagePath);
|
||||
|
||||
return `/images/openid/${fileName}`;
|
||||
if (response.ok) {
|
||||
const buffer = await response.buffer();
|
||||
return buffer;
|
||||
} else {
|
||||
throw new Error(`${response.statusText} (HTTP ${response.status})`);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`[openidStrategy] downloadImage: Error downloading image at URL "${url}": ${error}`,
|
||||
@@ -36,6 +44,26 @@ const downloadImage = async (url, imagePath, accessToken) => {
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts an input into a string suitable for a username.
|
||||
* If the input is a string, it will be returned as is.
|
||||
* If the input is an array, elements will be joined with underscores.
|
||||
* In case of undefined or other falsy values, a default value will be returned.
|
||||
*
|
||||
* @param {string | string[] | undefined} input - The input value to be converted into a username.
|
||||
* @param {string} [defaultValue=''] - The default value to return if the input is falsy.
|
||||
* @returns {string} The processed input as a string suitable for a username.
|
||||
*/
|
||||
function convertToUsername(input, defaultValue = '') {
|
||||
if (typeof input === 'string') {
|
||||
return input;
|
||||
} else if (Array.isArray(input)) {
|
||||
return input.join('_');
|
||||
}
|
||||
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
async function setupOpenId() {
|
||||
try {
|
||||
const issuer = await Issuer.discover(process.env.OPENID_ISSUER);
|
||||
@@ -44,7 +72,9 @@ async function setupOpenId() {
|
||||
client_secret: process.env.OPENID_CLIENT_SECRET,
|
||||
redirect_uris: [process.env.DOMAIN_SERVER + process.env.OPENID_CALLBACK_URL],
|
||||
});
|
||||
|
||||
const requiredRole = process.env.OPENID_REQUIRED_ROLE;
|
||||
const requiredRoleParameterPath = process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH;
|
||||
const requiredRoleTokenKind = process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND;
|
||||
const openidLogin = new OpenIDStrategy(
|
||||
{
|
||||
client,
|
||||
@@ -71,11 +101,45 @@ async function setupOpenId() {
|
||||
fullName = userinfo.username || userinfo.email;
|
||||
}
|
||||
|
||||
if (requiredRole) {
|
||||
let decodedToken = '';
|
||||
if (requiredRoleTokenKind === 'access') {
|
||||
decodedToken = jwtDecode(tokenset.access_token);
|
||||
} else if (requiredRoleTokenKind === 'id') {
|
||||
decodedToken = jwtDecode(tokenset.id_token);
|
||||
}
|
||||
const pathParts = requiredRoleParameterPath.split('.');
|
||||
let found = true;
|
||||
let roles = pathParts.reduce((o, key) => {
|
||||
if (o === null || o === undefined || !(key in o)) {
|
||||
found = false;
|
||||
return [];
|
||||
}
|
||||
return o[key];
|
||||
}, decodedToken);
|
||||
|
||||
if (!found) {
|
||||
console.error(
|
||||
`Key '${requiredRoleParameterPath}' not found in ${requiredRoleTokenKind} token!`,
|
||||
);
|
||||
}
|
||||
|
||||
if (!roles.includes(requiredRole)) {
|
||||
return done(null, false, {
|
||||
message: `You must have the "${requiredRole}" role to log in.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const username = convertToUsername(
|
||||
userinfo.username || userinfo.given_name || userinfo.email,
|
||||
);
|
||||
|
||||
if (!user) {
|
||||
user = new User({
|
||||
provider: 'openid',
|
||||
openidId: userinfo.sub,
|
||||
username: userinfo.username || userinfo.given_name || '',
|
||||
username,
|
||||
email: userinfo.email || '',
|
||||
emailVerified: userinfo.email_verified || false,
|
||||
name: fullName,
|
||||
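A small sketch of the role lookup above: `OPENID_REQUIRED_ROLE_PARAMETER_PATH` is a dot-separated path into the decoded token, e.g. 'realm_access.roles' for Keycloak-style tokens (the sample token and path are assumptions, not from the diff):

const decodedToken = { realm_access: { roles: ['librechat-user', 'admin'] } };
const requiredRoleParameterPath = 'realm_access.roles';

const roles = requiredRoleParameterPath
  .split('.')
  .reduce((obj, key) => (obj && key in obj ? obj[key] : []), decodedToken);

// roles -> ['librechat-user', 'admin']; login is rejected unless it includes OPENID_REQUIRED_ROLE.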
@@ -83,11 +147,12 @@ async function setupOpenId() {
|
||||
} else {
|
||||
user.provider = 'openid';
|
||||
user.openidId = userinfo.sub;
|
||||
user.username = userinfo.username || userinfo.given_name || '';
|
||||
user.username = username;
|
||||
user.name = fullName;
|
||||
}
|
||||
|
||||
if (userinfo.picture) {
|
||||
/** @type {string | undefined} */
|
||||
const imageUrl = userinfo.picture;
|
||||
|
||||
let fileName;
|
||||
@@ -99,24 +164,18 @@ async function setupOpenId() {
|
||||
fileName = userinfo.sub + '.png';
|
||||
}
|
||||
|
||||
const imagePath = path.join(
|
||||
__dirname,
|
||||
'..',
|
||||
'..',
|
||||
'client',
|
||||
'public',
|
||||
'images',
|
||||
'openid',
|
||||
fileName,
|
||||
);
|
||||
|
||||
const imagePathOrEmpty = await downloadImage(
|
||||
imageUrl,
|
||||
imagePath,
|
||||
tokenset.access_token,
|
||||
);
|
||||
|
||||
user.avatar = imagePathOrEmpty;
|
||||
const imageBuffer = await downloadImage(imageUrl, tokenset.access_token);
|
||||
const { saveBuffer } = getStrategyFunctions(process.env.CDN_PROVIDER);
|
||||
if (imageBuffer) {
|
||||
const imagePath = await saveBuffer({
|
||||
fileName,
|
||||
userId: user._id.toString(),
|
||||
buffer: imageBuffer,
|
||||
});
|
||||
user.avatar = imagePath ?? '';
|
||||
} else {
|
||||
user.avatar = '';
|
||||
}
|
||||
} else {
|
||||
user.avatar = '';
|
||||
}
|
||||
|
||||
@@ -25,12 +25,12 @@ const handleExistingUser = async (oldUser, avatarUrl) => {
|
||||
await oldUser.save();
|
||||
} else if (!isLocal && (oldUser.avatar === null || !oldUser.avatar.includes('?manual=true'))) {
|
||||
const userId = oldUser._id;
|
||||
const webPBuffer = await resizeAvatar({
|
||||
const resizedBuffer = await resizeAvatar({
|
||||
userId,
|
||||
input: avatarUrl,
|
||||
});
|
||||
const { processAvatar } = getStrategyFunctions(fileStrategy);
|
||||
oldUser.avatar = await processAvatar({ buffer: webPBuffer, userId });
|
||||
oldUser.avatar = await processAvatar({ buffer: resizedBuffer, userId });
|
||||
await oldUser.save();
|
||||
}
|
||||
};
|
||||
@@ -83,12 +83,12 @@ const createNewUser = async ({
|
||||
|
||||
if (!isLocal) {
|
||||
const userId = newUser._id;
|
||||
const webPBuffer = await resizeAvatar({
|
||||
const resizedBuffer = await resizeAvatar({
|
||||
userId,
|
||||
input: avatarUrl,
|
||||
});
|
||||
const { processAvatar } = getStrategyFunctions(fileStrategy);
|
||||
newUser.avatar = await processAvatar({ buffer: webPBuffer, userId });
|
||||
newUser.avatar = await processAvatar({ buffer: resizedBuffer, userId });
|
||||
await newUser.save();
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Test database. You can use your actual MONGO_URI if you don't mind it potentially including test data.
|
||||
# Test DB URI. You can use your actual MONGO_URI if you don't mind it potentially including test data.
|
||||
MONGO_URI=mongodb://127.0.0.1:27017/chatgpt-jest
|
||||
|
||||
# Credential encryption/decryption for testing
|
||||
|
||||
@@ -14,6 +14,12 @@
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports GenerativeModel
|
||||
* @typedef {import('@google/generative-ai').GenerativeModel} GenerativeModel
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports AssistantStreamEvent
|
||||
* @typedef {import('openai').default.Beta.AssistantStreamEvent} AssistantStreamEvent
|
||||
@@ -44,6 +50,30 @@
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports ChatCompletionPayload
|
||||
* @typedef {import('openai').OpenAI.ChatCompletionCreateParams} ChatCompletionPayload
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports ChatCompletionMessages
|
||||
* @typedef {import('openai').OpenAI.ChatCompletionMessageParam} ChatCompletionMessages
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports CohereChatStreamRequest
|
||||
* @typedef {import('cohere-ai').Cohere.ChatStreamRequest} CohereChatStreamRequest
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports CohereChatRequest
|
||||
* @typedef {import('cohere-ai').Cohere.ChatRequest} CohereChatRequest
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports OpenAIRequestOptions
|
||||
* @typedef {import('openai').OpenAI.RequestOptions} OpenAIRequestOptions
|
||||
@@ -271,6 +301,18 @@
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports TAzureConfigValidationResult
|
||||
* @typedef {import('librechat-data-provider').TAzureConfigValidationResult} TAzureConfigValidationResult
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports EImageOutputType
|
||||
* @typedef {import('librechat-data-provider').EImageOutputType} EImageOutputType
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports TCustomConfig
|
||||
* @typedef {import('librechat-data-provider').TCustomConfig} TCustomConfig
|
||||
@@ -1062,3 +1104,44 @@
|
||||
* @method handleMessageEvent Handles events related to messages within the run.
|
||||
* @method messageCompleted Handles the completion of a message processing.
|
||||
*/
|
||||
|
||||
/* Native app/client methods */
|
||||
|
||||
/**
|
||||
* Accumulates tokens and sends them to the client for processing.
|
||||
* @callback onTokenProgress
|
||||
* @param {string} token - The current token generated by the model.
|
||||
* @returns {Promise<void>}
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* Main entrypoint for API completion calls
|
||||
* @callback sendCompletion
|
||||
* @param {Array<ChatCompletionMessages> | string} payload - The messages or prompt to send to the model
|
||||
* @param {object} opts - Options for the completion
|
||||
* @param {onTokenProgress} opts.onProgress - Callback function to handle token progress
|
||||
* @param {AbortController} opts.abortController - AbortController instance
|
||||
* @returns {Promise<string>}
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* Legacy completion handler for OpenAI API.
|
||||
* @callback getCompletion
|
||||
* @param {Array<ChatCompletionMessages> | string} input - Array of messages or a single prompt string
|
||||
* @param {(event: object | string) => Promise<void>} onProgress - SSE progress handler
|
||||
* @param {onTokenProgress} onTokenProgress - Token progress handler
|
||||
* @param {AbortController} [abortController] - AbortController instance
|
||||
* @returns {Promise<Object | string>} - Completion response
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* Cohere Stream handling. Note: abortController is not supported here.
|
||||
* @callback cohereChatCompletion
|
||||
* @param {object} params
|
||||
* @param {CohereChatStreamRequest | CohereChatRequest} params.payload
|
||||
* @param {onTokenProgress} params.onTokenProgress
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
const { CohereConstants } = require('librechat-data-provider');
|
||||
|
||||
/**
|
||||
* Extracts a valid OpenAI baseURL from a given string, matching "url/v1," followed by an optional suffix.
|
||||
* The suffix can be one of several predefined values (e.g., 'openai', 'azure-openai', etc.),
|
||||
@@ -19,6 +21,10 @@ function extractBaseURL(url) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (url.startsWith(CohereConstants.API_URL)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!url.includes('/v1')) {
|
||||
return url;
|
||||
}
|
||||
|
||||
@@ -49,7 +49,8 @@ const openAIModels = {
|
||||
'gpt-4-1106': 127990, // -10 from max
|
||||
'gpt-4-0125': 127990, // -10 from max
|
||||
'gpt-4-turbo': 127990, // -10 from max
|
||||
'gpt-3.5-turbo': 4092, // -5 from max
|
||||
'gpt-4-vision': 127990, // -10 from max
|
||||
'gpt-3.5-turbo': 16375, // -10 from max
|
||||
'gpt-3.5-turbo-0613': 4092, // -5 from max
|
||||
'gpt-3.5-turbo-0301': 4092, // -5 from max
|
||||
'gpt-3.5-turbo-16k': 16375, // -10 from max
|
||||
@@ -59,9 +60,20 @@ const openAIModels = {
|
||||
'mistral-': 31990, // -10 from max
|
||||
};
|
||||
|
||||
const cohereModels = {
|
||||
'command-light': 4086, // -10 from max
|
||||
'command-light-nightly': 8182, // -10 from max
|
||||
command: 4086, // -10 from max
|
||||
'command-nightly': 8182, // -10 from max
|
||||
'command-r': 127500, // -500 from max
|
||||
'command-r-plus': 127500, // -500 from max
|
||||
};
|
||||
|
||||
const googleModels = {
|
||||
/* Max I/O is combined so we subtract the amount from max response tokens for actual total */
|
||||
gemini: 32750, // -10 from max
|
||||
gemini: 30720, // -2048 from max
|
||||
'gemini-pro-vision': 12288, // -4096 from max
|
||||
'gemini-1.5': 1048576, // -8192 from max
|
||||
'text-bison-32k': 32758, // -10 from max
|
||||
'chat-bison-32k': 32758, // -10 from max
|
||||
'code-bison-32k': 32758, // -10 from max
|
||||
@@ -83,11 +95,13 @@ const anthropicModels = {
|
||||
'claude-3-opus': 200000,
|
||||
};
|
||||
|
||||
const aggregateModels = { ...openAIModels, ...googleModels, ...anthropicModels, ...cohereModels };
|
||||
|
||||
// Order is important here: by model series and context size (gpt-4 then gpt-3, ascending)
|
||||
const maxTokensMap = {
|
||||
[EModelEndpoint.azureOpenAI]: openAIModels,
|
||||
[EModelEndpoint.openAI]: { ...openAIModels, ...googleModels, ...anthropicModels },
|
||||
[EModelEndpoint.custom]: { ...openAIModels, ...googleModels, ...anthropicModels },
|
||||
[EModelEndpoint.openAI]: aggregateModels,
|
||||
[EModelEndpoint.custom]: aggregateModels,
|
||||
[EModelEndpoint.google]: googleModels,
|
||||
[EModelEndpoint.anthropic]: anthropicModels,
|
||||
};
|
||||
@@ -204,6 +218,12 @@ function processModelData(input) {
|
||||
|
||||
for (const model of data) {
|
||||
const modelKey = model.id;
|
||||
if (modelKey === 'openrouter/auto') {
|
||||
model.pricing = {
|
||||
prompt: '0.00001',
|
||||
completion: '0.00003',
|
||||
};
|
||||
}
|
||||
const prompt = parseFloat(model.pricing.prompt) * 1000000;
|
||||
const completion = parseFloat(model.pricing.completion) * 1000000;
|
||||
|
||||
|
||||
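A worked example of the per-million conversion above, using the hard-coded 'openrouter/auto' pricing:

// prompt:     0.00001 USD/token * 1,000,000 = 10 USD per 1M prompt tokens
// completion: 0.00003 USD/token * 1,000,000 = 30 USD per 1M completion tokens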
@@ -59,6 +59,12 @@ describe('getModelMaxTokens', () => {
|
||||
expect(getModelMaxTokens('gpt-4-1106')).toBe(maxTokensMap[EModelEndpoint.openAI]['gpt-4-1106']);
|
||||
});
|
||||
|
||||
test('should return correct tokens for gpt-4-vision exact match', () => {
|
||||
expect(getModelMaxTokens('gpt-4-vision')).toBe(
|
||||
maxTokensMap[EModelEndpoint.openAI]['gpt-4-vision'],
|
||||
);
|
||||
});
|
||||
|
||||
test('should return correct tokens for gpt-3.5-turbo-1106 partial match', () => {
|
||||
expect(getModelMaxTokens('something-/gpt-3.5-turbo-1106')).toBe(
|
||||
maxTokensMap[EModelEndpoint.openAI]['gpt-3.5-turbo-1106'],
|
||||
@@ -131,6 +137,18 @@ describe('getModelMaxTokens', () => {
|
||||
});
|
||||
|
||||
test('should return correct tokens for partial match - Google models', () => {
|
||||
expect(getModelMaxTokens('gemini-1.5-pro-latest', EModelEndpoint.google)).toBe(
|
||||
maxTokensMap[EModelEndpoint.google]['gemini-1.5'],
|
||||
);
|
||||
expect(getModelMaxTokens('gemini-1.5-pro-preview-0409', EModelEndpoint.google)).toBe(
|
||||
maxTokensMap[EModelEndpoint.google]['gemini-1.5'],
|
||||
);
|
||||
expect(getModelMaxTokens('gemini-pro-vision', EModelEndpoint.google)).toBe(
|
||||
maxTokensMap[EModelEndpoint.google]['gemini-pro-vision'],
|
||||
);
|
||||
expect(getModelMaxTokens('gemini-1.0', EModelEndpoint.google)).toBe(
|
||||
maxTokensMap[EModelEndpoint.google]['gemini'],
|
||||
);
|
||||
expect(getModelMaxTokens('gemini-pro', EModelEndpoint.google)).toBe(
|
||||
maxTokensMap[EModelEndpoint.google]['gemini'],
|
||||
);
|
||||
@@ -142,6 +160,15 @@ describe('getModelMaxTokens', () => {
|
||||
);
|
||||
});
|
||||
|
||||
test('should return correct tokens for partial match - Cohere models', () => {
|
||||
expect(getModelMaxTokens('command', EModelEndpoint.custom)).toBe(
|
||||
maxTokensMap[EModelEndpoint.custom]['command'],
|
||||
);
|
||||
expect(getModelMaxTokens('command-r-plus', EModelEndpoint.custom)).toBe(
|
||||
maxTokensMap[EModelEndpoint.custom]['command-r-plus'],
|
||||
);
|
||||
});
|
||||
|
||||
test('should return correct tokens when using a custom endpointTokenConfig', () => {
|
||||
const customTokenConfig = {
|
||||
'custom-model': 12345,
|
||||
|
||||
client/check_updates.sh (new executable file, 51 lines)
@@ -0,0 +1,51 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Set the directory containing the package.json file
|
||||
dir=${1:-.}
|
||||
|
||||
# Today's date and the date 3 days ago in seconds since the Unix epoch
|
||||
today=$(date +%s)
|
||||
three_days_ago=$(date -d "3 days ago" +%s)
|
||||
|
||||
# Read dependencies and devDependencies from package.json
|
||||
dependencies=$(jq -r '.dependencies,.devDependencies|keys[]' "$dir/package.json")
|
||||
packages=($dependencies) # Convert JSON array to bash array
|
||||
|
||||
# Array to hold update messages
|
||||
declare -a updates
|
||||
|
||||
# Loop over each package
|
||||
for pkg in "${packages[@]}"
|
||||
do
|
||||
echo "Checking $pkg..."
|
||||
# Retrieve the version time information as JSON
|
||||
times=$(npm view "$pkg" time --json)
|
||||
|
||||
# Loop through dates from the JSON object and check if any are within the last 3 days
|
||||
echo $times | jq -r '. | to_entries[] | select(.key as $k | $k|test("^[0-9]")) | [.key, .value] | @csv' | while IFS="," read -r version date
|
||||
do
|
||||
# Format the date to remove quotes and trim it
|
||||
date=$(echo $date | tr -d '"' | xargs)
|
||||
# Convert date to seconds since the Unix epoch
|
||||
version_date=$(date -d "$date" +%s)
|
||||
|
||||
# Check if this date is within the last three days
|
||||
if (( version_date > three_days_ago && version_date <= today ))
|
||||
then
|
||||
# Convert UTC to Eastern Time (ET), ensuring compatibility
|
||||
et_date=$(date -u -d "$date" +"%Y-%m-%d %H:%M:%S UTC")
|
||||
et_date=$(date -d "$et_date -4 hours" +"%Y-%m-%d %H:%M:%S ET")
|
||||
update_message="Version $version of $pkg was released on $et_date"
|
||||
echo "$update_message"
|
||||
updates+=("$update_message")
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
# Display all collected updates
|
||||
if [ ${#updates[@]} -eq 0 ]; then
|
||||
echo "No recent updates found within the last three days."
|
||||
else
|
||||
echo "Recent updates within the last three days:"
|
||||
printf "%s\n" "${updates[@]}"
|
||||
fi
|
||||
@@ -5,6 +5,7 @@
|
||||
<meta name="theme-color" content="#171717">
|
||||
<meta name="mobile-web-app-capable" content="yes">
|
||||
<meta name="apple-mobile-web-app-capable" content="yes">
|
||||
<meta name="apple-mobile-web-app-status-bar-style" content="black-translucent">
|
||||
<title>LibreChat</title>
|
||||
<link
|
||||
rel="shortcut icon"
|
||||
|
||||
@@ -14,12 +14,12 @@ server {
|
||||
# The default limits for image uploads as of 11/22/23 is 20MB/file, and 25MB/request
|
||||
client_max_body_size 25M;
|
||||
|
||||
location /api {
|
||||
proxy_pass http://api:3080/api;
|
||||
location /api/ {
|
||||
proxy_pass http://api:3080$request_uri;
|
||||
}
|
||||
|
||||
location / {
|
||||
proxy_pass http://api:3080;
|
||||
proxy_pass http://api:3080/;
|
||||
}
|
||||
|
||||
######################################## SSL ########################################
|
||||
|
||||
@@ -1,10 +1,11 @@
{
  "name": "@librechat/frontend",
  "version": "0.7.0",
  "version": "0.7.1",
  "description": "",
  "type": "module",
  "scripts": {
    "data-provider": "cd .. && npm run build:data-provider",
    "build:file": "cross-env NODE_ENV=production vite build --debug > vite-output.log 2>&1",
    "build": "cross-env NODE_ENV=production vite build",
    "build:ci": "cross-env NODE_ENV=development vite build --mode ci",
    "dev": "cross-env NODE_ENV=development vite",
@@ -27,6 +28,7 @@
  },
  "homepage": "https://librechat.ai",
  "dependencies": {
    "@ariakit/react": "^0.4.5",
    "@dicebear/collection": "^7.0.4",
    "@dicebear/core": "^7.0.4",
    "@headlessui/react": "^1.7.13",
@@ -65,6 +67,7 @@
    "librechat-data-provider": "*",
    "lodash": "^4.17.21",
    "lucide-react": "^0.220.0",
    "match-sorter": "^6.3.4",
    "rc-input-number": "^7.4.2",
    "react": "^18.2.0",
    "react-dnd": "^16.0.1",
@@ -125,7 +128,7 @@
    "ts-jest": "^29.1.0",
    "typescript": "^5.0.4",
    "vite": "^5.1.1",
    "vite-plugin-html": "^3.2.0",
    "vite-plugin-node-polyfills": "^0.17.0"
    "vite-plugin-node-polyfills": "^0.17.0",
    "vite-plugin-pwa": "^0.19.8"
  }
}
BIN  client/public/assets/cohere.png  Normal file
Binary file not shown. (After: 26 KiB)

BIN  client/public/assets/maskable-icon.png  Normal file
Binary file not shown. (After: 138 KiB)
@@ -2,6 +2,7 @@ import { FileSources } from 'librechat-data-provider';
import type { ColumnDef } from '@tanstack/react-table';
import type { SetterOrUpdater } from 'recoil';
import type {
  TSetOption as SetOption,
  TConversation,
  TMessage,
  TPreset,
@@ -20,6 +21,17 @@ export type GenericSetter<T> = (value: T | ((currentValue: T) => T)) => void;

export type LastSelectedModels = Record<EModelEndpoint, string>;

export type LocalizeFunction = (phraseKey: string, ...values: string[]) => string;

export const mainTextareaId = 'prompt-textarea';

export enum IconContext {
  landing = 'landing',
  menuItem = 'menu-item',
  nav = 'nav',
  message = 'message',
}

export type NavLink = {
  title: string;
  label?: string;
@@ -82,15 +94,16 @@ export type AssistantPanelProps = {

export type AugmentedColumnDef<TData, TValue> = ColumnDef<TData, TValue> & ColumnMeta;

export type TSetOption = (
  param: number | string,
) => (newValue: number | string | boolean | Partial<TPreset>) => void;
export type TSetOption = SetOption;

export type TSetExample = (
  i: number,
  type: string,
  newValue: number | string | boolean | null,
) => void;

export const defaultDebouncedDelay = 450;

export enum ESide {
  Top = 'top',
  Right = 'right',
@@ -174,6 +187,7 @@ export type TAskProps = {
export type TOptions = {
  editedMessageId?: string | null;
  editedText?: string | null;
  resubmitFiles?: boolean;
  isRegenerate?: boolean;
  isContinued?: boolean;
  isEdited?: boolean;
@@ -296,6 +310,8 @@ export type Option = Record<string, unknown> & {
  value: string | number | null;
};

export type OptionWithIcon = Option & { icon?: React.ReactNode };

export type TOptionSettings = {
  showExamples?: boolean;
  isCodeChat?: boolean;
@@ -319,3 +335,9 @@ export interface ExtendedFile {
}

export type ContextType = { navVisible: boolean; setNavVisible: (visible: boolean) => void };

export interface SwitcherProps {
  endpoint?: EModelEndpoint | null;
  endpointKeyProvided: boolean;
  isCollapsed: boolean;
}
@@ -13,6 +13,7 @@ import { TextareaAutosize } from '~/components/ui';
import { useGetFileConfig } from '~/data-provider';
import { cn, removeFocusOutlines } from '~/utils';
import AttachFile from './Files/AttachFile';
import { mainTextareaId } from '~/common';
import StopButton from './StopButton';
import SendButton from './SendButton';
import FileRow from './Files/FileRow';
@@ -28,14 +29,11 @@ const ChatForm = ({ index = 0 }) => {
    defaultValues: { text: '' },
  });

  const { handlePaste, handleKeyUp, handleKeyDown, handleCompositionStart, handleCompositionEnd } =
    useTextarea({
      textAreaRef,
      submitButtonRef,
      disabled: !!requiresKey,
      setValue: methods.setValue,
      getValues: methods.getValues,
    });
  const { handlePaste, handleKeyDown, handleCompositionStart, handleCompositionEnd } = useTextarea({
    textAreaRef,
    submitButtonRef,
    disabled: !!requiresKey,
  });

  const {
    ask,
@@ -57,9 +55,6 @@ const ChatForm = ({ index = 0 }) => {
      }
      ask({ text: data.text });
      methods.reset();
      if (textAreaRef.current) {
        textAreaRef.current.value = '';
      }
    },
    [ask, methods],
  );
@@ -83,6 +78,13 @@ const ChatForm = ({ index = 0 }) => {
    [requiresKey, invalidAssistant],
  );

  const { ref, ...registerProps } = methods.register('text', {
    required: true,
    onChange: (e) => {
      methods.setValue('text', e.target.value);
    },
  });

  return (
    <form
      onSubmit={methods.handleSubmit((data) => submitMessage(data))}
@@ -103,23 +105,18 @@ const ChatForm = ({ index = 0 }) => {
        />
        {endpoint && (
          <TextareaAutosize
            {...methods.register('text', {
              required: true,
              onChange: (e) => {
                methods.setValue('text', e.target.value);
              },
            })}
            {...registerProps}
            autoFocus
            ref={(e) => {
              ref(e);
              textAreaRef.current = e;
            }}
            disabled={disableInputs}
            onPaste={handlePaste}
            onKeyUp={handleKeyUp}
            onKeyDown={handleKeyDown}
            onCompositionStart={handleCompositionStart}
            onCompositionEnd={handleCompositionEnd}
            id="prompt-textarea"
            id={mainTextareaId}
            tabIndex={0}
            data-testid="text-input"
            style={{ height: 44, overflowY: 'auto' }}
@@ -17,6 +17,7 @@ export const files: TFile[] = [
    updatedAt: '2024-01-23T18:25:48.153Z',
    usage: 0,
    user: '652ac880c4102a77fe54c5db',
    embedded: false,
  },
  {
    _id: '65b004abd70ce86b9146e861',
@@ -34,6 +35,7 @@ export const files: TFile[] = [
    usage: 0,
    user: '652ac880c4102a77fe54c5db',
    width: 1024,
    embedded: false,
  },
  {
    _id: '65b00495d70ce86b9146adc1',
@@ -51,6 +53,7 @@ export const files: TFile[] = [
    usage: 0,
    user: '652ac880c4102a77fe54c5db',
    width: 1024,
    embedded: false,
  },
  {
    _id: '65b00494d70ce86b9146ace6',
@@ -68,5 +71,6 @@ export const files: TFile[] = [
    usage: 0,
    user: '652ac880c4102a77fe54c5db',
    width: 1024,
    embedded: false,
  },
];
@@ -1,5 +1,44 @@
import { EModelEndpoint, KnownEndpoints } from 'librechat-data-provider';
import { CustomMinimalIcon } from '~/components/svg';
import { IconContext } from '~/common';

const knownEndpointAssets = {
  [KnownEndpoints.mistral]: '/assets/mistral.png',
  [KnownEndpoints.openrouter]: '/assets/openrouter.png',
  [KnownEndpoints.groq]: '/assets/groq.png',
  [KnownEndpoints.shuttleai]: '/assets/shuttleai.png',
  [KnownEndpoints.anyscale]: '/assets/anyscale.png',
  [KnownEndpoints.fireworks]: '/assets/fireworks.png',
  [KnownEndpoints.ollama]: '/assets/ollama.png',
  [KnownEndpoints.perplexity]: '/assets/perplexity.png',
  [KnownEndpoints['together.ai']]: '/assets/together.png',
  [KnownEndpoints.cohere]: '/assets/cohere.png',
};

const knownEndpointClasses = {
  [KnownEndpoints.cohere]: {
    [IconContext.landing]: 'p-2',
  },
};

const getKnownClass = ({
  currentEndpoint,
  context = '',
  className,
}: {
  currentEndpoint: string;
  context?: string;
  className: string;
}) => {
  if (currentEndpoint === KnownEndpoints.openrouter) {
    return className;
  }

  const match = knownEndpointClasses[currentEndpoint]?.[context];
  const defaultClass = context === IconContext.landing ? '' : className;

  return match ?? defaultClass;
};

export default function UnknownIcon({
  className = '',
@@ -20,73 +59,23 @@ export default function UnknownIcon({

  if (iconURL) {
    return <img className={className} src={iconURL} alt={`${endpoint} Icon`} />;
  } else if (currentEndpoint === KnownEndpoints.mistral) {
    return (
      <img
        className={context === 'landing' ? '' : className}
        src="/assets/mistral.png"
        alt="Mistral AI Icon"
      />
    );
  } else if (currentEndpoint === KnownEndpoints.openrouter) {
    return <img className={className} src="/assets/openrouter.png" alt="OpenRouter Icon" />;
  } else if (currentEndpoint === KnownEndpoints.groq) {
    return (
      <img
        className={context === 'landing' ? '' : className}
        src="/assets/groq.png"
        alt="Groq Cloud Icon"
      />
    );
  } else if (currentEndpoint === KnownEndpoints.shuttleai) {
    return (
      <img
        className={context === 'landing' ? '' : className}
        src="/assets/shuttleai.png"
        alt="ShuttleAI Icon"
      />
    );
  } else if (currentEndpoint === KnownEndpoints.anyscale) {
    return (
      <img
        className={context === 'landing' ? '' : className}
        src="/assets/anyscale.png"
        alt="Anyscale Icon"
      />
    );
  } else if (currentEndpoint === KnownEndpoints.fireworks) {
    return (
      <img
        className={context === 'landing' ? '' : className}
        src="/assets/fireworks.png"
        alt="Fireworks Icon"
      />
    );
  } else if (currentEndpoint === KnownEndpoints.ollama) {
    return (
      <img
        className={context === 'landing' ? '' : className}
        src="/assets/ollama.png"
        alt="Ollama Icon"
      />
    );
  } else if (currentEndpoint === KnownEndpoints.perplexity) {
    return (
      <img
        className={context === 'landing' ? '' : className}
        src="/assets/perplexity.png"
        alt="Perplexity Icon"
      />
    );
  } else if (currentEndpoint === KnownEndpoints['together.ai']) {
    return (
      <img
        className={context === 'landing' ? '' : className}
        src="/assets/together.png"
        alt="together.ai Icon"
      />
    );
  }

  return <CustomMinimalIcon className={className} />;
  const assetPath = knownEndpointAssets[currentEndpoint];

  if (!assetPath) {
    return <CustomMinimalIcon className={className} />;
  }

  return (
    <img
      className={getKnownClass({
        currentEndpoint,
        context: context,
        className,
      })}
      src={assetPath}
      alt={`${currentEndpoint} Icon`}
    />
  );
}
@@ -1,4 +1,3 @@
import { Trash2 } from 'lucide-react';
import { useRecoilValue } from 'recoil';
import { Close } from '@radix-ui/react-popover';
import { Flipper, Flipped } from 'react-flip-toolkit';
@@ -13,6 +12,7 @@ import { Dialog, DialogTrigger, Label } from '~/components/ui/';
import { MenuSeparator, MenuItem } from '../UI';
import { icons } from '../Endpoints/Icons';
import { useLocalize } from '~/hooks';
import { cn } from '~/utils';
import store from '~/store';

const PresetItems: FC<{
@@ -143,7 +143,12 @@ const PresetItems: FC<{
                >
                  <div className="flex h-full items-center justify-end gap-1">
                    <button
                      className="m-0 h-full rounded-md p-2 text-gray-400 hover:text-gray-700 dark:bg-gray-600 dark:text-gray-400 dark:hover:text-gray-200 sm:invisible sm:group-hover:visible"
                      className={cn(
                        'm-0 h-full rounded-md bg-transparent p-2 text-gray-400 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200',
                        defaultPreset?.presetId === preset.presetId
                          ? ''
                          : 'sm:invisible sm:group-hover:visible',
                      )}
                      onClick={(e) => {
                        e.preventDefault();
                        e.stopPropagation();
@@ -85,14 +85,14 @@ const CodeInProgress = ({
          style={{ opacity: 1, transform: 'none' }}
          data-projection-id="77"
        >
          <div>
          <div className='absolute right-[1.5px] bottom-[1.5px]'>
            <svg
              xmlns="http://www.w3.org/2000/svg"
              xmlnsXlink="http://www.w3.org/1999/xlink"
              viewBox="0 0 20 20"
              width="20"
              height="20"
              style={{ width: '100%', height: '100%', transform: 'translate3d(0px, 0px, 0px)' }}
              style={{ transform: 'translate3d(0px, 0px, 0px)' }}
              preserveAspectRatio="xMidYMid meet"
            >
              <defs>
@@ -101,7 +101,10 @@ const CodeInProgress = ({
                </clipPath>
              </defs>
              <g clipPath="url(#__lottie_element_11)">
                <g style={{ display: 'block', transform: 'matrix(1,0,0,1,-2,-2)', opacity: 1 }}>
                <g
                  style={{ display: 'block', transform: 'matrix(1,0,0,1,-2,-2)', opacity: 1 }}
                  className="slide-from-left"
                >
                  <g opacity="1" transform="matrix(1,0,0,1,7.026679992675781,8.834091186523438)">
                    <path
                      fill="rgb(177,98,253)"
@@ -119,7 +122,10 @@ const CodeInProgress = ({
                    />
                  </g>
                </g>
                <g style={{ display: 'block', transform: 'matrix(1,0,0,1,-2,-2)', opacity: 1 }}>
                <g
                  style={{ display: 'block', transform: 'matrix(1,0,0,1,-2,-2)', opacity: 1 }}
                  className="slide-to-down"
                >
                  <g opacity="1" transform="matrix(1,0,0,1,11.79640007019043,13.512199401855469)">
                    <path
                      fill="rgb(177,98,253)"
@@ -1,6 +1,9 @@
// Container Component
const Container = ({ children }: { children: React.ReactNode }) => (
import { TMessage } from 'librechat-data-provider';
import Files from './Files';

const Container = ({ children, message }: { children: React.ReactNode; message: TMessage }) => (
  <div className="text-message flex min-h-[20px] flex-col items-start gap-3 overflow-x-auto [.text-message+&]:mt-5">
    {message.isCreatedByUser && <Files message={message} />}
    {children}
  </div>
);
@@ -38,7 +38,7 @@ any) => {
      {!isSubmitting && unfinished && (
        <Suspense>
          <DelayedRender delay={250}>
            <UnfinishedMessage key={`unfinished-${messageId}`} />
            <UnfinishedMessage message={message} key={`unfinished-${messageId}`} />
          </DelayedRender>
        </Suspense>
      )}
@@ -3,10 +3,10 @@ import { EModelEndpoint } from 'librechat-data-provider';
import { useState, useRef, useEffect, useCallback } from 'react';
import { useUpdateMessageMutation } from 'librechat-data-provider/react-query';
import type { TEditProps } from '~/common';
import Container from '~/components/Messages/Content/Container';
import { cn, removeFocusOutlines } from '~/utils';
import { useChatContext } from '~/Providers';
import { useLocalize } from '~/hooks';
import Container from './Container';

const EditMessage = ({
  text,
@@ -39,11 +39,16 @@ const EditMessage = ({

  const resubmitMessage = () => {
    if (message.isCreatedByUser) {
      ask({
        text: editedText,
        parentMessageId,
        conversationId,
      });
      ask(
        {
          text: editedText,
          parentMessageId,
          conversationId,
        },
        {
          resubmitFiles: true,
        },
      );

      setSiblingIdx((siblingIdx ?? 0) - 1);
    } else {
@@ -105,7 +110,7 @@ const EditMessage = ({
  );

  return (
    <Container>
    <Container message={message}>
      <TextareaAutosize
        ref={textAreaRef}
        onChange={(e) => {
Some files were not shown because too many files have changed in this diff.