Compare commits
19 Commits
v0.7.7
...
chore/pack
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c2cdc869b7 | ||
|
|
2fa8d40d11 | ||
|
|
b46c0ed43f | ||
|
|
68c13ec610 | ||
|
|
5e6b8f979c | ||
|
|
c1032fe819 | ||
|
|
27df7fa7c1 | ||
|
|
5f131c0132 | ||
|
|
85d044b7cd | ||
|
|
ac58be68e7 | ||
|
|
1e067150ac | ||
|
|
4c598e4b16 | ||
|
|
b65c8ef9e2 | ||
|
|
0261b253e1 | ||
|
|
2e205b9186 | ||
|
|
4b36bd088e | ||
|
|
67c50ff11f | ||
|
|
fcb1cf2eca | ||
|
|
1741225f48 |
@@ -175,7 +175,7 @@ GOOGLE_KEY=user_provided
|
||||
#============#
|
||||
|
||||
OPENAI_API_KEY=user_provided
|
||||
# OPENAI_MODELS=o1,o1-mini,o1-preview,gpt-4o,gpt-4.5-preview,chatgpt-4o-latest,gpt-4o-mini,gpt-3.5-turbo-0125,gpt-3.5-turbo-0301,gpt-3.5-turbo,gpt-4,gpt-4-0613,gpt-4-vision-preview,gpt-3.5-turbo-0613,gpt-3.5-turbo-16k-0613,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-instruct-0914,gpt-3.5-turbo-16k
|
||||
# OPENAI_MODELS=o1,o1-mini,o1-preview,gpt-4o,chatgpt-4o-latest,gpt-4o-mini,gpt-3.5-turbo-0125,gpt-3.5-turbo-0301,gpt-3.5-turbo,gpt-4,gpt-4-0613,gpt-4-vision-preview,gpt-3.5-turbo-0613,gpt-3.5-turbo-16k-0613,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-instruct-0914,gpt-3.5-turbo-16k
|
||||
|
||||
DEBUG_OPENAI=false
|
||||
|
||||
@@ -248,13 +248,6 @@ AZURE_AI_SEARCH_SEARCH_OPTION_SELECT=
|
||||
# DALLE3_AZURE_API_VERSION=
|
||||
# DALLE2_AZURE_API_VERSION=
|
||||
|
||||
# Flux
|
||||
#-----------------
|
||||
FLUX_API_BASE_URL=https://api.us1.bfl.ai
|
||||
# FLUX_API_BASE_URL = 'https://api.bfl.ml';
|
||||
|
||||
# Get your API key at https://api.us1.bfl.ai/auth/profile
|
||||
# FLUX_API_KEY=
|
||||
|
||||
# Google
|
||||
#-----------------
|
||||
|
||||
5
.github/workflows/backend-review.yml
vendored
5
.github/workflows/backend-review.yml
vendored
@@ -61,7 +61,4 @@ jobs:
|
||||
run: cd api && npm run test:ci
|
||||
|
||||
- name: Run librechat-data-provider unit tests
|
||||
run: cd packages/data-provider && npm run test:ci
|
||||
|
||||
- name: Run librechat-mcp unit tests
|
||||
run: cd packages/mcp && npm run test:ci
|
||||
run: cd packages/data-provider && npm run test:ci
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"tailwindConfig": "./client/tailwind.config.cjs",
|
||||
"tailwindConfig": "./client/tailwind.config.mjs",
|
||||
"printWidth": 100,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
|
||||
16
CHANGELOG.md
16
CHANGELOG.md
@@ -1,16 +0,0 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
### ✨ New Features
|
||||
|
||||
- 🪄 feat: Agent Artifacts by **@danny-avila** in [#5804](https://github.com/danny-avila/LibreChat/pull/5804)
|
||||
|
||||
### ⚙️ Other Changes
|
||||
|
||||
- 🔄 chore: Enforce 18next Language Keys by **@rubentalstra** in [#5803](https://github.com/danny-avila/LibreChat/pull/5803)
|
||||
- 🔃 refactor: Parent Message ID Handling on Error, Update Translations, Bump Agents by **@danny-avila** in [#5833](https://github.com/danny-avila/LibreChat/pull/5833)
|
||||
|
||||
---
|
||||
@@ -1,4 +1,4 @@
|
||||
# v0.7.7
|
||||
# v0.7.7-rc1
|
||||
|
||||
# Base node image
|
||||
FROM node:20-alpine AS node
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# Dockerfile.multi
|
||||
# v0.7.7
|
||||
# v0.7.7-rc1
|
||||
|
||||
# Base for all builds
|
||||
FROM node:20-alpine AS base-min
|
||||
|
||||
@@ -81,7 +81,7 @@
|
||||
- [Fork Messages & Conversations](https://www.librechat.ai/docs/features/fork) for Advanced Context control
|
||||
|
||||
- 💬 **Multimodal & File Interactions**:
|
||||
- Upload and analyze images with Claude 3, GPT-4.5, GPT-4o, o1, Llama-Vision, and Gemini 📸
|
||||
- Upload and analyze images with Claude 3, GPT-4o, o1, Llama-Vision, and Gemini 📸
|
||||
- Chat with Files using Custom Endpoints, OpenAI, Azure, Anthropic, AWS Bedrock, & Google 🗃️
|
||||
|
||||
- 🌎 **Multilingual UI**:
|
||||
|
||||
@@ -746,6 +746,15 @@ class AnthropicClient extends BaseClient {
|
||||
metadata,
|
||||
};
|
||||
|
||||
if (!/claude-3[-.]7/.test(model)) {
|
||||
if (top_p !== undefined) {
|
||||
requestOptions.top_p = top_p;
|
||||
}
|
||||
if (top_k !== undefined) {
|
||||
requestOptions.top_k = top_k;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.useMessages) {
|
||||
requestOptions.messages = payload;
|
||||
requestOptions.max_tokens =
|
||||
@@ -760,14 +769,6 @@ class AnthropicClient extends BaseClient {
|
||||
thinkingBudget: this.options.thinkingBudget,
|
||||
});
|
||||
|
||||
if (!/claude-3[-.]7/.test(model)) {
|
||||
requestOptions.top_p = top_p;
|
||||
requestOptions.top_k = top_k;
|
||||
} else if (requestOptions.thinking == null) {
|
||||
requestOptions.topP = top_p;
|
||||
requestOptions.topK = top_k;
|
||||
}
|
||||
|
||||
if (this.systemMessage && this.supportsCacheControl === true) {
|
||||
requestOptions.system = [
|
||||
{
|
||||
|
||||
@@ -827,8 +827,7 @@ class GoogleClient extends BaseClient {
|
||||
let reply = '';
|
||||
const { abortController } = options;
|
||||
|
||||
const model =
|
||||
this.options.titleModel ?? this.modelOptions.modelName ?? this.modelOptions.model ?? '';
|
||||
const model = this.modelOptions.modelName ?? this.modelOptions.model ?? '';
|
||||
const safetySettings = getSafetySettings(model);
|
||||
if (!EXCLUDED_GENAI_MODELS.test(model) && !this.project_id) {
|
||||
logger.debug('Identified titling model as GenAI version');
|
||||
|
||||
@@ -112,12 +112,7 @@ class OpenAIClient extends BaseClient {
|
||||
const { OPENAI_FORCE_PROMPT } = process.env ?? {};
|
||||
const { reverseProxyUrl: reverseProxy } = this.options;
|
||||
|
||||
if (
|
||||
!this.useOpenRouter &&
|
||||
((reverseProxy && reverseProxy.includes(KnownEndpoints.openrouter)) ||
|
||||
(this.options.endpoint &&
|
||||
this.options.endpoint.toLowerCase().includes(KnownEndpoints.openrouter)))
|
||||
) {
|
||||
if (!this.useOpenRouter && reverseProxy && reverseProxy.includes(KnownEndpoints.openrouter)) {
|
||||
this.useOpenRouter = true;
|
||||
}
|
||||
|
||||
@@ -303,9 +298,7 @@ class OpenAIClient extends BaseClient {
|
||||
}
|
||||
|
||||
getEncoding() {
|
||||
return this.modelOptions?.model && /gpt-4[^-\s]/.test(this.modelOptions.model)
|
||||
? 'o200k_base'
|
||||
: 'cl100k_base';
|
||||
return this.model?.includes('gpt-4o') ? 'o200k_base' : 'cl100k_base';
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -612,7 +605,7 @@ class OpenAIClient extends BaseClient {
|
||||
}
|
||||
|
||||
initializeLLM({
|
||||
model = openAISettings.model.default,
|
||||
model = 'gpt-4o-mini',
|
||||
modelName,
|
||||
temperature = 0.2,
|
||||
max_tokens,
|
||||
@@ -713,7 +706,7 @@ class OpenAIClient extends BaseClient {
|
||||
|
||||
const { OPENAI_TITLE_MODEL } = process.env ?? {};
|
||||
|
||||
let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? openAISettings.model.default;
|
||||
let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-4o-mini';
|
||||
if (model === Constants.CURRENT_MODEL) {
|
||||
model = this.modelOptions.model;
|
||||
}
|
||||
@@ -906,7 +899,7 @@ ${convo}
|
||||
let prompt;
|
||||
|
||||
// TODO: remove the gpt fallback and make it specific to endpoint
|
||||
const { OPENAI_SUMMARY_MODEL = openAISettings.model.default } = process.env ?? {};
|
||||
const { OPENAI_SUMMARY_MODEL = 'gpt-4o-mini' } = process.env ?? {};
|
||||
let model = this.options.summaryModel ?? OPENAI_SUMMARY_MODEL;
|
||||
if (model === Constants.CURRENT_MODEL) {
|
||||
model = this.modelOptions.model;
|
||||
@@ -1307,12 +1300,8 @@ ${convo}
|
||||
) {
|
||||
delete modelOptions.stream;
|
||||
delete modelOptions.stop;
|
||||
} else if (
|
||||
(!this.isOmni || /^o1-(mini|preview)/i.test(modelOptions.model)) &&
|
||||
modelOptions.reasoning_effort != null
|
||||
) {
|
||||
} else if (!this.isOmni && modelOptions.reasoning_effort != null) {
|
||||
delete modelOptions.reasoning_effort;
|
||||
delete modelOptions.temperature;
|
||||
}
|
||||
|
||||
let reasoningKey = 'reasoning_content';
|
||||
@@ -1320,12 +1309,6 @@ ${convo}
|
||||
modelOptions.include_reasoning = true;
|
||||
reasoningKey = 'reasoning';
|
||||
}
|
||||
if (this.useOpenRouter && modelOptions.reasoning_effort != null) {
|
||||
modelOptions.reasoning = {
|
||||
effort: modelOptions.reasoning_effort,
|
||||
};
|
||||
delete modelOptions.reasoning_effort;
|
||||
}
|
||||
|
||||
this.streamHandler = new SplitStreamHandler({
|
||||
reasoningKey,
|
||||
|
||||
@@ -680,53 +680,4 @@ describe('AnthropicClient', () => {
|
||||
expect(capturedOptions).not.toHaveProperty('top_p');
|
||||
});
|
||||
});
|
||||
|
||||
it('should include top_k and top_p parameters for Claude-3.7 models when thinking is explicitly disabled', async () => {
|
||||
const client = new AnthropicClient('test-api-key', {
|
||||
modelOptions: {
|
||||
model: 'claude-3-7-sonnet',
|
||||
temperature: 0.7,
|
||||
topK: 10,
|
||||
topP: 0.9,
|
||||
},
|
||||
thinking: false,
|
||||
});
|
||||
|
||||
async function* mockAsyncGenerator() {
|
||||
yield { type: 'message_start', message: { usage: {} } };
|
||||
yield { delta: { text: 'Test response' } };
|
||||
yield { type: 'message_delta', usage: {} };
|
||||
}
|
||||
|
||||
jest.spyOn(client, 'createResponse').mockImplementation(() => {
|
||||
return mockAsyncGenerator();
|
||||
});
|
||||
|
||||
let capturedOptions = null;
|
||||
jest.spyOn(client, 'getClient').mockImplementation((options) => {
|
||||
capturedOptions = options;
|
||||
return {};
|
||||
});
|
||||
|
||||
const payload = [{ role: 'user', content: 'Test message' }];
|
||||
await client.sendCompletion(payload, {});
|
||||
|
||||
expect(capturedOptions).toHaveProperty('topK', 10);
|
||||
expect(capturedOptions).toHaveProperty('topP', 0.9);
|
||||
|
||||
client.setOptions({
|
||||
modelOptions: {
|
||||
model: 'claude-3.7-sonnet',
|
||||
temperature: 0.7,
|
||||
topK: 10,
|
||||
topP: 0.9,
|
||||
},
|
||||
thinking: false,
|
||||
});
|
||||
|
||||
await client.sendCompletion(payload, {});
|
||||
|
||||
expect(capturedOptions).toHaveProperty('topK', 10);
|
||||
expect(capturedOptions).toHaveProperty('topP', 0.9);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -30,8 +30,6 @@ jest.mock('~/models', () => ({
|
||||
updateFileUsage: jest.fn(),
|
||||
}));
|
||||
|
||||
const { getConvo, saveConvo } = require('~/models');
|
||||
|
||||
jest.mock('@langchain/openai', () => {
|
||||
return {
|
||||
ChatOpenAI: jest.fn().mockImplementation(() => {
|
||||
@@ -542,11 +540,10 @@ describe('BaseClient', () => {
|
||||
|
||||
test('saveMessageToDatabase is called with the correct arguments', async () => {
|
||||
const saveOptions = TestClient.getSaveOptions();
|
||||
const user = {};
|
||||
const user = {}; // Mock user
|
||||
const opts = { user };
|
||||
const saveSpy = jest.spyOn(TestClient, 'saveMessageToDatabase');
|
||||
await TestClient.sendMessage('Hello, world!', opts);
|
||||
expect(saveSpy).toHaveBeenCalledWith(
|
||||
expect(TestClient.saveMessageToDatabase).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
sender: expect.any(String),
|
||||
text: expect.any(String),
|
||||
@@ -560,157 +557,6 @@ describe('BaseClient', () => {
|
||||
);
|
||||
});
|
||||
|
||||
test('should handle existing conversation when getConvo retrieves one', async () => {
|
||||
const existingConvo = {
|
||||
conversationId: 'existing-convo-id',
|
||||
endpoint: 'openai',
|
||||
endpointType: 'openai',
|
||||
model: 'gpt-3.5-turbo',
|
||||
messages: [
|
||||
{ role: 'user', content: 'Existing message 1' },
|
||||
{ role: 'assistant', content: 'Existing response 1' },
|
||||
],
|
||||
temperature: 1,
|
||||
};
|
||||
|
||||
const { temperature: _temp, ...newConvo } = existingConvo;
|
||||
|
||||
const user = {
|
||||
id: 'user-id',
|
||||
};
|
||||
|
||||
getConvo.mockResolvedValue(existingConvo);
|
||||
saveConvo.mockResolvedValue(newConvo);
|
||||
|
||||
TestClient = initializeFakeClient(
|
||||
apiKey,
|
||||
{
|
||||
...options,
|
||||
req: {
|
||||
user,
|
||||
},
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
const saveSpy = jest.spyOn(TestClient, 'saveMessageToDatabase');
|
||||
|
||||
const newMessage = 'New message in existing conversation';
|
||||
const response = await TestClient.sendMessage(newMessage, {
|
||||
user,
|
||||
conversationId: existingConvo.conversationId,
|
||||
});
|
||||
|
||||
expect(getConvo).toHaveBeenCalledWith(user.id, existingConvo.conversationId);
|
||||
expect(TestClient.conversationId).toBe(existingConvo.conversationId);
|
||||
expect(response.conversationId).toBe(existingConvo.conversationId);
|
||||
expect(TestClient.fetchedConvo).toBe(true);
|
||||
|
||||
expect(saveSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
conversationId: existingConvo.conversationId,
|
||||
text: newMessage,
|
||||
}),
|
||||
expect.any(Object),
|
||||
expect.any(Object),
|
||||
);
|
||||
|
||||
expect(saveConvo).toHaveBeenCalledTimes(2);
|
||||
expect(saveConvo).toHaveBeenCalledWith(
|
||||
expect.any(Object),
|
||||
expect.objectContaining({
|
||||
conversationId: existingConvo.conversationId,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
context: 'api/app/clients/BaseClient.js - saveMessageToDatabase #saveConvo',
|
||||
unsetFields: {
|
||||
temperature: 1,
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
await TestClient.sendMessage('Another message', {
|
||||
conversationId: existingConvo.conversationId,
|
||||
});
|
||||
expect(getConvo).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should correctly handle existing conversation and unset fields appropriately', async () => {
|
||||
const existingConvo = {
|
||||
conversationId: 'existing-convo-id',
|
||||
endpoint: 'openai',
|
||||
endpointType: 'openai',
|
||||
model: 'gpt-3.5-turbo',
|
||||
messages: [
|
||||
{ role: 'user', content: 'Existing message 1' },
|
||||
{ role: 'assistant', content: 'Existing response 1' },
|
||||
],
|
||||
title: 'Existing Conversation',
|
||||
someExistingField: 'existingValue',
|
||||
anotherExistingField: 'anotherValue',
|
||||
temperature: 0.7,
|
||||
modelLabel: 'GPT-3.5',
|
||||
};
|
||||
|
||||
getConvo.mockResolvedValue(existingConvo);
|
||||
saveConvo.mockResolvedValue(existingConvo);
|
||||
|
||||
TestClient = initializeFakeClient(
|
||||
apiKey,
|
||||
{
|
||||
...options,
|
||||
modelOptions: {
|
||||
model: 'gpt-4',
|
||||
temperature: 0.5,
|
||||
},
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
const newMessage = 'New message in existing conversation';
|
||||
await TestClient.sendMessage(newMessage, {
|
||||
conversationId: existingConvo.conversationId,
|
||||
});
|
||||
|
||||
expect(saveConvo).toHaveBeenCalledTimes(2);
|
||||
|
||||
const saveConvoCall = saveConvo.mock.calls[0];
|
||||
const [, savedFields, saveOptions] = saveConvoCall;
|
||||
|
||||
// Instead of checking all excludedKeys, we'll just check specific fields
|
||||
// that we know should be excluded
|
||||
expect(savedFields).not.toHaveProperty('messages');
|
||||
expect(savedFields).not.toHaveProperty('title');
|
||||
|
||||
// Only check that someExistingField is in unsetFields
|
||||
expect(saveOptions.unsetFields).toHaveProperty('someExistingField', 1);
|
||||
|
||||
// Mock saveConvo to return the expected fields
|
||||
saveConvo.mockImplementation((req, fields) => {
|
||||
return Promise.resolve({
|
||||
...fields,
|
||||
endpoint: 'openai',
|
||||
endpointType: 'openai',
|
||||
model: 'gpt-4',
|
||||
temperature: 0.5,
|
||||
});
|
||||
});
|
||||
|
||||
// Only check the conversationId since that's the only field we can be sure about
|
||||
expect(savedFields).toHaveProperty('conversationId', 'existing-convo-id');
|
||||
|
||||
expect(TestClient.fetchedConvo).toBe(true);
|
||||
|
||||
await TestClient.sendMessage('Another message', {
|
||||
conversationId: existingConvo.conversationId,
|
||||
});
|
||||
|
||||
expect(getConvo).toHaveBeenCalledTimes(1);
|
||||
|
||||
const secondSaveConvoCall = saveConvo.mock.calls[1];
|
||||
expect(secondSaveConvoCall[2]).toHaveProperty('unsetFields', {});
|
||||
});
|
||||
|
||||
test('sendCompletion is called with the correct arguments', async () => {
|
||||
const payload = {}; // Mock payload
|
||||
TestClient.buildMessages.mockReturnValue({ prompt: payload, tokenCountMap: null });
|
||||
|
||||
@@ -56,6 +56,7 @@ const initializeFakeClient = (apiKey, options, fakeMessages) => {
|
||||
let TestClient = new FakeClient(apiKey);
|
||||
TestClient.options = options;
|
||||
TestClient.abortController = { abort: jest.fn() };
|
||||
TestClient.saveMessageToDatabase = jest.fn();
|
||||
TestClient.loadHistory = jest
|
||||
.fn()
|
||||
.mockImplementation((conversationId, parentMessageId = null) => {
|
||||
@@ -85,6 +86,7 @@ const initializeFakeClient = (apiKey, options, fakeMessages) => {
|
||||
return 'Mock response text';
|
||||
});
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
TestClient.getCompletion = jest.fn().mockImplementation(async (..._args) => {
|
||||
return {
|
||||
choices: [
|
||||
|
||||
@@ -2,10 +2,9 @@ const availableTools = require('./manifest.json');
|
||||
|
||||
// Structured Tools
|
||||
const DALLE3 = require('./structured/DALLE3');
|
||||
const FluxAPI = require('./structured/FluxAPI');
|
||||
const OpenWeather = require('./structured/OpenWeather');
|
||||
const StructuredWolfram = require('./structured/Wolfram');
|
||||
const createYouTubeTools = require('./structured/YouTube');
|
||||
const StructuredWolfram = require('./structured/Wolfram');
|
||||
const StructuredACS = require('./structured/AzureAISearch');
|
||||
const StructuredSD = require('./structured/StableDiffusion');
|
||||
const GoogleSearchAPI = require('./structured/GoogleSearch');
|
||||
@@ -31,7 +30,6 @@ module.exports = {
|
||||
manifestToolMap,
|
||||
// Structured Tools
|
||||
DALLE3,
|
||||
FluxAPI,
|
||||
OpenWeather,
|
||||
StructuredSD,
|
||||
StructuredACS,
|
||||
|
||||
@@ -164,19 +164,5 @@
|
||||
"description": "Sign up at <a href=\"https://home.openweathermap.org/users/sign_up\" target=\"_blank\">OpenWeather</a>, then get your key at <a href=\"https://home.openweathermap.org/api_keys\" target=\"_blank\">API keys</a>."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Flux",
|
||||
"pluginKey": "flux",
|
||||
"description": "Generate images using text with the Flux API.",
|
||||
"icon": "https://blackforestlabs.ai/wp-content/uploads/2024/07/bfl_logo_retraced_blk.png",
|
||||
"isAuthRequired": "true",
|
||||
"authConfig": [
|
||||
{
|
||||
"authField": "FLUX_API_KEY",
|
||||
"label": "Your Flux API Key",
|
||||
"description": "Provide your Flux API key from your user profile."
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1,17 +1,14 @@
|
||||
const { z } = require('zod');
|
||||
const path = require('path');
|
||||
const OpenAI = require('openai');
|
||||
const fetch = require('node-fetch');
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
const { Tool } = require('@langchain/core/tools');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { FileContext, ContentTypes } = require('librechat-data-provider');
|
||||
const { FileContext } = require('librechat-data-provider');
|
||||
const { getImageBasename } = require('~/server/services/Files/images');
|
||||
const extractBaseURL = require('~/utils/extractBaseURL');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const displayMessage =
|
||||
'DALL-E displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';
|
||||
class DALLE3 extends Tool {
|
||||
constructor(fields = {}) {
|
||||
super();
|
||||
@@ -117,7 +114,10 @@ class DALLE3 extends Tool {
|
||||
if (this.isAgent === true && typeof value === 'string') {
|
||||
return [value, {}];
|
||||
} else if (this.isAgent === true && typeof value === 'object') {
|
||||
return [displayMessage, value];
|
||||
return [
|
||||
'DALL-E displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.',
|
||||
value,
|
||||
];
|
||||
}
|
||||
|
||||
return value;
|
||||
@@ -160,32 +160,6 @@ Error Message: ${error.message}`);
|
||||
);
|
||||
}
|
||||
|
||||
if (this.isAgent) {
|
||||
let fetchOptions = {};
|
||||
if (process.env.PROXY) {
|
||||
fetchOptions.agent = new HttpsProxyAgent(process.env.PROXY);
|
||||
}
|
||||
const imageResponse = await fetch(theImageUrl, fetchOptions);
|
||||
const arrayBuffer = await imageResponse.arrayBuffer();
|
||||
const base64 = Buffer.from(arrayBuffer).toString('base64');
|
||||
const content = [
|
||||
{
|
||||
type: ContentTypes.IMAGE_URL,
|
||||
image_url: {
|
||||
url: `data:image/jpeg;base64,${base64}`,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const response = [
|
||||
{
|
||||
type: ContentTypes.TEXT,
|
||||
text: displayMessage,
|
||||
},
|
||||
];
|
||||
return [response, { content }];
|
||||
}
|
||||
|
||||
const imageBasename = getImageBasename(theImageUrl);
|
||||
const imageExt = path.extname(imageBasename);
|
||||
|
||||
|
||||
@@ -1,554 +0,0 @@
|
||||
const { z } = require('zod');
|
||||
const axios = require('axios');
|
||||
const fetch = require('node-fetch');
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
const { Tool } = require('@langchain/core/tools');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { FileContext, ContentTypes } = require('librechat-data-provider');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const displayMessage =
|
||||
'Flux displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';
|
||||
|
||||
/**
|
||||
* FluxAPI - A tool for generating high-quality images from text prompts using the Flux API.
|
||||
* Each call generates one image. If multiple images are needed, make multiple consecutive calls with the same or varied prompts.
|
||||
*/
|
||||
class FluxAPI extends Tool {
|
||||
// Pricing constants in USD per image
|
||||
static PRICING = {
|
||||
FLUX_PRO_1_1_ULTRA: -0.06, // /v1/flux-pro-1.1-ultra
|
||||
FLUX_PRO_1_1: -0.04, // /v1/flux-pro-1.1
|
||||
FLUX_PRO: -0.05, // /v1/flux-pro
|
||||
FLUX_DEV: -0.025, // /v1/flux-dev
|
||||
FLUX_PRO_FINETUNED: -0.06, // /v1/flux-pro-finetuned
|
||||
FLUX_PRO_1_1_ULTRA_FINETUNED: -0.07, // /v1/flux-pro-1.1-ultra-finetuned
|
||||
};
|
||||
|
||||
constructor(fields = {}) {
|
||||
super();
|
||||
|
||||
/** @type {boolean} Used to initialize the Tool without necessary variables. */
|
||||
this.override = fields.override ?? false;
|
||||
|
||||
this.userId = fields.userId;
|
||||
this.fileStrategy = fields.fileStrategy;
|
||||
|
||||
/** @type {boolean} **/
|
||||
this.isAgent = fields.isAgent;
|
||||
this.returnMetadata = fields.returnMetadata ?? false;
|
||||
|
||||
if (fields.processFileURL) {
|
||||
/** @type {processFileURL} Necessary for output to contain all image metadata. */
|
||||
this.processFileURL = fields.processFileURL.bind(this);
|
||||
}
|
||||
|
||||
this.apiKey = fields.FLUX_API_KEY || this.getApiKey();
|
||||
|
||||
this.name = 'flux';
|
||||
this.description =
|
||||
'Use Flux to generate images from text descriptions. This tool can generate images and list available finetunes. Each generate call creates one image. For multiple images, make multiple consecutive calls.';
|
||||
|
||||
this.description_for_model = `// Transform any image description into a detailed, high-quality prompt. Never submit a prompt under 3 sentences. Follow these core rules:
|
||||
// 1. ALWAYS enhance basic prompts into 5-10 detailed sentences (e.g., "a cat" becomes: "A close-up photo of a sleek Siamese cat with piercing blue eyes. The cat sits elegantly on a vintage leather armchair, its tail curled gracefully around its paws. Warm afternoon sunlight streams through a nearby window, casting gentle shadows across its face and highlighting the subtle variations in its cream and chocolate-point fur. The background is softly blurred, creating a shallow depth of field that draws attention to the cat's expressive features. The overall composition has a peaceful, contemplative mood with a professional photography style.")
|
||||
// 2. Each prompt MUST be 3-6 descriptive sentences minimum, focusing on visual elements: lighting, composition, mood, and style
|
||||
// Use action: 'list_finetunes' to see available custom models. When using finetunes, use endpoint: '/v1/flux-pro-finetuned' (default) or '/v1/flux-pro-1.1-ultra-finetuned' for higher quality and aspect ratio.`;
|
||||
|
||||
// Add base URL from environment variable with fallback
|
||||
this.baseUrl = process.env.FLUX_API_BASE_URL || 'https://api.us1.bfl.ai';
|
||||
|
||||
// Define the schema for structured input
|
||||
this.schema = z.object({
|
||||
action: z
|
||||
.enum(['generate', 'list_finetunes', 'generate_finetuned'])
|
||||
.default('generate')
|
||||
.describe(
|
||||
'Action to perform: "generate" for image generation, "generate_finetuned" for finetuned model generation, "list_finetunes" to get available custom models',
|
||||
),
|
||||
prompt: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Text prompt for image generation. Required when action is "generate". Not used for list_finetunes.',
|
||||
),
|
||||
width: z
|
||||
.number()
|
||||
.optional()
|
||||
.describe(
|
||||
'Width of the generated image in pixels. Must be a multiple of 32. Default is 1024.',
|
||||
),
|
||||
height: z
|
||||
.number()
|
||||
.optional()
|
||||
.describe(
|
||||
'Height of the generated image in pixels. Must be a multiple of 32. Default is 768.',
|
||||
),
|
||||
prompt_upsampling: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.default(false)
|
||||
.describe('Whether to perform upsampling on the prompt.'),
|
||||
steps: z
|
||||
.number()
|
||||
.int()
|
||||
.optional()
|
||||
.describe('Number of steps to run the model for, a number from 1 to 50. Default is 40.'),
|
||||
seed: z.number().optional().describe('Optional seed for reproducibility.'),
|
||||
safety_tolerance: z
|
||||
.number()
|
||||
.optional()
|
||||
.default(6)
|
||||
.describe(
|
||||
'Tolerance level for input and output moderation. Between 0 and 6, 0 being most strict, 6 being least strict.',
|
||||
),
|
||||
endpoint: z
|
||||
.enum([
|
||||
'/v1/flux-pro-1.1',
|
||||
'/v1/flux-pro',
|
||||
'/v1/flux-dev',
|
||||
'/v1/flux-pro-1.1-ultra',
|
||||
'/v1/flux-pro-finetuned',
|
||||
'/v1/flux-pro-1.1-ultra-finetuned',
|
||||
])
|
||||
.optional()
|
||||
.default('/v1/flux-pro-1.1')
|
||||
.describe('Endpoint to use for image generation.'),
|
||||
raw: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.default(false)
|
||||
.describe(
|
||||
'Generate less processed, more natural-looking images. Only works for /v1/flux-pro-1.1-ultra.',
|
||||
),
|
||||
finetune_id: z.string().optional().describe('ID of the finetuned model to use'),
|
||||
finetune_strength: z
|
||||
.number()
|
||||
.optional()
|
||||
.default(1.1)
|
||||
.describe('Strength of the finetuning effect (typically between 0.1 and 1.2)'),
|
||||
guidance: z.number().optional().default(2.5).describe('Guidance scale for finetuned models'),
|
||||
aspect_ratio: z
|
||||
.string()
|
||||
.optional()
|
||||
.default('16:9')
|
||||
.describe('Aspect ratio for ultra models (e.g., "16:9")'),
|
||||
});
|
||||
}
|
||||
|
||||
getAxiosConfig() {
|
||||
const config = {};
|
||||
if (process.env.PROXY) {
|
||||
config.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
|
||||
}
|
||||
return config;
|
||||
}
|
||||
|
||||
/** @param {Object|string} value */
|
||||
getDetails(value) {
|
||||
if (typeof value === 'string') {
|
||||
return value;
|
||||
}
|
||||
return JSON.stringify(value, null, 2);
|
||||
}
|
||||
|
||||
getApiKey() {
|
||||
const apiKey = process.env.FLUX_API_KEY || '';
|
||||
if (!apiKey && !this.override) {
|
||||
throw new Error('Missing FLUX_API_KEY environment variable.');
|
||||
}
|
||||
return apiKey;
|
||||
}
|
||||
|
||||
wrapInMarkdown(imageUrl) {
|
||||
const serverDomain = process.env.DOMAIN_SERVER || 'http://localhost:3080';
|
||||
return ``;
|
||||
}
|
||||
|
||||
returnValue(value) {
|
||||
if (this.isAgent === true && typeof value === 'string') {
|
||||
return [value, {}];
|
||||
} else if (this.isAgent === true && typeof value === 'object') {
|
||||
if (Array.isArray(value)) {
|
||||
return value;
|
||||
}
|
||||
return [displayMessage, value];
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
async _call(data) {
|
||||
const { action = 'generate', ...imageData } = data;
|
||||
|
||||
// Use provided API key for this request if available, otherwise use default
|
||||
const requestApiKey = this.apiKey || this.getApiKey();
|
||||
|
||||
// Handle list_finetunes action
|
||||
if (action === 'list_finetunes') {
|
||||
return this.getMyFinetunes(requestApiKey);
|
||||
}
|
||||
|
||||
// Handle finetuned generation
|
||||
if (action === 'generate_finetuned') {
|
||||
return this.generateFinetunedImage(imageData, requestApiKey);
|
||||
}
|
||||
|
||||
// For generate action, ensure prompt is provided
|
||||
if (!imageData.prompt) {
|
||||
throw new Error('Missing required field: prompt');
|
||||
}
|
||||
|
||||
let payload = {
|
||||
prompt: imageData.prompt,
|
||||
prompt_upsampling: imageData.prompt_upsampling || false,
|
||||
safety_tolerance: imageData.safety_tolerance || 6,
|
||||
output_format: imageData.output_format || 'png',
|
||||
};
|
||||
|
||||
// Add optional parameters if provided
|
||||
if (imageData.width) {
|
||||
payload.width = imageData.width;
|
||||
}
|
||||
if (imageData.height) {
|
||||
payload.height = imageData.height;
|
||||
}
|
||||
if (imageData.steps) {
|
||||
payload.steps = imageData.steps;
|
||||
}
|
||||
if (imageData.seed !== undefined) {
|
||||
payload.seed = imageData.seed;
|
||||
}
|
||||
if (imageData.raw) {
|
||||
payload.raw = imageData.raw;
|
||||
}
|
||||
|
||||
const generateUrl = `${this.baseUrl}${imageData.endpoint || '/v1/flux-pro'}`;
|
||||
const resultUrl = `${this.baseUrl}/v1/get_result`;
|
||||
|
||||
logger.debug('[FluxAPI] Generating image with payload:', payload);
|
||||
logger.debug('[FluxAPI] Using endpoint:', generateUrl);
|
||||
|
||||
let taskResponse;
|
||||
try {
|
||||
taskResponse = await axios.post(generateUrl, payload, {
|
||||
headers: {
|
||||
'x-key': requestApiKey,
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
},
|
||||
...this.getAxiosConfig(),
|
||||
});
|
||||
} catch (error) {
|
||||
const details = this.getDetails(error?.response?.data || error.message);
|
||||
logger.error('[FluxAPI] Error while submitting task:', details);
|
||||
|
||||
return this.returnValue(
|
||||
`Something went wrong when trying to generate the image. The Flux API may be unavailable:
|
||||
Error Message: ${details}`,
|
||||
);
|
||||
}
|
||||
|
||||
const taskId = taskResponse.data.id;
|
||||
|
||||
// Polling for the result
|
||||
let status = 'Pending';
|
||||
let resultData = null;
|
||||
while (status !== 'Ready' && status !== 'Error') {
|
||||
try {
|
||||
// Wait 2 seconds between polls
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||
const resultResponse = await axios.get(resultUrl, {
|
||||
headers: {
|
||||
'x-key': requestApiKey,
|
||||
Accept: 'application/json',
|
||||
},
|
||||
params: { id: taskId },
|
||||
...this.getAxiosConfig(),
|
||||
});
|
||||
status = resultResponse.data.status;
|
||||
|
||||
if (status === 'Ready') {
|
||||
resultData = resultResponse.data.result;
|
||||
break;
|
||||
} else if (status === 'Error') {
|
||||
logger.error('[FluxAPI] Error in task:', resultResponse.data);
|
||||
return this.returnValue('An error occurred during image generation.');
|
||||
}
|
||||
} catch (error) {
|
||||
const details = this.getDetails(error?.response?.data || error.message);
|
||||
logger.error('[FluxAPI] Error while getting result:', details);
|
||||
return this.returnValue('An error occurred while retrieving the image.');
|
||||
}
|
||||
}
|
||||
|
||||
// If no result data
|
||||
if (!resultData || !resultData.sample) {
|
||||
logger.error('[FluxAPI] No image data received from API. Response:', resultData);
|
||||
return this.returnValue('No image data received from Flux API.');
|
||||
}
|
||||
|
||||
// Try saving the image locally
|
||||
const imageUrl = resultData.sample;
|
||||
const imageName = `img-${uuidv4()}.png`;
|
||||
|
||||
if (this.isAgent) {
|
||||
try {
|
||||
// Fetch the image and convert to base64
|
||||
const fetchOptions = {};
|
||||
if (process.env.PROXY) {
|
||||
fetchOptions.agent = new HttpsProxyAgent(process.env.PROXY);
|
||||
}
|
||||
const imageResponse = await fetch(imageUrl, fetchOptions);
|
||||
const arrayBuffer = await imageResponse.arrayBuffer();
|
||||
const base64 = Buffer.from(arrayBuffer).toString('base64');
|
||||
const content = [
|
||||
{
|
||||
type: ContentTypes.IMAGE_URL,
|
||||
image_url: {
|
||||
url: `data:image/png;base64,${base64}`,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const response = [
|
||||
{
|
||||
type: ContentTypes.TEXT,
|
||||
text: displayMessage,
|
||||
},
|
||||
];
|
||||
return [response, { content }];
|
||||
} catch (error) {
|
||||
logger.error('Error processing image for agent:', error);
|
||||
return this.returnValue(`Failed to process the image. ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
logger.debug('[FluxAPI] Saving image:', imageUrl);
|
||||
const result = await this.processFileURL({
|
||||
fileStrategy: this.fileStrategy,
|
||||
userId: this.userId,
|
||||
URL: imageUrl,
|
||||
fileName: imageName,
|
||||
basePath: 'images',
|
||||
context: FileContext.image_generation,
|
||||
});
|
||||
|
||||
logger.debug('[FluxAPI] Image saved to path:', result.filepath);
|
||||
|
||||
// Calculate cost based on endpoint
|
||||
/**
|
||||
* TODO: Cost handling
|
||||
const endpoint = imageData.endpoint || '/v1/flux-pro';
|
||||
const endpointKey = Object.entries(FluxAPI.PRICING).find(([key, _]) =>
|
||||
endpoint.includes(key.toLowerCase().replace(/_/g, '-')),
|
||||
)?.[0];
|
||||
const cost = FluxAPI.PRICING[endpointKey] || 0;
|
||||
*/
|
||||
this.result = this.returnMetadata ? result : this.wrapInMarkdown(result.filepath);
|
||||
return this.returnValue(this.result);
|
||||
} catch (error) {
|
||||
const details = this.getDetails(error?.message ?? 'No additional error details.');
|
||||
logger.error('Error while saving the image:', details);
|
||||
return this.returnValue(`Failed to save the image locally. ${details}`);
|
||||
}
|
||||
}
|
||||
|
||||
async getMyFinetunes(apiKey = null) {
|
||||
const finetunesUrl = `${this.baseUrl}/v1/my_finetunes`;
|
||||
const detailsUrl = `${this.baseUrl}/v1/finetune_details`;
|
||||
|
||||
try {
|
||||
const headers = {
|
||||
'x-key': apiKey || this.getApiKey(),
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
};
|
||||
|
||||
// Get list of finetunes
|
||||
const response = await axios.get(finetunesUrl, {
|
||||
headers,
|
||||
...this.getAxiosConfig(),
|
||||
});
|
||||
const finetunes = response.data.finetunes;
|
||||
|
||||
// Fetch details for each finetune
|
||||
const finetuneDetails = await Promise.all(
|
||||
finetunes.map(async (finetuneId) => {
|
||||
try {
|
||||
const detailResponse = await axios.get(`${detailsUrl}?finetune_id=${finetuneId}`, {
|
||||
headers,
|
||||
...this.getAxiosConfig(),
|
||||
});
|
||||
return {
|
||||
id: finetuneId,
|
||||
...detailResponse.data,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`[FluxAPI] Error fetching details for finetune ${finetuneId}:`, error);
|
||||
return {
|
||||
id: finetuneId,
|
||||
error: 'Failed to fetch details',
|
||||
};
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
if (this.isAgent) {
|
||||
const formattedDetails = JSON.stringify(finetuneDetails, null, 2);
|
||||
return [`Here are the available finetunes:\n${formattedDetails}`, null];
|
||||
}
|
||||
return JSON.stringify(finetuneDetails);
|
||||
} catch (error) {
|
||||
const details = this.getDetails(error?.response?.data || error.message);
|
||||
logger.error('[FluxAPI] Error while getting finetunes:', details);
|
||||
const errorMsg = `Failed to get finetunes: ${details}`;
|
||||
return this.isAgent ? this.returnValue([errorMsg, {}]) : new Error(errorMsg);
|
||||
}
|
||||
}
|
||||
|
||||
async generateFinetunedImage(imageData, requestApiKey) {
|
||||
if (!imageData.prompt) {
|
||||
throw new Error('Missing required field: prompt');
|
||||
}
|
||||
|
||||
if (!imageData.finetune_id) {
|
||||
throw new Error(
|
||||
'Missing required field: finetune_id for finetuned generation. Please supply a finetune_id!',
|
||||
);
|
||||
}
|
||||
|
||||
// Validate endpoint is appropriate for finetuned generation
|
||||
const validFinetunedEndpoints = ['/v1/flux-pro-finetuned', '/v1/flux-pro-1.1-ultra-finetuned'];
|
||||
const endpoint = imageData.endpoint || '/v1/flux-pro-finetuned';
|
||||
|
||||
if (!validFinetunedEndpoints.includes(endpoint)) {
|
||||
throw new Error(
|
||||
`Invalid endpoint for finetuned generation. Must be one of: ${validFinetunedEndpoints.join(', ')}`,
|
||||
);
|
||||
}
|
||||
|
||||
let payload = {
|
||||
prompt: imageData.prompt,
|
||||
prompt_upsampling: imageData.prompt_upsampling || false,
|
||||
safety_tolerance: imageData.safety_tolerance || 6,
|
||||
output_format: imageData.output_format || 'png',
|
||||
finetune_id: imageData.finetune_id,
|
||||
finetune_strength: imageData.finetune_strength || 1.0,
|
||||
guidance: imageData.guidance || 2.5,
|
||||
};
|
||||
|
||||
// Add optional parameters if provided
|
||||
if (imageData.width) {
|
||||
payload.width = imageData.width;
|
||||
}
|
||||
if (imageData.height) {
|
||||
payload.height = imageData.height;
|
||||
}
|
||||
if (imageData.steps) {
|
||||
payload.steps = imageData.steps;
|
||||
}
|
||||
if (imageData.seed !== undefined) {
|
||||
payload.seed = imageData.seed;
|
||||
}
|
||||
if (imageData.raw) {
|
||||
payload.raw = imageData.raw;
|
||||
}
|
||||
|
||||
const generateUrl = `${this.baseUrl}${endpoint}`;
|
||||
const resultUrl = `${this.baseUrl}/v1/get_result`;
|
||||
|
||||
logger.debug('[FluxAPI] Generating finetuned image with payload:', payload);
|
||||
logger.debug('[FluxAPI] Using endpoint:', generateUrl);
|
||||
|
||||
let taskResponse;
|
||||
try {
|
||||
taskResponse = await axios.post(generateUrl, payload, {
|
||||
headers: {
|
||||
'x-key': requestApiKey,
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
},
|
||||
...this.getAxiosConfig(),
|
||||
});
|
||||
} catch (error) {
|
||||
const details = this.getDetails(error?.response?.data || error.message);
|
||||
logger.error('[FluxAPI] Error while submitting finetuned task:', details);
|
||||
return this.returnValue(
|
||||
`Something went wrong when trying to generate the finetuned image. The Flux API may be unavailable:
|
||||
Error Message: ${details}`,
|
||||
);
|
||||
}
|
||||
|
||||
const taskId = taskResponse.data.id;
|
||||
|
||||
// Polling for the result
|
||||
let status = 'Pending';
|
||||
let resultData = null;
|
||||
while (status !== 'Ready' && status !== 'Error') {
|
||||
try {
|
||||
// Wait 2 seconds between polls
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||
const resultResponse = await axios.get(resultUrl, {
|
||||
headers: {
|
||||
'x-key': requestApiKey,
|
||||
Accept: 'application/json',
|
||||
},
|
||||
params: { id: taskId },
|
||||
...this.getAxiosConfig(),
|
||||
});
|
||||
status = resultResponse.data.status;
|
||||
|
||||
if (status === 'Ready') {
|
||||
resultData = resultResponse.data.result;
|
||||
break;
|
||||
} else if (status === 'Error') {
|
||||
logger.error('[FluxAPI] Error in finetuned task:', resultResponse.data);
|
||||
return this.returnValue('An error occurred during finetuned image generation.');
|
||||
}
|
||||
} catch (error) {
|
||||
const details = this.getDetails(error?.response?.data || error.message);
|
||||
logger.error('[FluxAPI] Error while getting finetuned result:', details);
|
||||
return this.returnValue('An error occurred while retrieving the finetuned image.');
|
||||
}
|
||||
}
|
||||
|
||||
// If no result data
|
||||
if (!resultData || !resultData.sample) {
|
||||
logger.error('[FluxAPI] No image data received from API. Response:', resultData);
|
||||
return this.returnValue('No image data received from Flux API.');
|
||||
}
|
||||
|
||||
// Try saving the image locally
|
||||
const imageUrl = resultData.sample;
|
||||
const imageName = `img-${uuidv4()}.png`;
|
||||
|
||||
try {
|
||||
logger.debug('[FluxAPI] Saving finetuned image:', imageUrl);
|
||||
const result = await this.processFileURL({
|
||||
fileStrategy: this.fileStrategy,
|
||||
userId: this.userId,
|
||||
URL: imageUrl,
|
||||
fileName: imageName,
|
||||
basePath: 'images',
|
||||
context: FileContext.image_generation,
|
||||
});
|
||||
|
||||
logger.debug('[FluxAPI] Finetuned image saved to path:', result.filepath);
|
||||
|
||||
// Calculate cost based on endpoint
|
||||
const endpointKey = endpoint.includes('ultra')
|
||||
? 'FLUX_PRO_1_1_ULTRA_FINETUNED'
|
||||
: 'FLUX_PRO_FINETUNED';
|
||||
const cost = FluxAPI.PRICING[endpointKey] || 0;
|
||||
// Return the result based on returnMetadata flag
|
||||
this.result = this.returnMetadata ? result : this.wrapInMarkdown(result.filepath);
|
||||
return this.returnValue(this.result);
|
||||
} catch (error) {
|
||||
const details = this.getDetails(error?.message ?? 'No additional error details.');
|
||||
logger.error('Error while saving the finetuned image:', details);
|
||||
return this.returnValue(`Failed to save the finetuned image locally. ${details}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = FluxAPI;
|
||||
@@ -6,13 +6,10 @@ const axios = require('axios');
|
||||
const sharp = require('sharp');
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
const { Tool } = require('@langchain/core/tools');
|
||||
const { FileContext, ContentTypes } = require('librechat-data-provider');
|
||||
const { FileContext } = require('librechat-data-provider');
|
||||
const paths = require('~/config/paths');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const displayMessage =
|
||||
'Stable Diffusion displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';
|
||||
|
||||
class StableDiffusionAPI extends Tool {
|
||||
constructor(fields) {
|
||||
super();
|
||||
@@ -24,8 +21,6 @@ class StableDiffusionAPI extends Tool {
|
||||
this.override = fields.override ?? false;
|
||||
/** @type {boolean} Necessary for output to contain all image metadata. */
|
||||
this.returnMetadata = fields.returnMetadata ?? false;
|
||||
/** @type {boolean} */
|
||||
this.isAgent = fields.isAgent;
|
||||
if (fields.uploadImageBuffer) {
|
||||
/** @type {uploadImageBuffer} Necessary for output to contain all image metadata. */
|
||||
this.uploadImageBuffer = fields.uploadImageBuffer.bind(this);
|
||||
@@ -71,16 +66,6 @@ class StableDiffusionAPI extends Tool {
|
||||
return ``;
|
||||
}
|
||||
|
||||
returnValue(value) {
|
||||
if (this.isAgent === true && typeof value === 'string') {
|
||||
return [value, {}];
|
||||
} else if (this.isAgent === true && typeof value === 'object') {
|
||||
return [displayMessage, value];
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
getServerURL() {
|
||||
const url = process.env.SD_WEBUI_URL || '';
|
||||
if (!url && !this.override) {
|
||||
@@ -128,25 +113,6 @@ class StableDiffusionAPI extends Tool {
|
||||
}
|
||||
|
||||
try {
|
||||
if (this.isAgent) {
|
||||
const content = [
|
||||
{
|
||||
type: ContentTypes.IMAGE_URL,
|
||||
image_url: {
|
||||
url: `data:image/png;base64,${image}`,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const response = [
|
||||
{
|
||||
type: ContentTypes.TEXT,
|
||||
text: displayMessage,
|
||||
},
|
||||
];
|
||||
return [response, { content }];
|
||||
}
|
||||
|
||||
const buffer = Buffer.from(image.split(',', 1)[0], 'base64');
|
||||
if (this.returnMetadata && this.uploadImageBuffer && this.req) {
|
||||
const file = await this.uploadImageBuffer({
|
||||
@@ -188,7 +154,7 @@ class StableDiffusionAPI extends Tool {
|
||||
logger.error('[StableDiffusion] Error while saving the image:', error);
|
||||
}
|
||||
|
||||
return this.returnValue(this.result);
|
||||
return this.result;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -10,7 +10,6 @@ const {
|
||||
GoogleSearchAPI,
|
||||
// Structured Tools
|
||||
DALLE3,
|
||||
FluxAPI,
|
||||
OpenWeather,
|
||||
StructuredSD,
|
||||
StructuredACS,
|
||||
@@ -183,7 +182,6 @@ const loadTools = async ({
|
||||
returnMap = false,
|
||||
}) => {
|
||||
const toolConstructors = {
|
||||
flux: FluxAPI,
|
||||
calculator: Calculator,
|
||||
google: GoogleSearchAPI,
|
||||
open_weather: OpenWeather,
|
||||
@@ -232,10 +230,9 @@ const loadTools = async ({
|
||||
};
|
||||
|
||||
const toolOptions = {
|
||||
flux: imageGenOptions,
|
||||
serpapi: { location: 'Austin,Texas,United States', hl: 'en', gl: 'us' },
|
||||
dalle: imageGenOptions,
|
||||
'stable-diffusion': imageGenOptions,
|
||||
serpapi: { location: 'Austin,Texas,United States', hl: 'en', gl: 'us' },
|
||||
};
|
||||
|
||||
const toolContextMap = {};
|
||||
|
||||
@@ -56,10 +56,6 @@ const conversationPreset = {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
maxTokens: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
presence_penalty: {
|
||||
type: Number,
|
||||
required: false,
|
||||
|
||||
@@ -79,7 +79,6 @@ const tokenValues = Object.assign(
|
||||
'o1-mini': { prompt: 1.1, completion: 4.4 },
|
||||
'o1-preview': { prompt: 15, completion: 60 },
|
||||
o1: { prompt: 15, completion: 60 },
|
||||
'gpt-4.5': { prompt: 75, completion: 150 },
|
||||
'gpt-4o-mini': { prompt: 0.15, completion: 0.6 },
|
||||
'gpt-4o': { prompt: 2.5, completion: 10 },
|
||||
'gpt-4o-2024-05-13': { prompt: 5, completion: 15 },
|
||||
@@ -168,8 +167,6 @@ const getValueKey = (model, endpoint) => {
|
||||
return 'o1-mini';
|
||||
} else if (modelName.includes('o1')) {
|
||||
return 'o1';
|
||||
} else if (modelName.includes('gpt-4.5')) {
|
||||
return 'gpt-4.5';
|
||||
} else if (modelName.includes('gpt-4o-2024-05-13')) {
|
||||
return 'gpt-4o-2024-05-13';
|
||||
} else if (modelName.includes('gpt-4o-mini')) {
|
||||
|
||||
@@ -50,16 +50,6 @@ describe('getValueKey', () => {
|
||||
expect(getValueKey('gpt-4-0125')).toBe('gpt-4-1106');
|
||||
});
|
||||
|
||||
it('should return "gpt-4.5" for model type of "gpt-4.5"', () => {
|
||||
expect(getValueKey('gpt-4.5-preview')).toBe('gpt-4.5');
|
||||
expect(getValueKey('gpt-4.5-2024-08-06')).toBe('gpt-4.5');
|
||||
expect(getValueKey('gpt-4.5-2024-08-06-0718')).toBe('gpt-4.5');
|
||||
expect(getValueKey('openai/gpt-4.5')).toBe('gpt-4.5');
|
||||
expect(getValueKey('openai/gpt-4.5-2024-08-06')).toBe('gpt-4.5');
|
||||
expect(getValueKey('gpt-4.5-turbo')).toBe('gpt-4.5');
|
||||
expect(getValueKey('gpt-4.5-0125')).toBe('gpt-4.5');
|
||||
});
|
||||
|
||||
it('should return "gpt-4o" for model type of "gpt-4o"', () => {
|
||||
expect(getValueKey('gpt-4o-2024-08-06')).toBe('gpt-4o');
|
||||
expect(getValueKey('gpt-4o-2024-08-06-0718')).toBe('gpt-4o');
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@librechat/backend",
|
||||
"version": "v0.7.7",
|
||||
"version": "v0.7.7-rc1",
|
||||
"description": "",
|
||||
"scripts": {
|
||||
"start": "echo 'please run this from the root directory'",
|
||||
@@ -36,7 +36,7 @@
|
||||
"dependencies": {
|
||||
"@anthropic-ai/sdk": "^0.37.0",
|
||||
"@azure/search-documents": "^12.0.0",
|
||||
"@google/generative-ai": "^0.23.0",
|
||||
"@google/generative-ai": "^0.21.0",
|
||||
"@googleapis/youtube": "^20.0.0",
|
||||
"@keyv/mongo": "^2.1.8",
|
||||
"@keyv/redis": "^2.8.1",
|
||||
@@ -45,7 +45,7 @@
|
||||
"@langchain/google-genai": "^0.1.9",
|
||||
"@langchain/google-vertexai": "^0.2.0",
|
||||
"@langchain/textsplitters": "^0.1.0",
|
||||
"@librechat/agents": "^2.2.0",
|
||||
"@librechat/agents": "^2.1.3",
|
||||
"@waylaidwanderer/fetch-event-source": "^3.0.1",
|
||||
"axios": "1.7.8",
|
||||
"bcryptjs": "^2.4.3",
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
const { CacheKeys } = require('librechat-data-provider');
|
||||
const { loadDefaultModels, loadConfigModels } = require('~/server/services/Config');
|
||||
const { getLogStores } = require('~/cache');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
* @param {ServerRequest} req
|
||||
@@ -37,13 +36,8 @@ async function loadModels(req) {
|
||||
}
|
||||
|
||||
async function modelController(req, res) {
|
||||
try {
|
||||
const modelConfig = await loadModels(req);
|
||||
res.send(modelConfig);
|
||||
} catch (error) {
|
||||
logger.error('Error fetching models:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
}
|
||||
const modelConfig = await loadModels(req);
|
||||
res.send(modelConfig);
|
||||
}
|
||||
|
||||
module.exports = { modelController, loadModels, getModelsConfig };
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
const { nanoid } = require('nanoid');
|
||||
const { Tools, StepTypes, FileContext } = require('librechat-data-provider');
|
||||
const { Tools, StepTypes, imageGenTools, FileContext } = require('librechat-data-provider');
|
||||
const {
|
||||
EnvVar,
|
||||
Providers,
|
||||
@@ -243,6 +242,32 @@ function createToolEndCallback({ req, res, artifactPromises }) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (imageGenTools.has(output.name)) {
|
||||
artifactPromises.push(
|
||||
(async () => {
|
||||
const fileMetadata = Object.assign(output.artifact, {
|
||||
messageId: metadata.run_id,
|
||||
toolCallId: output.tool_call_id,
|
||||
conversationId: metadata.thread_id,
|
||||
});
|
||||
if (!res.headersSent) {
|
||||
return fileMetadata;
|
||||
}
|
||||
|
||||
if (!fileMetadata) {
|
||||
return null;
|
||||
}
|
||||
|
||||
res.write(`event: attachment\ndata: ${JSON.stringify(fileMetadata)}\n\n`);
|
||||
return fileMetadata;
|
||||
})().catch((error) => {
|
||||
logger.error('Error processing code output:', error);
|
||||
return null;
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (output.artifact.content) {
|
||||
/** @type {FormattedContent[]} */
|
||||
const content = output.artifact.content;
|
||||
@@ -253,7 +278,7 @@ function createToolEndCallback({ req, res, artifactPromises }) {
|
||||
const { url } = part.image_url;
|
||||
artifactPromises.push(
|
||||
(async () => {
|
||||
const filename = `${output.name}_${output.tool_call_id}_img_${nanoid()}`;
|
||||
const filename = `${output.tool_call_id}-image-${new Date().getTime()}`;
|
||||
const file = await saveBase64Image(url, {
|
||||
req,
|
||||
filename,
|
||||
|
||||
@@ -17,7 +17,7 @@ const {
|
||||
KnownEndpoints,
|
||||
anthropicSchema,
|
||||
isAgentsEndpoint,
|
||||
bedrockInputSchema,
|
||||
bedrockOutputParser,
|
||||
removeNullishValues,
|
||||
} = require('librechat-data-provider');
|
||||
const {
|
||||
@@ -27,11 +27,10 @@ const {
|
||||
formatContentStrings,
|
||||
createContextHandlers,
|
||||
} = require('~/app/clients/prompts');
|
||||
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
|
||||
const { getBufferString, HumanMessage } = require('@langchain/core/messages');
|
||||
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
|
||||
const { getCustomEndpointConfig } = require('~/server/services/Config');
|
||||
const { getBufferString, HumanMessage } = require('@langchain/core/messages');
|
||||
const Tokenizer = require('~/server/services/Tokenizer');
|
||||
const { spendTokens } = require('~/models/spendTokens');
|
||||
const BaseClient = require('~/app/clients/BaseClient');
|
||||
const { createRun } = require('./run');
|
||||
const { logger } = require('~/config');
|
||||
@@ -40,10 +39,10 @@ const { logger } = require('~/config');
|
||||
/** @typedef {import('@langchain/core/runnables').RunnableConfig} RunnableConfig */
|
||||
|
||||
const providerParsers = {
|
||||
[EModelEndpoint.openAI]: openAISchema.parse,
|
||||
[EModelEndpoint.azureOpenAI]: openAISchema.parse,
|
||||
[EModelEndpoint.anthropic]: anthropicSchema.parse,
|
||||
[EModelEndpoint.bedrock]: bedrockInputSchema.parse,
|
||||
[EModelEndpoint.openAI]: openAISchema,
|
||||
[EModelEndpoint.azureOpenAI]: openAISchema,
|
||||
[EModelEndpoint.anthropic]: anthropicSchema,
|
||||
[EModelEndpoint.bedrock]: bedrockOutputParser,
|
||||
};
|
||||
|
||||
const legacyContentEndpoints = new Set([KnownEndpoints.groq, KnownEndpoints.deepseek]);
|
||||
@@ -188,14 +187,7 @@ class AgentClient extends BaseClient {
|
||||
: {};
|
||||
|
||||
if (parseOptions) {
|
||||
try {
|
||||
runOptions = parseOptions(this.options.agent.model_parameters);
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
'[api/server/controllers/agents/client.js #getSaveOptions] Error parsing options',
|
||||
error,
|
||||
);
|
||||
}
|
||||
runOptions = parseOptions(this.options.agent.model_parameters);
|
||||
}
|
||||
|
||||
return removeNullishValues(
|
||||
@@ -388,34 +380,15 @@ class AgentClient extends BaseClient {
|
||||
if (!collectedUsage || !collectedUsage.length) {
|
||||
return;
|
||||
}
|
||||
const input_tokens =
|
||||
(collectedUsage[0]?.input_tokens || 0) +
|
||||
(Number(collectedUsage[0]?.input_token_details?.cache_creation) || 0) +
|
||||
(Number(collectedUsage[0]?.input_token_details?.cache_read) || 0);
|
||||
const input_tokens = collectedUsage[0]?.input_tokens || 0;
|
||||
|
||||
let output_tokens = 0;
|
||||
let previousTokens = input_tokens; // Start with original input
|
||||
for (let i = 0; i < collectedUsage.length; i++) {
|
||||
const usage = collectedUsage[i];
|
||||
if (!usage) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const cache_creation = Number(usage.input_token_details?.cache_creation) || 0;
|
||||
const cache_read = Number(usage.input_token_details?.cache_read) || 0;
|
||||
|
||||
const txMetadata = {
|
||||
context,
|
||||
conversationId: this.conversationId,
|
||||
user: this.user ?? this.options.req.user?.id,
|
||||
endpointTokenConfig: this.options.endpointTokenConfig,
|
||||
model: usage.model ?? model ?? this.model ?? this.options.agent.model_parameters.model,
|
||||
};
|
||||
|
||||
if (i > 0) {
|
||||
// Count new tokens generated (input_tokens minus previous accumulated tokens)
|
||||
output_tokens +=
|
||||
(Number(usage.input_tokens) || 0) + cache_creation + cache_read - previousTokens;
|
||||
output_tokens += (Number(usage.input_tokens) || 0) - previousTokens;
|
||||
}
|
||||
|
||||
// Add this message's output tokens
|
||||
@@ -423,26 +396,16 @@ class AgentClient extends BaseClient {
|
||||
|
||||
// Update previousTokens to include this message's output
|
||||
previousTokens += Number(usage.output_tokens) || 0;
|
||||
|
||||
if (cache_creation > 0 || cache_read > 0) {
|
||||
spendStructuredTokens(txMetadata, {
|
||||
promptTokens: {
|
||||
input: usage.input_tokens,
|
||||
write: cache_creation,
|
||||
read: cache_read,
|
||||
},
|
||||
completionTokens: usage.output_tokens,
|
||||
}).catch((err) => {
|
||||
logger.error(
|
||||
'[api/server/controllers/agents/client.js #recordCollectedUsage] Error spending structured tokens',
|
||||
err,
|
||||
);
|
||||
});
|
||||
}
|
||||
spendTokens(txMetadata, {
|
||||
promptTokens: usage.input_tokens,
|
||||
completionTokens: usage.output_tokens,
|
||||
}).catch((err) => {
|
||||
spendTokens(
|
||||
{
|
||||
context,
|
||||
conversationId: this.conversationId,
|
||||
user: this.user ?? this.options.req.user?.id,
|
||||
endpointTokenConfig: this.options.endpointTokenConfig,
|
||||
model: usage.model ?? model ?? this.model ?? this.options.agent.model_parameters.model,
|
||||
},
|
||||
{ promptTokens: usage.input_tokens, completionTokens: usage.output_tokens },
|
||||
).catch((err) => {
|
||||
logger.error(
|
||||
'[api/server/controllers/agents/client.js #recordCollectedUsage] Error spending tokens',
|
||||
err,
|
||||
@@ -803,10 +766,6 @@ class AgentClient extends BaseClient {
);
}
} catch (err) {
logger.error(
'[api/server/controllers/agents/client.js #sendCompletion] Operation aborted',
err,
);
if (!abortController.signal.aborted) {
logger.error(
'[api/server/controllers/agents/client.js #sendCompletion] Unhandled error type',
@@ -814,6 +773,11 @@ class AgentClient extends BaseClient {
);
throw err;
}

logger.warn(
'[api/server/controllers/agents/client.js #sendCompletion] Operation aborted',
err,
);
}
}

@@ -828,20 +792,14 @@ class AgentClient extends BaseClient {
throw new Error('Run not initialized');
}
const { handleLLMEnd, collected: collectedMetadata } = createMetadataAggregator();
/** @type {import('@librechat/agents').ClientOptions} */
const clientOptions = {
maxTokens: 75,
};
let endpointConfig = this.options.req.app.locals[this.options.agent.endpoint];
if (!endpointConfig) {
endpointConfig = await getCustomEndpointConfig(this.options.agent.endpoint);
}
const clientOptions = {};
const providerConfig = this.options.req.app.locals[this.options.agent.provider];
if (
endpointConfig &&
endpointConfig.titleModel &&
endpointConfig.titleModel !== Constants.CURRENT_MODEL
providerConfig &&
providerConfig.titleModel &&
providerConfig.titleModel !== Constants.CURRENT_MODEL
) {
clientOptions.model = endpointConfig.titleModel;
clientOptions.model = providerConfig.titleModel;
}
try {
const titleResult = await this.run.generateTitle({

@@ -45,10 +45,7 @@ async function createRun({

/** @type {'reasoning_content' | 'reasoning'} */
let reasoningKey;
if (
llmConfig.configuration?.baseURL?.includes(KnownEndpoints.openrouter) ||
(agent.endpoint && agent.endpoint.toLowerCase().includes(KnownEndpoints.openrouter))
) {
if (llmConfig.configuration?.baseURL?.includes(KnownEndpoints.openrouter)) {
reasoningKey = 'reasoning';
}
if (/o1(?!-(?:mini|preview)).*$/.test(llmConfig.model)) {

@@ -1,18 +1,32 @@
const passport = require('passport');
const { logger } = require('~/config');
const DebugControl = require('../../utils/debug.js');

function log({ title, parameters }) {
DebugControl.log.functionName(title);
if (parameters) {
DebugControl.log.parameters(parameters);
}
}

const requireLocalAuth = (req, res, next) => {
passport.authenticate('local', (err, user, info) => {
if (err) {
logger.error('[requireLocalAuth] Error at passport.authenticate:', err);
log({
title: '(requireLocalAuth) Error at passport.authenticate',
parameters: [{ name: 'error', value: err }],
});
return next(err);
}
if (!user) {
logger.debug('[requireLocalAuth] Error: No user');
log({
title: '(requireLocalAuth) Error: No user',
});
return res.status(404).send(info);
}
if (info && info.message) {
logger.debug('[requireLocalAuth] Error: ' + info.message);
log({
title: '(requireLocalAuth) Error: ' + info.message,
});
return res.status(422).send({ message: info.message });
}
req.user = user;

@@ -47,10 +47,10 @@ router.get('/', async function (req, res) {
githubLoginEnabled: !!process.env.GITHUB_CLIENT_ID && !!process.env.GITHUB_CLIENT_SECRET,
googleLoginEnabled: !!process.env.GOOGLE_CLIENT_ID && !!process.env.GOOGLE_CLIENT_SECRET,
appleLoginEnabled:
!!process.env.APPLE_CLIENT_ID &&
!!process.env.APPLE_TEAM_ID &&
!!process.env.APPLE_KEY_ID &&
!!process.env.APPLE_PRIVATE_KEY_PATH,
!!process.env.APPLE_CLIENT_ID &&
!!process.env.APPLE_TEAM_ID &&
!!process.env.APPLE_KEY_ID &&
!!process.env.APPLE_PRIVATE_KEY_PATH,
openidLoginEnabled:
!!process.env.OPENID_CLIENT_ID &&
!!process.env.OPENID_CLIENT_SECRET &&
@@ -80,7 +80,6 @@ router.get('/', async function (req, res) {
publicSharedLinksEnabled,
analyticsGtmId: process.env.ANALYTICS_GTM_ID,
instanceProjectId: instanceProject._id.toString(),
bundlerURL: process.env.SANDPACK_BUNDLER_URL,
};

if (ldap) {

@@ -47,7 +47,7 @@ async function loadConfigModels(req) {
);

/**
* @type {Record<string, Promise<string[]>>}
* @type {Record<string, string[]>}
* Map for promises keyed by unique combination of baseURL and apiKey */
const fetchPromisesMap = {};
/**
@@ -102,7 +102,7 @@ async function loadConfigModels(req) {

for (const name of associatedNames) {
const endpoint = endpointsMap[name];
modelsConfig[name] = !modelData?.length ? (endpoint.models.default ?? []) : modelData;
modelsConfig[name] = !modelData?.length ? endpoint.models.default ?? [] : modelData;
}
}

@@ -5,8 +5,8 @@ const {
getGoogleModels,
getBedrockModels,
getAnthropicModels,
getChatGPTBrowserModels,
} = require('~/server/services/ModelService');
const { logger } = require('~/config');

/**
* Loads the default models for the application.
@@ -15,68 +15,31 @@ const { logger } = require('~/config');
* @param {Express.Request} req - The Express request object.
*/
async function loadDefaultModels(req) {
try {
const [
openAI,
anthropic,
azureOpenAI,
gptPlugins,
assistants,
azureAssistants,
google,
bedrock,
] = await Promise.all([
getOpenAIModels({ user: req.user.id }).catch((error) => {
logger.error('Error fetching OpenAI models:', error);
return [];
}),
getAnthropicModels({ user: req.user.id }).catch((error) => {
logger.error('Error fetching Anthropic models:', error);
return [];
}),
getOpenAIModels({ user: req.user.id, azure: true }).catch((error) => {
logger.error('Error fetching Azure OpenAI models:', error);
return [];
}),
getOpenAIModels({ user: req.user.id, azure: useAzurePlugins, plugins: true }).catch(
(error) => {
logger.error('Error fetching Plugin models:', error);
return [];
},
),
getOpenAIModels({ assistants: true }).catch((error) => {
logger.error('Error fetching OpenAI Assistants API models:', error);
return [];
}),
getOpenAIModels({ azureAssistants: true }).catch((error) => {
logger.error('Error fetching Azure OpenAI Assistants API models:', error);
return [];
}),
Promise.resolve(getGoogleModels()).catch((error) => {
logger.error('Error getting Google models:', error);
return [];
}),
Promise.resolve(getBedrockModels()).catch((error) => {
logger.error('Error getting Bedrock models:', error);
return [];
}),
]);
const google = getGoogleModels();
const openAI = await getOpenAIModels({ user: req.user.id });
const anthropic = getAnthropicModels();
const chatGPTBrowser = getChatGPTBrowserModels();
const azureOpenAI = await getOpenAIModels({ user: req.user.id, azure: true });
const gptPlugins = await getOpenAIModels({
user: req.user.id,
azure: useAzurePlugins,
plugins: true,
});
const assistants = await getOpenAIModels({ assistants: true });
const azureAssistants = await getOpenAIModels({ azureAssistants: true });

return {
[EModelEndpoint.openAI]: openAI,
[EModelEndpoint.agents]: openAI,
[EModelEndpoint.google]: google,
[EModelEndpoint.anthropic]: anthropic,
[EModelEndpoint.gptPlugins]: gptPlugins,
[EModelEndpoint.azureOpenAI]: azureOpenAI,
[EModelEndpoint.assistants]: assistants,
[EModelEndpoint.azureAssistants]: azureAssistants,
[EModelEndpoint.bedrock]: bedrock,
};
} catch (error) {
logger.error('Error fetching default models:', error);
throw new Error(`Failed to load default models: ${error.message}`);
}
return {
[EModelEndpoint.openAI]: openAI,
[EModelEndpoint.agents]: openAI,
[EModelEndpoint.google]: google,
[EModelEndpoint.anthropic]: anthropic,
[EModelEndpoint.gptPlugins]: gptPlugins,
[EModelEndpoint.azureOpenAI]: azureOpenAI,
[EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
[EModelEndpoint.assistants]: assistants,
[EModelEndpoint.azureAssistants]: azureAssistants,
[EModelEndpoint.bedrock]: getBedrockModels(),
};
}

module.exports = loadDefaultModels;

@@ -22,7 +22,6 @@ const { getAgent } = require('~/models/Agent');
const { logger } = require('~/config');

const providerConfigMap = {
[Providers.XAI]: initCustom,
[Providers.OLLAMA]: initCustom,
[Providers.DEEPSEEK]: initCustom,
[Providers.OPENROUTER]: initCustom,
@@ -102,7 +101,6 @@ const initializeAgentOptions = async ({
});

const provider = agent.provider;
agent.endpoint = provider;
let getOptions = providerConfigMap[provider];
if (!getOptions && providerConfigMap[provider.toLowerCase()] != null) {
agent.provider = provider.toLowerCase();
@@ -114,7 +112,9 @@ const initializeAgentOptions = async ({
}
getOptions = initCustom;
agent.provider = Providers.OPENAI;
agent.endpoint = provider.toLowerCase();
}

const model_parameters = Object.assign(
{},
agent.model_parameters ?? { model: agent.model },

@@ -20,19 +20,10 @@ const addTitle = async (req, { text, response, client }) => {

const titleCache = getLogStores(CacheKeys.GEN_TITLE);
const key = `${req.user.id}-${response.conversationId}`;
const responseText =
response?.content && Array.isArray(response?.content)
? response.content.reduce((acc, block) => {
if (block?.type === 'text') {
return acc + block.text;
}
return acc;
}, '')
: (response?.content ?? response?.text ?? '');

const title = await client.titleConvo({
text,
responseText,
responseText: response?.text ?? '',
conversationId: response.conversationId,
});
await titleCache.set(key, title, 120000);

@@ -48,8 +48,7 @@ function getClaudeHeaders(model, supportsCacheControl) {
};
} else if (/claude-3[-.]7/.test(model)) {
return {
'anthropic-beta':
'token-efficient-tools-2025-02-19,output-128k-2025-02-19,prompt-caching-2024-07-31',
'anthropic-beta': 'output-128k-2025-02-19,prompt-caching-2024-07-31',
};
} else {
return {

@@ -27,7 +27,6 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio

if (anthropicConfig) {
clientOptions.streamRate = anthropicConfig.streamRate;
clientOptions.titleModel = anthropicConfig.titleModel;
}

/** @type {undefined | TBaseEndpoint} */

@@ -1,6 +1,6 @@
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { anthropicSettings, removeNullishValues } = require('librechat-data-provider');
|
||||
const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = require('./helpers');
|
||||
const { checkPromptCacheSupport, getClaudeHeaders } = require('./helpers');
|
||||
|
||||
/**
|
||||
* Generates configuration options for creating an Anthropic language model (LLM) instance.
|
||||
@@ -49,14 +49,13 @@ function getLLMConfig(apiKey, options = {}) {
|
||||
clientOptions: {},
|
||||
};
|
||||
|
||||
requestOptions = configureReasoning(requestOptions, systemOptions);
|
||||
|
||||
if (!/claude-3[-.]7/.test(mergedOptions.model)) {
|
||||
requestOptions.topP = mergedOptions.topP;
|
||||
requestOptions.topK = mergedOptions.topK;
|
||||
} else if (requestOptions.thinking == null) {
|
||||
requestOptions.topP = mergedOptions.topP;
|
||||
requestOptions.topK = mergedOptions.topK;
|
||||
if (mergedOptions.topP !== undefined) {
|
||||
requestOptions.topP = mergedOptions.topP;
|
||||
}
|
||||
if (mergedOptions.topK !== undefined) {
|
||||
requestOptions.topK = mergedOptions.topK;
|
||||
}
|
||||
}
|
||||
|
||||
const supportsCacheControl =
|
||||
|
||||
@@ -109,45 +109,4 @@ describe('getLLMConfig', () => {
|
||||
// Just verifying that the promptCache setting is processed
|
||||
expect(result.llmConfig).toBeDefined();
|
||||
});
|
||||
|
||||
it('should include topK and topP for Claude-3.7 models when thinking is not enabled', () => {
|
||||
// Test with thinking explicitly set to null/undefined
|
||||
const result = getLLMConfig('test-api-key', {
|
||||
modelOptions: {
|
||||
model: 'claude-3-7-sonnet',
|
||||
topK: 10,
|
||||
topP: 0.9,
|
||||
thinking: false,
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.llmConfig).toHaveProperty('topK', 10);
|
||||
expect(result.llmConfig).toHaveProperty('topP', 0.9);
|
||||
|
||||
// Test with thinking explicitly set to false
|
||||
const result2 = getLLMConfig('test-api-key', {
|
||||
modelOptions: {
|
||||
model: 'claude-3-7-sonnet',
|
||||
topK: 10,
|
||||
topP: 0.9,
|
||||
thinking: false,
|
||||
},
|
||||
});
|
||||
|
||||
expect(result2.llmConfig).toHaveProperty('topK', 10);
|
||||
expect(result2.llmConfig).toHaveProperty('topP', 0.9);
|
||||
|
||||
// Test with decimal notation as well
|
||||
const result3 = getLLMConfig('test-api-key', {
|
||||
modelOptions: {
|
||||
model: 'claude-3.7-sonnet',
|
||||
topK: 10,
|
||||
topP: 0.9,
|
||||
thinking: false,
|
||||
},
|
||||
});
|
||||
|
||||
expect(result3.llmConfig).toHaveProperty('topK', 10);
|
||||
expect(result3.llmConfig).toHaveProperty('topP', 0.9);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const { removeNullishValues } = require('librechat-data-provider');
|
||||
const { removeNullishValues, bedrockInputParser } = require('librechat-data-provider');
|
||||
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const buildOptions = (endpoint, parsedBody) => {
|
||||
const {
|
||||
@@ -14,6 +15,12 @@ const buildOptions = (endpoint, parsedBody) => {
|
||||
artifacts,
|
||||
...model_parameters
|
||||
} = parsedBody;
|
||||
let parsedParams = model_parameters;
|
||||
try {
|
||||
parsedParams = bedrockInputParser.parse(model_parameters);
|
||||
} catch (error) {
|
||||
logger.warn('Failed to parse bedrock input', error);
|
||||
}
|
||||
const endpointOption = removeNullishValues({
|
||||
endpoint,
|
||||
name,
|
||||
@@ -24,7 +31,7 @@ const buildOptions = (endpoint, parsedBody) => {
|
||||
spec,
|
||||
promptPrefix,
|
||||
maxContextTokens,
|
||||
model_parameters,
|
||||
model_parameters: parsedParams,
|
||||
});
|
||||
|
||||
if (typeof artifacts === 'string') {
|
||||
|
||||
@@ -1,16 +1,14 @@
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const {
|
||||
AuthType,
|
||||
Constants,
|
||||
EModelEndpoint,
|
||||
bedrockInputParser,
|
||||
bedrockOutputParser,
|
||||
Constants,
|
||||
AuthType,
|
||||
removeNullishValues,
|
||||
} = require('librechat-data-provider');
|
||||
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
|
||||
const { sleep } = require('~/server/utils');
|
||||
|
||||
const getOptions = async ({ req, overrideModel, endpointOption }) => {
|
||||
const getOptions = async ({ req, endpointOption }) => {
|
||||
const {
|
||||
BEDROCK_AWS_SECRET_ACCESS_KEY,
|
||||
BEDROCK_AWS_ACCESS_KEY_ID,
|
||||
@@ -64,44 +62,39 @@ const getOptions = async ({ req, overrideModel, endpointOption }) => {
|
||||
|
||||
/** @type {BedrockClientOptions} */
|
||||
const requestOptions = {
|
||||
model: overrideModel ?? endpointOption.model,
|
||||
model: endpointOption.model,
|
||||
region: BEDROCK_AWS_DEFAULT_REGION,
|
||||
streaming: true,
|
||||
streamUsage: true,
|
||||
callbacks: [
|
||||
{
|
||||
handleLLMNewToken: async () => {
|
||||
if (!streamRate) {
|
||||
return;
|
||||
}
|
||||
await sleep(streamRate);
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
if (credentials) {
|
||||
requestOptions.credentials = credentials;
|
||||
}
|
||||
|
||||
if (BEDROCK_REVERSE_PROXY) {
|
||||
requestOptions.endpointHost = BEDROCK_REVERSE_PROXY;
|
||||
}
|
||||
|
||||
const configOptions = {};
|
||||
if (PROXY) {
|
||||
/** NOTE: NOT SUPPORTED BY BEDROCK */
|
||||
configOptions.httpAgent = new HttpsProxyAgent(PROXY);
|
||||
}
|
||||
|
||||
const llmConfig = bedrockOutputParser(
|
||||
bedrockInputParser.parse(
|
||||
removeNullishValues(Object.assign(requestOptions, endpointOption.model_parameters)),
|
||||
),
|
||||
);
|
||||
|
||||
if (credentials) {
|
||||
llmConfig.credentials = credentials;
|
||||
}
|
||||
|
||||
if (BEDROCK_REVERSE_PROXY) {
|
||||
llmConfig.endpointHost = BEDROCK_REVERSE_PROXY;
|
||||
}
|
||||
|
||||
llmConfig.callbacks = [
|
||||
{
|
||||
handleLLMNewToken: async () => {
|
||||
if (!streamRate) {
|
||||
return;
|
||||
}
|
||||
await sleep(streamRate);
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
return {
|
||||
/** @type {BedrockClientOptions} */
|
||||
llmConfig,
|
||||
llmConfig: removeNullishValues(Object.assign(requestOptions, endpointOption.model_parameters)),
|
||||
configOptions,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -141,8 +141,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
|
||||
},
|
||||
clientOptions,
|
||||
);
|
||||
clientOptions.modelOptions.user = req.user.id;
|
||||
const options = getLLMConfig(apiKey, clientOptions, endpoint);
|
||||
const options = getLLMConfig(apiKey, clientOptions);
|
||||
if (!customOptions.streamRate) {
|
||||
return options;
|
||||
}
|
||||
|
||||
@@ -5,7 +5,12 @@ const { isEnabled } = require('~/server/utils');
|
||||
const { GoogleClient } = require('~/app');
|
||||
|
||||
const initializeClient = async ({ req, res, endpointOption, overrideModel, optionsOnly }) => {
|
||||
const { GOOGLE_KEY, GOOGLE_REVERSE_PROXY, GOOGLE_AUTH_HEADER, PROXY } = process.env;
|
||||
const {
|
||||
GOOGLE_KEY,
|
||||
GOOGLE_REVERSE_PROXY,
|
||||
GOOGLE_AUTH_HEADER,
|
||||
PROXY,
|
||||
} = process.env;
|
||||
const isUserProvided = GOOGLE_KEY === 'user_provided';
|
||||
const { key: expiresAt } = req.body;
|
||||
|
||||
@@ -38,7 +43,6 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
|
||||
|
||||
if (googleConfig) {
|
||||
clientOptions.streamRate = googleConfig.streamRate;
|
||||
clientOptions.titleModel = googleConfig.titleModel;
|
||||
}
|
||||
|
||||
if (allConfig) {
|
||||
|
||||
@@ -113,7 +113,6 @@ const initializeClient = async ({
|
||||
|
||||
if (!isAzureOpenAI && openAIConfig) {
|
||||
clientOptions.streamRate = openAIConfig.streamRate;
|
||||
clientOptions.titleModel = openAIConfig.titleModel;
|
||||
}
|
||||
|
||||
/** @type {undefined | TBaseEndpoint} */
|
||||
@@ -141,7 +140,6 @@ const initializeClient = async ({
|
||||
},
|
||||
clientOptions,
|
||||
);
|
||||
clientOptions.modelOptions.user = req.user.id;
|
||||
const options = getLLMConfig(apiKey, clientOptions);
|
||||
if (!clientOptions.streamRate) {
|
||||
return options;
|
||||
|
||||
@@ -9,7 +9,6 @@ const { isEnabled } = require('~/server/utils');
|
||||
* @param {Object} options - Additional options for configuring the LLM.
|
||||
* @param {Object} [options.modelOptions] - Model-specific options.
|
||||
* @param {string} [options.modelOptions.model] - The name of the model to use.
|
||||
* @param {string} [options.modelOptions.user] - The user ID
|
||||
* @param {number} [options.modelOptions.temperature] - Controls randomness in output generation (0-2).
|
||||
* @param {number} [options.modelOptions.top_p] - Controls diversity via nucleus sampling (0-1).
|
||||
* @param {number} [options.modelOptions.frequency_penalty] - Reduces repetition of token sequences (-2 to 2).
|
||||
@@ -24,13 +23,13 @@ const { isEnabled } = require('~/server/utils');
|
||||
* @param {boolean} [options.streaming] - Whether to use streaming mode.
|
||||
* @param {Object} [options.addParams] - Additional parameters to add to the model options.
|
||||
* @param {string[]} [options.dropParams] - Parameters to remove from the model options.
|
||||
* @param {string|null} [endpoint=null] - The endpoint name
|
||||
* @returns {Object} Configuration options for creating an LLM instance.
|
||||
*/
|
||||
function getLLMConfig(apiKey, options = {}, endpoint = null) {
|
||||
function getLLMConfig(apiKey, options = {}) {
|
||||
const {
|
||||
modelOptions = {},
|
||||
reverseProxyUrl,
|
||||
useOpenRouter,
|
||||
defaultQuery,
|
||||
headers,
|
||||
proxy,
|
||||
@@ -57,14 +56,9 @@ function getLLMConfig(apiKey, options = {}, endpoint = null) {
|
||||
});
|
||||
}
|
||||
|
||||
let useOpenRouter;
|
||||
/** @type {OpenAIClientOptions['configuration']} */
|
||||
const configOptions = {};
|
||||
if (
|
||||
(reverseProxyUrl && reverseProxyUrl.includes(KnownEndpoints.openrouter)) ||
|
||||
(endpoint && endpoint.toLowerCase().includes(KnownEndpoints.openrouter))
|
||||
) {
|
||||
useOpenRouter = true;
|
||||
if (useOpenRouter || (reverseProxyUrl && reverseProxyUrl.includes(KnownEndpoints.openrouter))) {
|
||||
llmConfig.include_reasoning = true;
|
||||
configOptions.baseURL = reverseProxyUrl;
|
||||
configOptions.defaultHeaders = Object.assign(
|
||||
@@ -124,13 +118,6 @@ function getLLMConfig(apiKey, options = {}, endpoint = null) {
|
||||
llmConfig.organization = process.env.OPENAI_ORGANIZATION;
|
||||
}
|
||||
|
||||
if (useOpenRouter && llmConfig.reasoning_effort != null) {
|
||||
llmConfig.reasoning = {
|
||||
effort: llmConfig.reasoning_effort,
|
||||
};
|
||||
delete llmConfig.reasoning_effort;
|
||||
}
|
||||
|
||||
return {
|
||||
/** @type {OpenAIClientOptions} */
|
||||
llmConfig,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
// Code Files
|
||||
const axios = require('axios');
|
||||
const FormData = require('form-data');
|
||||
const { getCodeBaseURL } = require('@librechat/agents');
|
||||
@@ -15,8 +16,7 @@ const MAX_FILE_SIZE = 150 * 1024 * 1024;
|
||||
async function getCodeOutputDownloadStream(fileIdentifier, apiKey) {
|
||||
try {
|
||||
const baseURL = getCodeBaseURL();
|
||||
/** @type {import('axios').AxiosRequestConfig} */
|
||||
const options = {
|
||||
const response = await axios({
|
||||
method: 'get',
|
||||
url: `${baseURL}/download/${fileIdentifier}`,
|
||||
responseType: 'stream',
|
||||
@@ -25,22 +25,10 @@ async function getCodeOutputDownloadStream(fileIdentifier, apiKey) {
|
||||
'X-API-Key': apiKey,
|
||||
},
|
||||
timeout: 15000,
|
||||
};
|
||||
});
|
||||
|
||||
if (process.env.PROXY) {
|
||||
options.proxy = {
|
||||
host: process.env.PROXY,
|
||||
protocol: process.env.PROXY.startsWith('https') ? 'https' : 'http',
|
||||
};
|
||||
}
|
||||
|
||||
const response = await axios(options);
|
||||
return response;
|
||||
} catch (error) {
|
||||
logAxiosError({
|
||||
message: `Error downloading code environment file stream: ${error.message}`,
|
||||
error,
|
||||
});
|
||||
throw new Error(`Error downloading file: ${error.message}`);
|
||||
}
|
||||
}
|
||||
@@ -66,8 +54,7 @@ async function uploadCodeEnvFile({ req, stream, filename, apiKey, entity_id = ''
|
||||
form.append('file', stream, filename);
|
||||
|
||||
const baseURL = getCodeBaseURL();
|
||||
/** @type {import('axios').AxiosRequestConfig} */
|
||||
const options = {
|
||||
const response = await axios.post(`${baseURL}/upload`, form, {
|
||||
headers: {
|
||||
...form.getHeaders(),
|
||||
'Content-Type': 'multipart/form-data',
|
||||
@@ -77,16 +64,7 @@ async function uploadCodeEnvFile({ req, stream, filename, apiKey, entity_id = ''
|
||||
},
|
||||
maxContentLength: MAX_FILE_SIZE,
|
||||
maxBodyLength: MAX_FILE_SIZE,
|
||||
};
|
||||
|
||||
if (process.env.PROXY) {
|
||||
options.proxy = {
|
||||
host: process.env.PROXY,
|
||||
protocol: process.env.PROXY.startsWith('https') ? 'https' : 'http',
|
||||
};
|
||||
}
|
||||
|
||||
const response = await axios.post(`${baseURL}/upload`, form, options);
|
||||
});
|
||||
|
||||
/** @type {{ message: string; session_id: string; files: Array<{ fileId: string; filename: string }> }} */
|
||||
const result = response.data;
|
||||
|
||||
@@ -4,9 +4,7 @@ const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { EModelEndpoint, defaultModels, CacheKeys } = require('librechat-data-provider');
|
||||
const { inputSchema, logAxiosError, extractBaseURL, processModelData } = require('~/utils');
|
||||
const { OllamaClient } = require('~/app/clients/OllamaClient');
|
||||
const { isUserProvided } = require('~/server/utils');
|
||||
const getLogStores = require('~/cache/getLogStores');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
* Splits a string by commas and trims each resulting value.
|
||||
@@ -44,7 +42,7 @@ const fetchModels = async ({
|
||||
user,
|
||||
apiKey,
|
||||
baseURL,
|
||||
name = EModelEndpoint.openAI,
|
||||
name = 'OpenAI',
|
||||
azure = false,
|
||||
userIdQuery = false,
|
||||
createTokenConfig = true,
|
||||
@@ -66,19 +64,12 @@ const fetchModels = async ({
|
||||
|
||||
try {
|
||||
const options = {
|
||||
headers: {},
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
},
|
||||
timeout: 5000,
|
||||
};
|
||||
|
||||
if (name === EModelEndpoint.anthropic) {
|
||||
options.headers = {
|
||||
'x-api-key': apiKey,
|
||||
'anthropic-version': process.env.ANTHROPIC_VERSION || '2023-06-01',
|
||||
};
|
||||
} else {
|
||||
options.headers.Authorization = `Bearer ${apiKey}`;
|
||||
}
|
||||
|
||||
if (process.env.PROXY) {
|
||||
options.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
|
||||
}
|
||||
@@ -157,7 +148,7 @@ const fetchOpenAIModels = async (opts, _models = []) => {
|
||||
baseURL,
|
||||
azure: opts.azure,
|
||||
user: opts.user,
|
||||
name: EModelEndpoint.openAI,
|
||||
name: baseURL,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -240,71 +231,13 @@ const getChatGPTBrowserModels = () => {
|
||||
return models;
|
||||
};
|
||||
|
||||
/**
|
||||
* Fetches models from the Anthropic API.
|
||||
* @async
|
||||
* @function
|
||||
* @param {object} opts - The options for fetching the models.
|
||||
* @param {string} opts.user - The user ID to send to the API.
|
||||
* @param {string[]} [_models=[]] - The models to use as a fallback.
|
||||
*/
|
||||
const fetchAnthropicModels = async (opts, _models = []) => {
|
||||
let models = _models.slice() ?? [];
|
||||
let apiKey = process.env.ANTHROPIC_API_KEY;
|
||||
const anthropicBaseURL = 'https://api.anthropic.com/v1';
|
||||
let baseURL = anthropicBaseURL;
|
||||
let reverseProxyUrl = process.env.ANTHROPIC_REVERSE_PROXY;
|
||||
|
||||
if (reverseProxyUrl) {
|
||||
baseURL = extractBaseURL(reverseProxyUrl);
|
||||
}
|
||||
|
||||
if (!apiKey) {
|
||||
return models;
|
||||
}
|
||||
|
||||
const modelsCache = getLogStores(CacheKeys.MODEL_QUERIES);
|
||||
|
||||
const cachedModels = await modelsCache.get(baseURL);
|
||||
if (cachedModels) {
|
||||
return cachedModels;
|
||||
}
|
||||
|
||||
if (baseURL) {
|
||||
models = await fetchModels({
|
||||
apiKey,
|
||||
baseURL,
|
||||
user: opts.user,
|
||||
name: EModelEndpoint.anthropic,
|
||||
tokenKey: EModelEndpoint.anthropic,
|
||||
});
|
||||
}
|
||||
|
||||
if (models.length === 0) {
|
||||
return _models;
|
||||
}
|
||||
|
||||
await modelsCache.set(baseURL, models);
|
||||
return models;
|
||||
};
|
||||
|
||||
const getAnthropicModels = async (opts = {}) => {
|
||||
const getAnthropicModels = () => {
|
||||
let models = defaultModels[EModelEndpoint.anthropic];
|
||||
if (process.env.ANTHROPIC_MODELS) {
|
||||
models = splitAndTrim(process.env.ANTHROPIC_MODELS);
|
||||
return models;
|
||||
}
|
||||
|
||||
if (isUserProvided(process.env.ANTHROPIC_API_KEY)) {
|
||||
return models;
|
||||
}
|
||||
|
||||
try {
|
||||
return await fetchAnthropicModels(opts, models);
|
||||
} catch (error) {
|
||||
logger.error('Error fetching Anthropic models:', error);
|
||||
return models;
|
||||
}
|
||||
return models;
|
||||
};
|
||||
|
||||
const getGoogleModels = () => {
|
||||
|
||||
@@ -352,15 +352,15 @@ describe('splitAndTrim', () => {
|
||||
});
|
||||
|
||||
describe('getAnthropicModels', () => {
|
||||
it('returns default models when ANTHROPIC_MODELS is not set', async () => {
|
||||
it('returns default models when ANTHROPIC_MODELS is not set', () => {
|
||||
delete process.env.ANTHROPIC_MODELS;
|
||||
const models = await getAnthropicModels();
|
||||
const models = getAnthropicModels();
|
||||
expect(models).toEqual(defaultModels[EModelEndpoint.anthropic]);
|
||||
});
|
||||
|
||||
it('returns models from ANTHROPIC_MODELS when set', async () => {
|
||||
it('returns models from ANTHROPIC_MODELS when set', () => {
|
||||
process.env.ANTHROPIC_MODELS = 'claude-1, claude-2 ';
|
||||
const models = await getAnthropicModels();
|
||||
const models = getAnthropicModels();
|
||||
expect(models).toEqual(['claude-1', 'claude-2']);
|
||||
});
|
||||
});
|
||||
|
||||
56
api/utils/debug.js
Normal file
@@ -0,0 +1,56 @@
|
||||
const levels = {
|
||||
NONE: 0,
|
||||
LOW: 1,
|
||||
MEDIUM: 2,
|
||||
HIGH: 3,
|
||||
};
|
||||
|
||||
let level = levels.HIGH;
|
||||
|
||||
module.exports = {
|
||||
levels,
|
||||
setLevel: (l) => (level = l),
|
||||
log: {
|
||||
parameters: (parameters) => {
|
||||
if (levels.HIGH > level) {
|
||||
return;
|
||||
}
|
||||
console.group();
|
||||
parameters.forEach((p) => console.log(`${p.name}:`, p.value));
|
||||
console.groupEnd();
|
||||
},
|
||||
functionName: (name) => {
|
||||
if (levels.MEDIUM > level) {
|
||||
return;
|
||||
}
|
||||
console.log(`\nEXECUTING: ${name}\n`);
|
||||
},
|
||||
flow: (flow) => {
|
||||
if (levels.LOW > level) {
|
||||
return;
|
||||
}
|
||||
console.log(`\n\n\nBEGIN FLOW: ${flow}\n\n\n`);
|
||||
},
|
||||
variable: ({ name, value }) => {
|
||||
if (levels.HIGH > level) {
|
||||
return;
|
||||
}
|
||||
console.group();
|
||||
console.group();
|
||||
console.log(`VARIABLE ${name}:`, value);
|
||||
console.groupEnd();
|
||||
console.groupEnd();
|
||||
},
|
||||
request: () => (req, res, next) => {
|
||||
if (levels.HIGH > level) {
|
||||
return next();
|
||||
}
|
||||
console.log('Hit URL', req.url, 'with following:');
|
||||
console.group();
|
||||
console.log('Query:', req.query);
|
||||
console.log('Body:', req.body);
|
||||
console.groupEnd();
|
||||
return next();
|
||||
},
|
||||
},
|
||||
};
|
||||
@@ -13,7 +13,6 @@ const openAIModels = {
|
||||
'gpt-4-32k-0613': 32758, // -10 from max
|
||||
'gpt-4-1106': 127500, // -500 from max
|
||||
'gpt-4-0125': 127500, // -500 from max
|
||||
'gpt-4.5': 127500, // -500 from max
|
||||
'gpt-4o': 127500, // -500 from max
|
||||
'gpt-4o-mini': 127500, // -500 from max
|
||||
'gpt-4o-2024-05-13': 127500, // -500 from max
|
||||
|
||||
@@ -103,16 +103,6 @@ describe('getModelMaxTokens', () => {
|
||||
);
|
||||
});
|
||||
|
||||
test('should return correct tokens for gpt-4.5 matches', () => {
|
||||
expect(getModelMaxTokens('gpt-4.5')).toBe(maxTokensMap[EModelEndpoint.openAI]['gpt-4.5']);
|
||||
expect(getModelMaxTokens('gpt-4.5-preview')).toBe(
|
||||
maxTokensMap[EModelEndpoint.openAI]['gpt-4.5'],
|
||||
);
|
||||
expect(getModelMaxTokens('openai/gpt-4.5-preview')).toBe(
|
||||
maxTokensMap[EModelEndpoint.openAI]['gpt-4.5'],
|
||||
);
|
||||
});
|
||||
|
||||
test('should return correct tokens for Anthropic models', () => {
|
||||
const models = [
|
||||
'claude-2.1',
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@librechat/frontend",
|
||||
"version": "v0.7.7",
|
||||
"version": "v0.7.7-rc1",
|
||||
"description": "",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
@@ -65,7 +65,7 @@
|
||||
"framer-motion": "^11.5.4",
|
||||
"html-to-image": "^1.11.11",
|
||||
"i18next": "^24.2.2",
|
||||
"i18next-browser-languagedetector": "^8.0.3",
|
||||
"i18next-browser-languagedetector": "^8.0.4",
|
||||
"input-otp": "^1.4.2",
|
||||
"js-cookie": "^3.0.5",
|
||||
"librechat-data-provider": "*",
|
||||
@@ -83,7 +83,7 @@
|
||||
"react-flip-toolkit": "^7.1.0",
|
||||
"react-gtm-module": "^2.0.11",
|
||||
"react-hook-form": "^7.43.9",
|
||||
"react-i18next": "^15.4.0",
|
||||
"react-i18next": "^15.4.1",
|
||||
"react-lazy-load-image-component": "^1.6.0",
|
||||
"react-markdown": "^9.0.1",
|
||||
"react-resizable-panels": "^2.1.7",
|
||||
@@ -101,9 +101,9 @@
|
||||
"remark-math": "^6.0.0",
|
||||
"remark-supersub": "^1.0.0",
|
||||
"sse.js": "^2.5.0",
|
||||
"tailwind-merge": "^1.9.1",
|
||||
"tailwindcss-animate": "^1.0.5",
|
||||
"tailwindcss-radix": "^2.8.0",
|
||||
"tailwind-merge": "^3.0.2",
|
||||
"tailwindcss-animate": "^1.0.7",
|
||||
"tailwindcss-radix": "^4.0.2",
|
||||
"zod": "^3.22.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -121,7 +121,7 @@
|
||||
"@types/node": "^20.3.0",
|
||||
"@types/react": "^18.2.11",
|
||||
"@types/react-dom": "^18.2.4",
|
||||
"@vitejs/plugin-react": "^4.2.1",
|
||||
"@vitejs/plugin-react": "^4.3.4",
|
||||
"autoprefixer": "^10.4.20",
|
||||
"babel-plugin-replace-ts-export-assignment": "^0.0.2",
|
||||
"babel-plugin-root-import": "^6.6.0",
|
||||
@@ -134,10 +134,8 @@
|
||||
"jest-environment-jsdom": "^29.7.0",
|
||||
"jest-file-loader": "^1.0.3",
|
||||
"jest-junit": "^16.0.0",
|
||||
"postcss": "^8.4.31",
|
||||
"postcss-loader": "^7.1.0",
|
||||
"postcss-preset-env": "^8.2.0",
|
||||
"tailwindcss": "^3.4.1",
|
||||
"tailwindcss": "^4.0.9",
|
||||
"@tailwindcss/vite": "^4.0.9",
|
||||
"ts-jest": "^29.2.5",
|
||||
"typescript": "^5.3.3",
|
||||
"vite": "^6.1.0",
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
module.exports = {
|
||||
plugins: [
|
||||
require('postcss-import'),
|
||||
require('postcss-preset-env'),
|
||||
require('tailwindcss'),
|
||||
require('autoprefixer'),
|
||||
],
|
||||
};
|
||||
@@ -35,7 +35,7 @@ const App = () => {
|
||||
<RouterProvider router={router} />
|
||||
<ReactQueryDevtools initialIsOpen={false} position="top-right" />
|
||||
<Toast />
|
||||
<RadixToast.Viewport className="pointer-events-none fixed inset-0 z-[1000] mx-auto my-2 flex max-w-[560px] flex-col items-stretch justify-start md:pb-5" />
|
||||
<RadixToast.Viewport className="pointer-events-none fixed inset-0 z-1000 mx-auto my-2 flex max-w-[560px] flex-col items-stretch justify-start md:pb-5" />
|
||||
</DndProvider>
|
||||
</ToastProvider>
|
||||
</RadixToast.Provider>
|
||||
|
||||
@@ -8,8 +8,8 @@ import {
|
||||
import { SandpackProviderProps } from '@codesandbox/sandpack-react/unstyled';
|
||||
import type { CodeEditorRef } from '@codesandbox/sandpack-react';
|
||||
import type { ArtifactFiles, Artifact } from '~/common';
|
||||
import { useEditArtifact, useGetStartupConfig } from '~/data-provider';
|
||||
import { sharedFiles, sharedOptions } from '~/utils/artifacts';
|
||||
import { useEditArtifact } from '~/data-provider';
|
||||
import { useEditorContext } from '~/Providers';
|
||||
|
||||
const createDebouncedMutation = (
|
||||
@@ -124,17 +124,6 @@ export const ArtifactCodeEditor = memo(function ({
|
||||
sharedProps: Partial<SandpackProviderProps>;
|
||||
editorRef: React.MutableRefObject<CodeEditorRef>;
|
||||
}) {
|
||||
const { data: config } = useGetStartupConfig();
|
||||
const options: typeof sharedOptions = useMemo(() => {
|
||||
if (!config) {
|
||||
return sharedOptions;
|
||||
}
|
||||
return {
|
||||
...sharedOptions,
|
||||
bundlerURL: config.bundlerURL,
|
||||
};
|
||||
}, [config]);
|
||||
|
||||
if (Object.keys(files).length === 0) {
|
||||
return null;
|
||||
}
|
||||
@@ -146,7 +135,7 @@ export const ArtifactCodeEditor = memo(function ({
|
||||
...files,
|
||||
...sharedFiles,
|
||||
}}
|
||||
options={options}
|
||||
options={{ ...sharedOptions }}
|
||||
{...sharedProps}
|
||||
template={template}
|
||||
>
|
||||
|
||||
@@ -7,7 +7,6 @@ import {
|
||||
import type { SandpackPreviewRef } from '@codesandbox/sandpack-react/unstyled';
|
||||
import type { ArtifactFiles } from '~/common';
|
||||
import { sharedFiles, sharedOptions } from '~/utils/artifacts';
|
||||
import { useGetStartupConfig } from '~/data-provider';
|
||||
import { useEditorContext } from '~/Providers';
|
||||
|
||||
export const ArtifactPreview = memo(function ({
|
||||
@@ -24,8 +23,6 @@ export const ArtifactPreview = memo(function ({
|
||||
previewRef: React.MutableRefObject<SandpackPreviewRef>;
|
||||
}) {
|
||||
const { currentCode } = useEditorContext();
|
||||
const { data: config } = useGetStartupConfig();
|
||||
|
||||
const artifactFiles = useMemo(() => {
|
||||
if (Object.keys(files).length === 0) {
|
||||
return files;
|
||||
@@ -41,17 +38,6 @@ export const ArtifactPreview = memo(function ({
|
||||
},
|
||||
};
|
||||
}, [currentCode, files, fileKey]);
|
||||
|
||||
const options: typeof sharedOptions = useMemo(() => {
|
||||
if (!config) {
|
||||
return sharedOptions;
|
||||
}
|
||||
return {
|
||||
...sharedOptions,
|
||||
bundlerURL: config.bundlerURL,
|
||||
};
|
||||
}, [config]);
|
||||
|
||||
if (Object.keys(artifactFiles).length === 0) {
|
||||
return null;
|
||||
}
|
||||
@@ -62,7 +48,7 @@ export const ArtifactPreview = memo(function ({
|
||||
...artifactFiles,
|
||||
...sharedFiles,
|
||||
}}
|
||||
options={options}
|
||||
options={{ ...sharedOptions }}
|
||||
{...sharedProps}
|
||||
template={template}
|
||||
>
|
||||
|
||||
@@ -31,7 +31,7 @@ export default function ArtifactTabs({
|
||||
ref={contentRef}
|
||||
value="code"
|
||||
id="artifacts-code"
|
||||
className={cn('flex-grow overflow-auto')}
|
||||
className={cn('grow overflow-auto')}
|
||||
>
|
||||
<ArtifactCodeEditor
|
||||
files={files}
|
||||
@@ -45,7 +45,7 @@ export default function ArtifactTabs({
|
||||
</Tabs.Content>
|
||||
<Tabs.Content
|
||||
value="preview"
|
||||
className={cn('flex-grow overflow-auto', isMermaid ? 'bg-[#282C34]' : 'bg-white')}
|
||||
className={cn('grow overflow-auto', isMermaid ? 'bg-[#282C34]' : 'bg-white')}
|
||||
>
|
||||
<ArtifactPreview
|
||||
files={files}
|
||||
|
||||
@@ -26,7 +26,7 @@ export const code: React.ElementType = memo(({ inline, className, children }: TC
|
||||
);
|
||||
}
|
||||
|
||||
return <code className={`hljs language-${lang} !whitespace-pre`}>{children}</code>;
|
||||
return <code className={`hljs language-${lang} whitespace-pre!`}>{children}</code>;
|
||||
});
|
||||
|
||||
export const CodeMarkdown = memo(
|
||||
|
||||
@@ -27,7 +27,7 @@ export function EdgeVoiceDropdown() {
|
||||
value={voice ?? ''}
|
||||
options={voices}
|
||||
onChange={handleVoiceChange}
|
||||
sizeClasses="min-w-[200px] !max-w-[400px] [--anchor-max-width:400px]"
|
||||
sizeClasses="min-w-[200px] max-w-[400px]! [--anchor-max-width:400px]"
|
||||
testId="EdgeVoiceDropdown"
|
||||
/>
|
||||
</div>
|
||||
@@ -55,7 +55,7 @@ export function BrowserVoiceDropdown() {
|
||||
value={voice ?? ''}
|
||||
options={voices}
|
||||
onChange={handleVoiceChange}
|
||||
sizeClasses="min-w-[200px] !max-w-[400px] [--anchor-max-width:400px]"
|
||||
sizeClasses="min-w-[200px] max-w-[400px]! [--anchor-max-width:400px]"
|
||||
testId="BrowserVoiceDropdown"
|
||||
/>
|
||||
</div>
|
||||
@@ -83,7 +83,7 @@ export function ExternalVoiceDropdown() {
|
||||
value={voice ?? ''}
|
||||
options={voices}
|
||||
onChange={handleVoiceChange}
|
||||
sizeClasses="min-w-[200px] !max-w-[400px] [--anchor-max-width:400px]"
|
||||
sizeClasses="min-w-[200px] max-w-[400px]! [--anchor-max-width:400px]"
|
||||
testId="ExternalVoiceDropdown"
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -74,7 +74,7 @@ function AuthLayout({
|
||||
<ThemeSelector />
|
||||
</div>
|
||||
|
||||
<div className="flex flex-grow items-center justify-center">
|
||||
<div className="flex grow items-center justify-center">
|
||||
<div className="w-authPageWidth overflow-hidden bg-white px-6 py-4 dark:bg-gray-900 sm:max-w-md sm:rounded-lg">
|
||||
{!hasStartupConfigError && !isFetching && (
|
||||
<h1
|
||||
|
||||
@@ -2,7 +2,7 @@ export const ErrorMessage = ({ children }: { children: React.ReactNode }) => (
|
||||
<div
|
||||
role="alert"
|
||||
aria-live="assertive"
|
||||
className="relative mt-6 rounded-lg border border-red-500/20 bg-red-50/50 px-6 py-4 text-red-700 shadow-sm transition-all dark:bg-red-950/30 dark:text-red-100"
|
||||
className="relative mt-6 rounded-lg border border-red-500/20 bg-red-50/50 px-6 py-4 text-red-700 shadow-2xs transition-all dark:bg-red-950/30 dark:text-red-100"
|
||||
>
|
||||
{children}
|
||||
</div>
|
||||
|
||||
@@ -98,7 +98,7 @@ const LoginForm: React.FC<TLoginFormProps> = ({ onSubmit, startupConfig, error,
|
||||
aria-invalid={!!errors.email}
|
||||
className="
|
||||
webkit-dark-styles transition-color peer w-full rounded-2xl border border-border-light
|
||||
bg-surface-primary px-3.5 pb-2.5 pt-3 text-text-primary duration-200 focus:border-green-500 focus:outline-none
|
||||
bg-surface-primary px-3.5 pb-2.5 pt-3 text-text-primary duration-200 focus:border-green-500 focus:outline-hidden
|
||||
"
|
||||
placeholder=" "
|
||||
/>
|
||||
@@ -108,7 +108,7 @@ const LoginForm: React.FC<TLoginFormProps> = ({ onSubmit, startupConfig, error,
|
||||
absolute start-3 top-1.5 z-10 origin-[0] -translate-y-4 scale-75 transform bg-surface-primary px-2 text-sm text-text-secondary-alt duration-200
|
||||
peer-placeholder-shown:top-1/2 peer-placeholder-shown:-translate-y-1/2 peer-placeholder-shown:scale-100
|
||||
peer-focus:top-1.5 peer-focus:-translate-y-4 peer-focus:scale-75 peer-focus:px-2 peer-focus:text-green-600 dark:peer-focus:text-green-500
|
||||
rtl:peer-focus:left-auto rtl:peer-focus:translate-x-1/4
|
||||
peer-focus:rtl:left-auto peer-focus:rtl:translate-x-1/4
|
||||
"
|
||||
>
|
||||
{useUsernameLogin
|
||||
@@ -133,7 +133,7 @@ const LoginForm: React.FC<TLoginFormProps> = ({ onSubmit, startupConfig, error,
|
||||
aria-invalid={!!errors.password}
|
||||
className="
|
||||
webkit-dark-styles transition-color peer w-full rounded-2xl border border-border-light
|
||||
bg-surface-primary px-3.5 pb-2.5 pt-3 text-text-primary duration-200 focus:border-green-500 focus:outline-none
|
||||
bg-surface-primary px-3.5 pb-2.5 pt-3 text-text-primary duration-200 focus:border-green-500 focus:outline-hidden
|
||||
"
|
||||
placeholder=" "
|
||||
/>
|
||||
@@ -143,7 +143,7 @@ const LoginForm: React.FC<TLoginFormProps> = ({ onSubmit, startupConfig, error,
|
||||
absolute start-3 top-1.5 z-10 origin-[0] -translate-y-4 scale-75 transform bg-surface-primary px-2 text-sm text-text-secondary-alt duration-200
|
||||
peer-placeholder-shown:top-1/2 peer-placeholder-shown:-translate-y-1/2 peer-placeholder-shown:scale-100
|
||||
peer-focus:top-1.5 peer-focus:-translate-y-4 peer-focus:scale-75 peer-focus:px-2 peer-focus:text-green-600 dark:peer-focus:text-green-500
|
||||
rtl:peer-focus:left-auto rtl:peer-focus:translate-x-1/4
|
||||
peer-focus:rtl:left-auto peer-focus:rtl:translate-x-1/4
|
||||
"
|
||||
>
|
||||
{localize('com_auth_password')}
|
||||
|
||||
@@ -71,7 +71,7 @@ const Registration: React.FC = () => {
|
||||
aria-invalid={!!errors[id]}
|
||||
className="
|
||||
webkit-dark-styles transition-color peer w-full rounded-2xl border border-border-light
|
||||
bg-surface-primary px-3.5 pb-2.5 pt-3 text-text-primary duration-200 focus:border-green-500 focus:outline-none
|
||||
bg-surface-primary px-3.5 pb-2.5 pt-3 text-text-primary duration-200 focus:border-green-500 focus:outline-hidden
|
||||
"
|
||||
placeholder=" "
|
||||
data-testid={id}
|
||||
@@ -82,7 +82,7 @@ const Registration: React.FC = () => {
|
||||
absolute start-3 top-1.5 z-10 origin-[0] -translate-y-4 scale-75 transform bg-surface-primary px-2 text-sm text-text-secondary-alt duration-200
|
||||
peer-placeholder-shown:top-1/2 peer-placeholder-shown:-translate-y-1/2 peer-placeholder-shown:scale-100
|
||||
peer-focus:top-1.5 peer-focus:-translate-y-4 peer-focus:scale-75 peer-focus:px-2 peer-focus:text-green-500
|
||||
rtl:peer-focus:left-auto rtl:peer-focus:translate-x-1/4
|
||||
peer-focus:rtl:left-auto peer-focus:rtl:translate-x-1/4
|
||||
"
|
||||
>
|
||||
{localize(label)}
|
||||
@@ -185,7 +185,7 @@ const Registration: React.FC = () => {
|
||||
aria-label="Submit registration"
|
||||
className="
|
||||
w-full rounded-2xl bg-green-600 px-4 py-3 text-sm font-medium text-white
|
||||
transition-colors hover:bg-green-700 focus:outline-none focus:ring-2
|
||||
transition-colors hover:bg-green-700 focus:outline-hidden focus:ring-2
|
||||
focus:ring-green-500 focus:ring-offset-2 disabled:opacity-50
|
||||
disabled:hover:bg-green-600 dark:bg-green-600 dark:hover:bg-green-700
|
||||
"
|
||||
|
||||
@@ -10,7 +10,7 @@ import { useLocalize } from '~/hooks';
|
||||
const BodyTextWrapper: FC<{ children: ReactNode }> = ({ children }) => {
|
||||
return (
|
||||
<div
|
||||
className="relative mt-6 rounded-lg border border-green-500/20 bg-green-50/50 px-6 py-4 text-green-700 shadow-sm transition-all dark:bg-green-950/30 dark:text-green-100"
|
||||
className="relative mt-6 rounded-lg border border-green-500/20 bg-green-50/50 px-6 py-4 text-green-700 shadow-2xs transition-all dark:bg-green-950/30 dark:text-green-100"
|
||||
role="alert"
|
||||
>
|
||||
{children}
|
||||
@@ -108,7 +108,7 @@ function RequestPasswordReset() {
|
||||
className="
|
||||
peer w-full rounded-lg border border-gray-300 bg-transparent px-4 py-3
|
||||
text-base text-gray-900 placeholder-transparent transition-all
|
||||
focus:border-green-500 focus:outline-none focus:ring-2 focus:ring-green-500/20
|
||||
focus:border-green-500 focus:outline-hidden focus:ring-2 focus:ring-green-500/20
|
||||
dark:border-gray-700 dark:text-white dark:focus:border-green-500
|
||||
"
|
||||
placeholder="email@example.com"
|
||||
@@ -138,7 +138,7 @@ function RequestPasswordReset() {
|
||||
disabled={!!errors.email}
|
||||
className="
|
||||
w-full rounded-2xl bg-green-600 px-4 py-3 text-sm font-medium text-white
|
||||
transition-colors hover:bg-green-700 focus:outline-none focus:ring-2
|
||||
transition-colors hover:bg-green-700 focus:outline-hidden focus:ring-2
|
||||
focus:ring-green-500 focus:ring-offset-2 disabled:opacity-50
|
||||
disabled:hover:bg-green-600 dark:bg-green-600 dark:hover:bg-green-700
|
||||
"
|
||||
|
||||
@@ -43,7 +43,7 @@ function ResetPassword() {
|
||||
<button
|
||||
onClick={() => navigate('/login')}
|
||||
aria-label={localize('com_auth_sign_in')}
|
||||
className="w-full transform rounded-2xl bg-green-500 px-4 py-3 tracking-wide text-white transition-colors duration-200 hover:bg-green-600 focus:bg-green-600 focus:outline-none"
|
||||
className="w-full transform rounded-2xl bg-green-500 px-4 py-3 tracking-wide text-white transition-colors duration-200 hover:bg-green-600 focus:bg-green-600 focus:outline-hidden"
|
||||
>
|
||||
{localize('com_auth_continue')}
|
||||
</button>
|
||||
@@ -91,7 +91,7 @@ function ResetPassword() {
|
||||
aria-invalid={!!errors.password}
|
||||
className="
|
||||
webkit-dark-styles transition-color peer w-full rounded-2xl border border-border-light
|
||||
bg-surface-primary px-3.5 pb-2.5 pt-3 text-text-primary duration-200 focus:border-green-500 focus:outline-none
|
||||
bg-surface-primary px-3.5 pb-2.5 pt-3 text-text-primary duration-200 focus:border-green-500 focus:outline-hidden
|
||||
"
|
||||
placeholder=" "
|
||||
/>
|
||||
@@ -101,7 +101,7 @@ function ResetPassword() {
|
||||
absolute start-3 top-1.5 z-10 origin-[0] -translate-y-4 scale-75 transform bg-surface-primary px-2 text-sm text-text-secondary-alt duration-200
|
||||
peer-placeholder-shown:top-1/2 peer-placeholder-shown:-translate-y-1/2 peer-placeholder-shown:scale-100
|
||||
peer-focus:top-1.5 peer-focus:-translate-y-4 peer-focus:scale-75 peer-focus:px-2 peer-focus:text-green-500
|
||||
rtl:peer-focus:left-auto rtl:peer-focus:translate-x-1/4
|
||||
peer-focus:rtl:left-auto peer-focus:rtl:translate-x-1/4
|
||||
"
|
||||
>
|
||||
{localize('com_auth_password')}
|
||||
@@ -126,7 +126,7 @@ function ResetPassword() {
|
||||
aria-invalid={!!errors.confirm_password}
|
||||
className="
|
||||
webkit-dark-styles transition-color peer w-full rounded-2xl border border-border-light
|
||||
bg-surface-primary px-3.5 pb-2.5 pt-3 text-text-primary duration-200 focus:border-green-500 focus:outline-none
|
||||
bg-surface-primary px-3.5 pb-2.5 pt-3 text-text-primary duration-200 focus:border-green-500 focus:outline-hidden
|
||||
"
|
||||
placeholder=" "
|
||||
/>
|
||||
@@ -136,7 +136,7 @@ function ResetPassword() {
|
||||
absolute start-3 top-1.5 z-10 origin-[0] -translate-y-4 scale-75 transform bg-surface-primary px-2 text-sm text-text-secondary-alt duration-200
|
||||
peer-placeholder-shown:top-1/2 peer-placeholder-shown:-translate-y-1/2 peer-placeholder-shown:scale-100
|
||||
peer-focus:top-1.5 peer-focus:-translate-y-4 peer-focus:scale-75 peer-focus:px-2 peer-focus:text-green-500
|
||||
rtl:peer-focus:left-auto rtl:peer-focus:translate-x-1/4
|
||||
peer-focus:rtl:left-auto peer-focus:rtl:translate-x-1/4
|
||||
"
|
||||
>
|
||||
{localize('com_auth_password_confirm')}
|
||||
@@ -165,7 +165,7 @@ function ResetPassword() {
|
||||
aria-label={localize('com_auth_submit_registration')}
|
||||
className="
|
||||
w-full rounded-2xl bg-green-600 px-4 py-3 text-sm font-medium text-white
|
||||
transition-colors hover:bg-green-700 focus:outline-none focus:ring-2
|
||||
transition-colors hover:bg-green-700 focus:outline-hidden focus:ring-2
|
||||
focus:ring-green-500 focus:ring-offset-2 disabled:opacity-50
|
||||
disabled:hover:bg-green-600 dark:bg-green-600 dark:hover:bg-green-700
|
||||
"
|
||||
|
||||
@@ -29,7 +29,7 @@ export const Banner = ({ onHeightChange }: { onHeightChange?: (height: number) =
|
||||
return (
|
||||
<div
|
||||
ref={bannerRef}
|
||||
className="sticky top-0 z-20 flex items-center bg-neutral-900 from-gray-700 to-gray-900 px-2 py-1 text-slate-50 dark:bg-gradient-to-r dark:text-white md:relative"
|
||||
className="sticky top-0 z-20 flex items-center bg-neutral-900 from-gray-700 to-gray-900 px-2 py-1 text-slate-50 dark:bg-linear-to-r dark:text-white md:relative"
|
||||
>
|
||||
<div
|
||||
className="w-full truncate px-4 text-center text-sm"
|
||||
|
||||
@@ -134,7 +134,7 @@ const BookmarkForm = ({
|
||||
id="bookmark-description"
|
||||
disabled={false}
|
||||
className={cn(
|
||||
'flex h-10 max-h-[250px] min-h-[100px] w-full resize-none rounded-lg border border-input bg-transparent px-3 py-2 text-sm ring-offset-background focus-visible:outline-none',
|
||||
'flex h-10 max-h-[250px] min-h-[100px] w-full resize-none rounded-lg border border-input bg-transparent px-3 py-2 text-sm ring-offset-background focus-visible:outline-hidden',
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -46,7 +46,7 @@ const BookmarkItem: FC<MenuItemProps> = ({ tag, selected, handleSubmit, icon, ..
|
||||
return (
|
||||
<MenuItem
|
||||
aria-label={tag as string}
|
||||
className="group flex w-full gap-2 rounded-lg p-2.5 text-sm text-text-primary transition-colors duration-200 focus:outline-none data-[focus]:bg-surface-secondary data-[focus]:ring-2 data-[focus]:ring-primary"
|
||||
className="group flex w-full gap-2 rounded-lg p-2.5 text-sm text-text-primary transition-colors duration-200 focus:outline-hidden data-focus:bg-surface-secondary data-focus:ring-2 data-focus:ring-primary"
|
||||
{...rest}
|
||||
as="button"
|
||||
onClick={clickHandler}
|
||||
|
||||
@@ -26,7 +26,7 @@ export default function AddedConvo({
|
||||
}
|
||||
return (
|
||||
<div className="flex items-start gap-4 py-2.5 pl-3 pr-1.5 text-sm">
|
||||
<span className="mt-0 flex h-6 w-6 flex-shrink-0 items-center justify-center">
|
||||
<span className="mt-0 flex h-6 w-6 shrink-0 items-center justify-center">
|
||||
<div className="icon-md">
|
||||
<EndpointIcon
|
||||
conversation={addedConvo}
|
||||
@@ -41,7 +41,7 @@ export default function AddedConvo({
|
||||
{title}
|
||||
</span>
|
||||
<button
|
||||
className="text-token-text-secondary flex-shrink-0"
|
||||
className="text-token-text-secondary shrink-0"
|
||||
type="button"
|
||||
aria-label="Close added conversation"
|
||||
onClick={() => setAddedConvo(null)}
|
||||
|
||||
@@ -183,7 +183,7 @@ const ChatForm = ({ index = 0 }) => {
|
||||
/>
|
||||
)}
|
||||
<PromptsCommand index={index} textAreaRef={textAreaRef} submitPrompt={submitPrompt} />
|
||||
<div className="transitional-all relative flex w-full flex-grow flex-col overflow-hidden rounded-3xl bg-surface-tertiary text-text-primary duration-200">
|
||||
<div className="transitional-all relative flex w-full grow flex-col overflow-hidden rounded-3xl bg-surface-tertiary text-text-primary duration-200">
|
||||
<TemporaryChat
|
||||
isTemporaryChat={isTemporaryChat}
|
||||
setIsTemporaryChat={setIsTemporaryChat}
|
||||
|
||||
@@ -30,7 +30,7 @@ const CollapseChat = ({
|
||||
onClick={() => setIsCollapsed(true)}
|
||||
className={cn(
|
||||
'absolute right-2 top-2 z-10 size-[35px] rounded-full p-2 transition-colors',
|
||||
'hover:bg-surface-hover focus:outline-none focus:ring-2 focus:ring-primary focus:ring-opacity-50',
|
||||
'hover:bg-surface-hover focus:outline-hidden focus:ring-2 focus:ring-primary focus:ring-opacity-50',
|
||||
)}
|
||||
>
|
||||
<Minimize2 className="h-full w-full" />
|
||||
|
||||
@@ -25,7 +25,7 @@ const AttachFile = ({
|
||||
aria-label={localize('com_sidepanel_attach_files')}
|
||||
disabled={isUploadDisabled}
|
||||
className={cn(
|
||||
'absolute flex size-[35px] items-center justify-center rounded-full p-1 transition-colors hover:bg-surface-hover focus:outline-none focus:ring-2 focus:ring-primary focus:ring-opacity-50',
|
||||
'absolute flex size-[35px] items-center justify-center rounded-full p-1 transition-colors hover:bg-surface-hover focus:outline-hidden focus:ring-2 focus:ring-primary focus:ring-opacity-50',
|
||||
isRTL ? 'bottom-2 right-2' : 'bottom-2 left-2',
|
||||
)}
|
||||
description={localize('com_sidepanel_attach_files')}
|
||||
|
||||
@@ -82,7 +82,7 @@ const AttachFile = ({ isRTL, disabled, handleFileChange }: AttachFileProps) => {
|
||||
id="attach-file-menu-button"
|
||||
aria-label="Attach File Options"
|
||||
className={cn(
|
||||
'absolute flex size-[35px] items-center justify-center rounded-full p-1 transition-colors hover:bg-surface-hover focus:outline-none focus:ring-2 focus:ring-primary focus:ring-opacity-50',
|
||||
'absolute flex size-[35px] items-center justify-center rounded-full p-1 transition-colors hover:bg-surface-hover focus:outline-hidden focus:ring-2 focus:ring-primary focus:ring-opacity-50',
|
||||
isRTL ? 'bottom-2 right-2' : 'bottom-2 left-1 md:left-2',
|
||||
)}
|
||||
>
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
export default function DragDropOverlay() {
|
||||
return (
|
||||
<div
|
||||
className="bg-surface-primary/85 fixed inset-0 z-[9999] flex flex-col items-center justify-center
|
||||
className="bg-surface-primary/85 fixed inset-0 z-9999 flex flex-col items-center justify-center
|
||||
gap-2 text-text-primary
|
||||
backdrop-blur-[4px] transition-all duration-200
|
||||
ease-in-out animate-in fade-in
|
||||
zoom-in-95 hover:backdrop-blur-sm"
|
||||
zoom-in-95 hover:backdrop-blur-xs"
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
|
||||
@@ -59,7 +59,7 @@ export function SortFilterHeader<TData, TValue>({
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent
|
||||
align="start"
|
||||
className="z-[1001] dark:border-gray-700 dark:bg-gray-850"
|
||||
className="z-1001 dark:border-gray-700 dark:bg-gray-850"
|
||||
>
|
||||
<DropdownMenuItem
|
||||
onClick={() => column.toggleSorting(false)}
|
||||
|
||||
@@ -70,7 +70,7 @@ export default function HeaderOptions({
|
||||
<Anchor>
|
||||
<div className="my-auto lg:max-w-2xl xl:max-w-3xl">
|
||||
<span className="flex w-full flex-col items-center justify-center gap-0 md:order-none md:m-auto md:gap-2">
|
||||
<div className="z-[61] flex w-full items-center justify-center gap-2">
|
||||
<div className="z-61 flex w-full items-center justify-center gap-2">
|
||||
{interfaceConfig?.modelSelect === true && !isAgentsEndpoint(endpoint) && (
|
||||
<ModelSelect
|
||||
conversation={conversation}
|
||||
|
||||
@@ -166,7 +166,7 @@ export default function Mention({
|
||||
autoFocus
|
||||
ref={inputRef}
|
||||
placeholder={localize(placeholder)}
|
||||
className="mb-1 w-full border-0 bg-white p-2 text-sm focus:outline-none dark:bg-gray-700 dark:text-gray-200"
|
||||
className="mb-1 w-full border-0 bg-white p-2 text-sm focus:outline-hidden dark:bg-gray-700 dark:text-gray-200"
|
||||
autoComplete="off"
|
||||
value={searchValue}
|
||||
onKeyDown={(e) => {
|
||||
|
||||
@@ -37,8 +37,8 @@ export default function MentionItem({
isActive === true ? 'bg-surface-active' : 'bg-transparent',
)}
>
<div className="flex h-5 w-5 flex-shrink-0 items-center justify-center">{icon}</div>
<div className="flex min-w-0 flex-grow items-center justify-between">
<div className="flex h-5 w-5 shrink-0 items-center justify-center">{icon}</div>
<div className="flex min-w-0 grow items-center justify-between">
<div className="truncate">
<span className="font-medium">{name}</span>
{description != null && description ? (
@@ -47,7 +47,7 @@ export default function MentionItem({
</span>
) : null}
</div>
<Clock4 size={16} className="ml-2 flex-shrink-0" />
<Clock4 size={16} className="ml-2 shrink-0" />
</div>
</div>
</button>

@@ -54,7 +54,7 @@ export default function OptionsPopover({
return (
<Portal>
<Content sideOffset={8} align="start" ref={popoverRef} asChild>
<div className="z-[70] flex w-screen flex-col items-center md:w-full md:px-4">
<div className="z-70 flex w-screen flex-col items-center md:w-full md:px-4">
<div
className={cn(
cardStyle,

@@ -203,7 +203,7 @@ function PromptsCommand({
autoFocus
ref={inputRef}
placeholder={localize('com_ui_command_usage_placeholder')}
className="mb-1 w-full border-0 bg-surface-tertiary-alt p-2 text-sm focus:outline-none dark:text-gray-200"
className="mb-1 w-full border-0 bg-surface-tertiary-alt p-2 text-sm focus:outline-hidden dark:text-gray-200"
autoComplete="off"
value={searchValue}
onKeyDown={(e) => {

@@ -16,7 +16,7 @@ export const TemporaryChat = ({ isTemporaryChat, setIsTemporaryChat }: Temporary
return (
<div className="divide-token-border-light m-1.5 flex flex-col divide-y overflow-hidden rounded-b-lg rounded-t-2xl bg-surface-secondary-alt">
<div className="flex items-start gap-4 py-2.5 pl-3 pr-1.5 text-sm">
<span className="mt-0 flex h-6 w-6 flex-shrink-0 items-center justify-center">
<span className="mt-0 flex h-6 w-6 shrink-0 items-center justify-center">
<div className="icon-md">
<MessageCircleDashed className="icon-md" aria-hidden="true" />
</div>
@@ -25,7 +25,7 @@ export const TemporaryChat = ({ isTemporaryChat, setIsTemporaryChat }: Temporary
{localize('com_ui_temporary_chat')}
</span>
<button
className="text-token-text-secondary flex-shrink-0"
className="text-token-text-secondary shrink-0"
type="button"
aria-label="Close temporary chat"
onClick={() => setIsTemporaryChat(false)}

@@ -98,7 +98,7 @@ const MenuItem: FC<MenuItemProps> = ({
role="option"
aria-selected={selected}
className={cn(
'group m-1.5 flex max-h-[40px] cursor-pointer gap-2 rounded px-5 py-2.5 !pr-3 text-sm !opacity-100 hover:bg-surface-hover',
'group m-1.5 flex max-h-[40px] cursor-pointer gap-2 rounded px-5 py-2.5 pr-3! text-sm opacity-100! hover:bg-surface-hover',
'radix-disabled:pointer-events-none radix-disabled:opacity-50',
)}
tabIndex={0}

@@ -57,7 +57,7 @@ const MenuItem: FC<MenuItemProps> = ({
id={selected ? 'selected-llm' : undefined}
role="option"
aria-selected={selected}
className="group m-1.5 flex cursor-pointer gap-2 rounded px-1 py-2.5 !pr-3 text-sm !opacity-100 hover:bg-black/5 focus:ring-0 radix-disabled:pointer-events-none radix-disabled:opacity-50 dark:hover:bg-white/5"
className="group m-1.5 flex cursor-pointer gap-2 rounded px-1 py-2.5 pr-3! text-sm opacity-100! hover:bg-black/5 focus:ring-0 radix-disabled:pointer-events-none radix-disabled:opacity-50 dark:hover:bg-white/5"
tabIndex={0}
{...rest}
onClick={clickHandler}

@@ -39,7 +39,7 @@ const PresetItems: FC<{
<>
<div
role="menuitem"
className="pointer-none group m-1.5 flex h-8 min-w-[170px] gap-2 rounded px-5 py-2.5 !pr-3 text-sm !opacity-100 focus:ring-0 radix-disabled:pointer-events-none radix-disabled:opacity-50 md:min-w-[240px]"
className="pointer-none group m-1.5 flex h-8 min-w-[170px] gap-2 rounded px-5 py-2.5 pr-3! text-sm opacity-100! focus:ring-0 radix-disabled:pointer-events-none radix-disabled:opacity-50 md:min-w-[240px]"
tabIndex={-1}
>
<div className="flex h-full grow items-center justify-end gap-2">
@@ -101,7 +101,7 @@ const PresetItems: FC<{
{presets && presets.length === 0 && (
<div
role="menuitem"
className="pointer-none group m-1.5 flex h-8 min-w-[170px] gap-2 rounded px-5 py-2.5 !pr-3 text-sm !opacity-100 focus:ring-0 radix-disabled:pointer-events-none radix-disabled:opacity-50 md:min-w-[240px]"
className="pointer-none group m-1.5 flex h-8 min-w-[170px] gap-2 rounded px-5 py-2.5 pr-3! text-sm opacity-100! focus:ring-0 radix-disabled:pointer-events-none radix-disabled:opacity-50 md:min-w-[240px]"
tabIndex={-1}
>
<div className="flex h-full grow items-center justify-end gap-2 text-gray-600 dark:text-gray-300">

@@ -38,7 +38,7 @@ const MenuItem: FC<MenuItemProps> = ({
aria-label={title}
data-testid="chat-menu-item"
className={cn(
'group m-1.5 flex cursor-pointer gap-2 rounded px-5 py-2.5 !pr-3 text-sm !opacity-100 hover:bg-black/5 focus:ring-0 radix-disabled:pointer-events-none radix-disabled:opacity-50 dark:hover:bg-gray-600 md:min-w-[240px]',
'group m-1.5 flex cursor-pointer gap-2 rounded px-5 py-2.5 pr-3! text-sm opacity-100! hover:bg-black/5 focus:ring-0 radix-disabled:pointer-events-none radix-disabled:opacity-50 dark:hover:bg-gray-600 md:min-w-[240px]',
className || '',
)}
tabIndex={0} // Change to 0 to make it focusable

@@ -109,9 +109,7 @@ const ContentParts = memo(
return val;
})
}
label={
isSubmitting && isLast ? localize('com_ui_thinking') : localize('com_ui_thoughts')
}
label={isSubmitting ? localize('com_ui_thinking') : localize('com_ui_thoughts')}
/>
</div>
)}

@@ -4,7 +4,7 @@ export default function DialogImage({ src = '', width = 1920, height = 1080 }) {
return (
<Dialog.Portal>
<Dialog.Overlay
className="radix-state-open:animate-show fixed inset-0 z-[100] flex items-center justify-center overflow-hidden bg-black/90 dark:bg-black/80"
className="radix-state-open:animate-show fixed inset-0 z-100 flex items-center justify-center overflow-hidden bg-black/90 dark:bg-black/80"
style={{ pointerEvents: 'auto' }}
>
<Dialog.Close asChild>
@@ -30,7 +30,7 @@ export default function DialogImage({ src = '', width = 1920, height = 1080 }) {
</button>
</Dialog.Close>
<Dialog.Content
className="radix-state-open:animate-contentShow relative max-h-[85vh] max-w-[90vw] shadow-xl focus:outline-none"
className="radix-state-open:animate-contentShow relative max-h-[85vh] max-w-[90vw] shadow-xl focus:outline-hidden"
tabIndex={-1}
style={{ pointerEvents: 'auto', aspectRatio: height > width ? 1 / 1.75 : 1.75 / 1 }}
>

@@ -150,7 +150,7 @@ const EditMessage = ({

return (
<Container message={message}>
<div className="bg-token-main-surface-primary relative flex w-full flex-grow flex-col overflow-hidden rounded-2xl border border-border-medium text-text-primary [&:has(textarea:focus)]:border-border-heavy [&:has(textarea:focus)]:shadow-[0_2px_6px_rgba(0,0,0,.05)]">
<div className="bg-token-main-surface-primary relative flex w-full grow flex-col overflow-hidden rounded-2xl border border-border-medium text-text-primary [&:has(textarea:focus)]:border-border-heavy [&:has(textarea:focus)]:shadow-[0_2px_6px_rgba(0,0,0,.05)]">
<TextareaAutosize
{...registerProps}
ref={(e) => {

@@ -29,7 +29,6 @@ const Image = ({
height,
width,
placeholderDimensions,
className,
}: {
imagePath: string;
altText: string;
@@ -39,7 +38,6 @@ const Image = ({
height?: string;
width?: string;
};
className?: string;
}) => {
const [isLoaded, setIsLoaded] = useState(false);
const containerRef = useRef<HTMLDivElement>(null);
@@ -59,12 +57,7 @@ const Image = ({
return (
<Dialog.Root>
<div ref={containerRef}>
<div
className={cn(
'relative mt-1 flex h-auto w-full max-w-lg items-center justify-center overflow-hidden bg-surface-active-alt text-text-secondary-alt',
className,
)}
>
<div className="relative mt-1 flex h-auto w-full max-w-lg items-center justify-center overflow-hidden bg-gray-200 text-gray-500 dark:bg-gray-700 dark:text-gray-400">
<Dialog.Trigger asChild>
<button type="button" aria-haspopup="dialog" aria-expanded="false">
<LazyLoadImage

@@ -12,13 +12,7 @@ export default function Attachment({ attachment }: { attachment?: TAttachment })

if (isImage) {
return (
<Image
altText={attachment.filename}
imagePath={filepath}
height={height}
width={width}
className="mb-4"
/>
<Image altText={attachment.filename} imagePath={filepath} height={height} width={width} />
);
}
return null;

@@ -146,7 +146,7 @@ const EditTextPart = ({

return (
<Container message={message}>
<div className="bg-token-main-surface-primary relative flex w-full flex-grow flex-col overflow-hidden rounded-2xl border border-border-medium text-text-primary [&:has(textarea:focus)]:border-border-heavy [&:has(textarea:focus)]:shadow-[0_2px_6px_rgba(0,0,0,.05)]">
<div className="bg-token-main-surface-primary relative flex w-full grow flex-col overflow-hidden rounded-2xl border border-border-medium text-text-primary [&:has(textarea:focus)]:border-border-heavy [&:has(textarea:focus)]:shadow-[0_2px_6px_rgba(0,0,0,.05)]">
<TextareaAutosize
{...registerProps}
ref={(e) => {

@@ -42,7 +42,7 @@ const LogLink: React.FC<LogLinkProps> = ({ href, filename, children }) => {
onClick={handleDownload}
target="_blank"
rel="noopener noreferrer"
className="!text-blue-400 visited:!text-purple-400 hover:underline"
className="text-blue-400! visited:text-purple-400! hover:underline"
>
{children}
</a>

@@ -48,7 +48,7 @@ export default function ToolPopover({
<div className="mb-2 text-sm font-medium text-text-primary">{title}</div>
<div className="bg-token-surface-secondary text-token-text-primary dark rounded-md text-xs">
<div className="max-h-32 overflow-y-auto rounded-md bg-surface-tertiary p-2">
<code className="!whitespace-pre-wrap ">{formatText(input)}</code>
<code className="whitespace-pre-wrap! ">{formatText(input)}</code>
</div>
</div>
{output != null && output && (
@@ -58,7 +58,7 @@ export default function ToolPopover({
</div>
<div className="bg-token-surface-secondary text-token-text-primary dark rounded-md text-xs">
<div className="max-h-32 overflow-y-auto rounded-md bg-surface-tertiary p-2">
<code className="!whitespace-pre-wrap ">{formatText(output)}</code>
<code className="whitespace-pre-wrap! ">{formatText(output)}</code>
</div>
</div>
</>

@@ -71,7 +71,7 @@ export default function HoverButtons({
return (
<button
className={cn(
'hover-button active rounded-md p-1 hover:bg-gray-100 hover:text-gray-500 focus:opacity-100 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:invisible md:group-hover:visible md:group-[.final-completion]:visible',
'hover-button active rounded-md p-1 hover:bg-gray-100 hover:text-gray-500 focus:opacity-100 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 dark:disabled:hover:text-gray-400 md:invisible md:group-hover:visible md:group-[.final-completion]:visible',
!isLast ? 'md:opacity-0 md:group-hover:opacity-100' : '',
)}
onClick={regenerate}
@@ -79,7 +79,7 @@ export default function HoverButtons({
title={localize('com_ui_regenerate')}
>
<RegenerateIcon
className="hover:text-gray-500 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400"
className="hover:text-gray-500 dark:hover:text-gray-200 dark:disabled:hover:text-gray-400"
size="19"
/>
</button>
@@ -110,7 +110,7 @@ export default function HoverButtons({
content={message.content ?? message.text}
isLast={isLast}
className={cn(
'ml-0 flex items-center gap-1.5 rounded-md p-1 text-xs hover:bg-gray-100 hover:text-gray-500 focus:opacity-100 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
'ml-0 flex items-center gap-1.5 rounded-md p-1 text-xs hover:bg-gray-100 hover:text-gray-500 focus:opacity-100 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 dark:disabled:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
)}
/>
)}
@@ -118,7 +118,7 @@ export default function HoverButtons({
<button
id={`edit-${message.messageId}`}
className={cn(
'hover-button rounded-md p-1 hover:bg-gray-100 hover:text-gray-500 focus:opacity-100 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
'hover-button rounded-md p-1 hover:bg-gray-100 hover:text-gray-500 focus:opacity-100 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 dark:disabled:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
isCreatedByUser ? '' : 'active',
hideEditButton ? 'opacity-0' : '',
isEditing ? 'active text-gray-700 dark:text-gray-200' : '',
@@ -134,7 +134,7 @@ export default function HoverButtons({
)}
<button
className={cn(
'ml-0 flex items-center gap-1.5 rounded-md p-1 text-xs hover:bg-gray-100 hover:text-gray-500 focus:opacity-100 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
'ml-0 flex items-center gap-1.5 rounded-md p-1 text-xs hover:bg-gray-100 hover:text-gray-500 focus:opacity-100 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 dark:disabled:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
isSubmitting && isCreatedByUser ? 'md:opacity-0 md:group-hover:opacity-100' : '',
!isLast ? 'md:opacity-0 md:group-hover:opacity-100' : '',
)}
@@ -157,14 +157,14 @@ export default function HoverButtons({
{continueSupported === true ? (
<button
className={cn(
'hover-button active rounded-md p-1 hover:bg-gray-100 hover:text-gray-500 focus:opacity-100 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:invisible md:group-hover:visible',
'hover-button active rounded-md p-1 hover:bg-gray-100 hover:text-gray-500 focus:opacity-100 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 dark:disabled:hover:text-gray-400 md:invisible md:group-hover:visible',
!isLast ? 'md:opacity-0 md:group-hover:opacity-100' : '',
)}
onClick={handleContinue}
type="button"
title={localize('com_ui_continue')}
>
<ContinueIcon className="h-4 w-4 hover:text-gray-500 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400" />
<ContinueIcon className="h-4 w-4 hover:text-gray-500 dark:hover:text-gray-200 dark:disabled:hover:text-gray-400" />
</button>
) : null}
</div>

@@ -80,7 +80,7 @@ export default function Message(props: TMessageProps) {
>
<div className="m-auto justify-center p-4 py-2 md:gap-6 ">
<div className="group mx-auto flex flex-1 gap-3 md:max-w-3xl md:px-5 lg:max-w-[40rem] lg:px-1 xl:max-w-[48rem] xl:px-5">
<div className="relative flex flex-shrink-0 flex-col items-end">
<div className="relative flex shrink-0 flex-col items-end">
<div>
<div className="pt-0.5">
<div className="shadow-stroke flex h-6 w-6 items-center justify-center overflow-hidden rounded-full">
@@ -97,7 +97,7 @@ export default function Message(props: TMessageProps) {
>
<div className={cn('select-none font-semibold', fontSize)}>{name}</div>
<div className="flex-col gap-1 md:gap-3">
<div className="flex max-w-full flex-grow flex-col gap-0">
<div className="flex max-w-full grow flex-col gap-0">
<ContentParts
isLast={isLast}
isSubmitting={isSubmitting}

@@ -72,7 +72,7 @@ export default function MessagesView({
)}
<div
id="messages-end"
className="group h-0 w-full flex-shrink-0"
className="group h-0 w-full shrink-0"
ref={messagesEndRef}
/>
</div>

@@ -15,7 +15,7 @@ export default function MinimalHoverButtons({ message }: THoverButtons) {
return (
<div className="visible mt-0 flex justify-center gap-1 self-end text-gray-400 lg:justify-start">
<button
className="ml-0 flex items-center gap-1.5 rounded-md p-1 text-xs hover:text-gray-900 dark:text-gray-400/70 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible"
className="ml-0 flex items-center gap-1.5 rounded-md p-1 text-xs hover:text-gray-900 dark:text-gray-400/70 dark:hover:text-gray-200 dark:disabled:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible"
onClick={() => copyToClipboard(setIsCopied)}
type="button"
title={

@@ -27,7 +27,7 @@ const MinimalMessages = React.forwardRef(
>
<div className="flex flex-col pb-9 text-sm dark:bg-transparent">
{props.children}
<div className="dark:gpt-dark-gray group h-0 w-full flex-shrink-0 dark:border-gray-800/50" />
<div className="dark:gpt-dark-gray group h-0 w-full shrink-0 dark:border-gray-800/50" />
</div>
</div>
</div>

@@ -46,7 +46,7 @@ export default function Message({ message }: Pick<TMessageProps, 'message'>) {
<div className="text-token-text-primary w-full border-0 bg-transparent dark:border-0 dark:bg-transparent">
<div className="m-auto justify-center p-4 py-2 md:gap-6 ">
<div className="final-completion group mx-auto flex flex-1 gap-3 md:max-w-3xl md:px-5 lg:max-w-[40rem] lg:px-1 xl:max-w-[48rem] xl:px-5">
<div className="relative flex flex-shrink-0 flex-col items-end">
<div className="relative flex shrink-0 flex-col items-end">
<div>
<div className="pt-0.5">
<div className="flex h-6 w-6 items-center justify-center overflow-hidden rounded-full">
@@ -60,7 +60,7 @@ export default function Message({ message }: Pick<TMessageProps, 'message'>) {
>
<div className={cn('select-none font-semibold', fontSize)}>{messageLabel}</div>
<div className="flex-col gap-1 md:gap-3">
<div className="flex max-w-full flex-grow flex-col gap-0">
<div className="flex max-w-full grow flex-col gap-0">
<SearchContent message={message} />
</div>
</div>

@@ -26,7 +26,7 @@ export default function SiblingSwitch({
<div className="visible flex items-center justify-center gap-1 self-center pt-0 text-xs">
<button
className={cn(
'hover-button rounded-md p-1 text-gray-400 hover:bg-gray-100 hover:text-gray-500 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
'hover-button rounded-md p-1 text-gray-400 hover:bg-gray-100 hover:text-gray-500 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 dark:disabled:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
)}
type="button"
onClick={previous}
@@ -47,12 +47,12 @@ export default function SiblingSwitch({
<polyline points="15 18 9 12 15 6" />
</svg>
</button>
<span className="flex-shrink-0 flex-grow tabular-nums">
<span className="shrink-0 grow tabular-nums">
{siblingIdx + 1} / {siblingCount}
</span>
<button
className={cn(
'hover-button rounded-md p-1 text-gray-400 hover:bg-gray-100 hover:text-gray-500 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
'hover-button rounded-md p-1 text-gray-400 hover:bg-gray-100 hover:text-gray-500 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 dark:disabled:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
)}
type="button"
onClick={next}

@@ -128,7 +128,7 @@ const MessageRender = memo(
layoutClasses,
latestCardClasses,
showRenderClasses,
'message-render focus:outline-none focus:ring-2 focus:ring-border-xheavy',
'message-render focus:outline-hidden focus:ring-2 focus:ring-border-xheavy',
)}
onClick={clickHandler}
onKeyDown={(e) => {
@@ -142,7 +142,7 @@ const MessageRender = memo(
{isLatestCard === true && (
<div className="absolute right-0 top-0 m-2 h-3 w-3 rounded-full bg-text-primary"></div>
)}
<div className="relative flex flex-shrink-0 flex-col items-end">
<div className="relative flex shrink-0 flex-col items-end">
<div>
<div className="pt-0.5">
<div className="flex h-6 w-6 items-center justify-center overflow-hidden rounded-full">
@@ -159,7 +159,7 @@ const MessageRender = memo(
>
<h2 className={cn('select-none font-semibold', fontSize)}>{messageLabel}</h2>
<div className="flex-col gap-1 md:gap-3">
<div className="flex max-w-full flex-grow flex-col gap-0">
<div className="flex max-w-full grow flex-col gap-0">
<MessageContext.Provider
value={{
messageId: msg.messageId,

@@ -154,7 +154,7 @@ export default function Conversation({
<input
ref={inputRef}
type="text"
className="w-full rounded bg-transparent p-0.5 text-sm leading-tight focus-visible:outline-none"
className="w-full rounded bg-transparent p-0.5 text-sm leading-tight focus-visible:outline-hidden"
value={titleInput ?? ''}
onChange={(e) => setTitleInput(e.target.value)}
onKeyDown={handleKeyDown}
@@ -210,9 +210,9 @@ export default function Conversation({
{title}
</div>
{isActiveConvo ? (
<div className="absolute bottom-0 right-0 top-0 w-20 rounded-r-lg bg-gradient-to-l" />
<div className="absolute bottom-0 right-0 top-0 w-20 rounded-r-lg bg-linear-to-l" />
) : (
<div className="absolute bottom-0 right-0 top-0 w-20 rounded-r-lg bg-gradient-to-l from-surface-primary-alt from-0% to-transparent group-hover:from-surface-active-alt group-hover:from-40%" />
<div className="absolute bottom-0 right-0 top-0 w-20 rounded-r-lg bg-linear-to-l from-surface-primary-alt from-0% to-transparent group-hover:from-surface-active-alt group-hover:from-40%" />
)}
</a>
)}

Some files were not shown because too many files have changed in this diff