Compare commits: feat/Multi...fix/openid

20 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 5f7dc13c30 |  |
|  | ac2e1b1586 |  |
|  | 45e4e70986 |  |
|  | deb8a00e27 |  |
|  | b45ff8e4ed |  |
|  | fc8d24fa5b |  |
|  | 449d9b7613 |  |
|  | ddb0a7a216 |  |
|  | b2f44fc90f |  |
|  | cede5d120c |  |
|  | ed9ab8842a |  |
|  | b344ed12a1 |  |
|  | afee1a2cbd |  |
|  | 0dbbf7de04 |  |
|  | bf80cf30b3 |  |
|  | d47d827ed9 |  |
|  | 5be446edff |  |
|  | 2265413387 |  |
|  | 7e98702a87 |  |
|  | a2f330e6ca |  |
.env.example (51 changed lines)

@@ -20,8 +20,8 @@ DOMAIN_CLIENT=http://localhost:3080
DOMAIN_SERVER=http://localhost:3080

NO_INDEX=true
# Use the address that is at most n number of hops away from the Express application.
# req.socket.remoteAddress is the first hop, and the rest are looked for in the X-Forwarded-For header from right to left.
# Use the address that is at most n number of hops away from the Express application.
# req.socket.remoteAddress is the first hop, and the rest are looked for in the X-Forwarded-For header from right to left.
# A value of 0 means that the first untrusted address would be req.socket.remoteAddress, i.e. there is no reverse proxy.
# Defaulted to 1.
TRUST_PROXY=1

@@ -88,7 +88,7 @@ PROXY=
#============#

ANTHROPIC_API_KEY=user_provided
# ANTHROPIC_MODELS=claude-3-7-sonnet-latest,claude-3-7-sonnet-20250219,claude-3-5-haiku-20241022,claude-3-5-sonnet-20241022,claude-3-5-sonnet-latest,claude-3-5-sonnet-20240620,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307,claude-2.1,claude-2,claude-1.2,claude-1,claude-1-100k,claude-instant-1,claude-instant-1-100k
# ANTHROPIC_MODELS=claude-opus-4-20250514,claude-sonnet-4-20250514,claude-3-7-sonnet-20250219,claude-3-5-sonnet-20241022,claude-3-5-haiku-20241022,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307
# ANTHROPIC_REVERSE_PROXY=

#============#

@@ -444,6 +444,21 @@ OPENID_IMAGE_URL=
# This will bypass the login form completely for users, only use this if OpenID is your only authentication method
OPENID_AUTO_REDIRECT=false

# Set to true to use PKCE (Proof Key for Code Exchange) for OpenID authentication
OPENID_USE_PKCE=false
# Set to true to reuse openid tokens for authentication management instead of using the mongodb session and the custom refresh token.
OPENID_REUSE_TOKENS=
# By default, signing key verification results are cached in order to prevent excessive HTTP requests to the JWKS endpoint.
# If a signing key matching the kid is found, this will be cached and the next time this kid is requested the signing key will be served from the cache.
# Default is true.
OPENID_JWKS_URL_CACHE_ENABLED=
OPENID_JWKS_URL_CACHE_TIME= # 600000 ms eq to 10 minutes; leave empty to disable caching
# Set to true to trigger token exchange flow to acquire access token for the userinfo endpoint.
OPENID_ON_BEHALF_FLOW_FOR_USERINFRO_REQUIRED=
OPENID_ON_BEHALF_FLOW_USERINFRO_SCOPE="user.read" # example for scope needed for Microsoft Graph API
# Set to true to use the OpenID Connect end session endpoint for logout
OPENID_USE_END_SESSION_ENDPOINT=

# LDAP
LDAP_URL=
LDAP_BIND_DN=

@@ -575,3 +590,33 @@ HELP_AND_FAQ_URL=https://librechat.ai
# OpenWeather #
#=====================================================#
OPENWEATHER_API_KEY=

#====================================#
# LibreChat Code Interpreter API #
#====================================#

# https://code.librechat.ai
# LIBRECHAT_CODE_API_KEY=your-key

#======================#
# Web Search #
#======================#

# Note: All of the following variable names can be customized.
# Omit values to allow user to provide them.

# For more information on configuration values, see:
# https://librechat.ai/docs/features/web_search

# Search Provider (Required)
# SERPER_API_KEY=your_serper_api_key

# Scraper (Required)
# FIRECRAWL_API_KEY=your_firecrawl_api_key
# Optional: Custom Firecrawl API URL
# FIRECRAWL_API_URL=your_firecrawl_api_url

# Reranker (Required)
# JINA_API_KEY=your_jina_api_key
# or
# COHERE_API_KEY=your_cohere_api_key
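The new OpenID flags arrive as plain environment strings, so the server has to coerce them before use. A minimal sketch of how they might be read, using the variable names and documented defaults from the diff above; the `isEnabled` helper and the config object are illustrative assumptions, not LibreChat's actual code:

```js
// Illustrative parsing of the new OpenID settings; isEnabled is a stand-in helper.
const isEnabled = (value) => ['true', '1', 'yes'].includes(String(value).toLowerCase());

const openIdConfig = {
  usePKCE: isEnabled(process.env.OPENID_USE_PKCE),
  reuseTokens: isEnabled(process.env.OPENID_REUSE_TOKENS),
  jwksCacheEnabled: process.env.OPENID_JWKS_URL_CACHE_ENABLED !== 'false', // documented default: true
  jwksCacheTime: Number(process.env.OPENID_JWKS_URL_CACHE_TIME) || 600000, // 10 minutes
  onBehalfFlowRequired: isEnabled(process.env.OPENID_ON_BEHALF_FLOW_FOR_USERINFRO_REQUIRED),
  onBehalfFlowScope: process.env.OPENID_ON_BEHALF_FLOW_USERINFRO_SCOPE || 'user.read',
  useEndSessionEndpoint: isEnabled(process.env.OPENID_USE_END_SESSION_ENDPOINT),
};
```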
.github/workflows/helmcharts.yml (4 changed lines, vendored)

@@ -26,6 +26,10 @@ jobs:
        uses: azure/setup-helm@v4
        env:
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
      - name: Build Subchart Deps
        run: |
          cd helm/librechat-rag-api
          helm dependency build

      - name: Run chart-releaser
        uses: helm/chart-releaser-action@v1.6.0
.gitignore (3 changed lines, vendored)

@@ -52,8 +52,9 @@ bower_components/
*.d.ts
!vite-env.d.ts

# Cline
# AI
.clineignore
.cursor

# Floobits
.floo
@@ -71,6 +71,11 @@
- [Model Context Protocol (MCP) Support](https://modelcontextprotocol.io/clients#librechat) for Tools
- Use LibreChat Agents and OpenAI Assistants with Files, Code Interpreter, Tools, and API Actions

- 🔍 **Web Search**:
  - Search the internet and retrieve relevant information to enhance your AI context
  - Combines search providers, content scrapers, and result rerankers for optimal results
  - **[Learn More →](https://www.librechat.ai/docs/features/web_search)**

- 🪄 **Generative UI with Code Artifacts**:
  - [Code Artifacts](https://youtu.be/GfTj7O4gmd0?si=WJbdnemZpJzBrJo3) allow creation of React, HTML, and Mermaid diagrams directly in chat
@@ -70,7 +70,7 @@ class AnthropicClient extends BaseClient {
    this.message_delta;
    /** Whether the model is part of the Claude 3 Family
     * @type {boolean} */
    this.isClaude3;
    this.isClaudeLatest;
    /** Whether to use Messages API or Completions API
     * @type {boolean} */
    this.useMessages;

@@ -116,7 +116,8 @@ class AnthropicClient extends BaseClient {
    );

    const modelMatch = matchModelName(this.modelOptions.model, EModelEndpoint.anthropic);
    this.isClaude3 = modelMatch.includes('claude-3');
    this.isClaudeLatest =
      /claude-[3-9]/.test(modelMatch) || /claude-(?:sonnet|opus|haiku)-[4-9]/.test(modelMatch);
    this.isLegacyOutput = !(
      /claude-3[-.]5-sonnet/.test(modelMatch) || /claude-3[-.]7/.test(modelMatch)
    );

@@ -130,7 +131,7 @@ class AnthropicClient extends BaseClient {
      this.modelOptions.maxOutputTokens = legacy.maxOutputTokens.default;
    }

    this.useMessages = this.isClaude3 || !!this.options.attachments;
    this.useMessages = this.isClaudeLatest || !!this.options.attachments;

    this.defaultVisionModel = this.options.visionModel ?? 'claude-3-sonnet-20240229';
    this.options.attachments?.then((attachments) => this.checkVisionRequest(attachments));

@@ -654,7 +655,10 @@ class AnthropicClient extends BaseClient {
      );
    };

    if (this.modelOptions.model.includes('claude-3')) {
    if (
      /claude-[3-9]/.test(this.modelOptions.model) ||
      /claude-(?:sonnet|opus|haiku)-[4-9]/.test(this.modelOptions.model)
    ) {
      await buildMessagesPayload();
      processTokens();
      return {
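The key change above is the model check: instead of `model.includes('claude-3')`, the client now treats both number-first names (`claude-3-*`, `claude-3.5-*`) and the name-first Claude 4 scheme (`claude-sonnet-4-*`, `claude-opus-4-*`) as "latest" models that use the Messages API. A standalone sketch of that predicate, using only the regexes from the diff (the surrounding client wiring and matchModelName are omitted):

```js
// Minimal sketch of the model-family check introduced above (not LibreChat's actual helper).
const isClaudeLatest = (model) =>
  /claude-[3-9]/.test(model) || /claude-(?:sonnet|opus|haiku)-[4-9]/.test(model);

console.log(isClaudeLatest('claude-3-5-sonnet-20241022')); // true  (matches /claude-[3-9]/)
console.log(isClaudeLatest('claude-sonnet-4-20250514'));   // true  (matches the name-first Claude 4 pattern)
console.log(isClaudeLatest('claude-2.1'));                 // false (legacy Completions-era model)
```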
@@ -15,7 +15,7 @@ describe('AnthropicClient', () => {
      {
        role: 'user',
        isCreatedByUser: true,
        text: 'What\'s up',
        text: "What's up",
        messageId: '3',
        parentMessageId: '2',
      },

@@ -170,7 +170,7 @@ describe('AnthropicClient', () => {
    client.options.modelLabel = 'Claude-2';
    const result = await client.buildMessages(messages, parentMessageId);
    const { prompt } = result;
    expect(prompt).toContain('Human\'s name: John');
    expect(prompt).toContain("Human's name: John");
    expect(prompt).toContain('You are Claude-2');
  });
});
@@ -244,6 +244,64 @@ describe('AnthropicClient', () => {
      );
    });

    describe('Claude 4 model headers', () => {
      it('should add "prompt-caching" beta header for claude-sonnet-4 model', () => {
        const client = new AnthropicClient('test-api-key');
        const modelOptions = {
          model: 'claude-sonnet-4-20250514',
        };
        client.setOptions({ modelOptions, promptCache: true });
        const anthropicClient = client.getClient(modelOptions);
        expect(anthropicClient._options.defaultHeaders).toBeDefined();
        expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
        expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
          'prompt-caching-2024-07-31',
        );
      });

      it('should add "prompt-caching" beta header for claude-opus-4 model', () => {
        const client = new AnthropicClient('test-api-key');
        const modelOptions = {
          model: 'claude-opus-4-20250514',
        };
        client.setOptions({ modelOptions, promptCache: true });
        const anthropicClient = client.getClient(modelOptions);
        expect(anthropicClient._options.defaultHeaders).toBeDefined();
        expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
        expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
          'prompt-caching-2024-07-31',
        );
      });

      it('should add "prompt-caching" beta header for claude-4-sonnet model', () => {
        const client = new AnthropicClient('test-api-key');
        const modelOptions = {
          model: 'claude-4-sonnet-20250514',
        };
        client.setOptions({ modelOptions, promptCache: true });
        const anthropicClient = client.getClient(modelOptions);
        expect(anthropicClient._options.defaultHeaders).toBeDefined();
        expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
        expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
          'prompt-caching-2024-07-31',
        );
      });

      it('should add "prompt-caching" beta header for claude-4-opus model', () => {
        const client = new AnthropicClient('test-api-key');
        const modelOptions = {
          model: 'claude-4-opus-20250514',
        };
        client.setOptions({ modelOptions, promptCache: true });
        const anthropicClient = client.getClient(modelOptions);
        expect(anthropicClient._options.defaultHeaders).toBeDefined();
        expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
        expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
          'prompt-caching-2024-07-31',
        );
      });
    });

    it('should not add beta header for claude-3-5-sonnet-latest model', () => {
      const client = new AnthropicClient('test-api-key');
      const modelOptions = {
@@ -729,4 +787,223 @@ describe('AnthropicClient', () => {
    expect(capturedOptions).toHaveProperty('topK', 10);
    expect(capturedOptions).toHaveProperty('topP', 0.9);
  });

  describe('isClaudeLatest', () => {
    it('should set isClaudeLatest to true for claude-3 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-3-sonnet-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to true for claude-3.5 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-3.5-sonnet-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to true for claude-sonnet-4 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-sonnet-4-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to true for claude-opus-4 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-opus-4-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to true for claude-3.5-haiku models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-3.5-haiku-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to false for claude-2 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-2',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });

    it('should set isClaudeLatest to false for claude-instant models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-instant',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });

    it('should set isClaudeLatest to false for claude-sonnet-3 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-sonnet-3-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });

    it('should set isClaudeLatest to false for claude-opus-3 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-opus-3-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });

    it('should set isClaudeLatest to false for claude-haiku-3 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-haiku-3-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });
  });

  describe('configureReasoning', () => {
    it('should enable thinking for claude-opus-4 and claude-sonnet-4 models', async () => {
      const client = new AnthropicClient('test-api-key');
      // Create a mock async generator function
      async function* mockAsyncGenerator() {
        yield { type: 'message_start', message: { usage: {} } };
        yield { delta: { text: 'Test response' } };
        yield { type: 'message_delta', usage: {} };
      }

      // Mock createResponse to return the async generator
      jest.spyOn(client, 'createResponse').mockImplementation(() => {
        return mockAsyncGenerator();
      });

      // Test claude-opus-4
      client.setOptions({
        modelOptions: {
          model: 'claude-opus-4-20250514',
        },
        thinking: true,
        thinkingBudget: 2000,
      });

      let capturedOptions = null;
      jest.spyOn(client, 'getClient').mockImplementation((options) => {
        capturedOptions = options;
        return {};
      });

      const payload = [{ role: 'user', content: 'Test message' }];
      await client.sendCompletion(payload, {});

      expect(capturedOptions).toHaveProperty('thinking');
      expect(capturedOptions.thinking).toEqual({
        type: 'enabled',
        budget_tokens: 2000,
      });

      // Test claude-sonnet-4
      client.setOptions({
        modelOptions: {
          model: 'claude-sonnet-4-20250514',
        },
        thinking: true,
        thinkingBudget: 2000,
      });

      await client.sendCompletion(payload, {});

      expect(capturedOptions).toHaveProperty('thinking');
      expect(capturedOptions.thinking).toEqual({
        type: 'enabled',
        budget_tokens: 2000,
      });
    });
  });
});

describe('Claude Model Tests', () => {
  it('should handle Claude 3 and 4 series models correctly', () => {
    const client = new AnthropicClient('test-key');
    // Claude 3 series models
    const claude3Models = [
      'claude-3-opus-20240229',
      'claude-3-sonnet-20240229',
      'claude-3-haiku-20240307',
      'claude-3-5-sonnet-20240620',
      'claude-3-5-haiku-20240620',
      'claude-3.5-sonnet-20240620',
      'claude-3.5-haiku-20240620',
      'claude-3.7-sonnet-20240620',
      'claude-3.7-haiku-20240620',
      'anthropic/claude-3-opus-20240229',
      'claude-3-opus-20240229/anthropic',
    ];

    // Claude 4 series models
    const claude4Models = [
      'claude-sonnet-4-20250514',
      'claude-opus-4-20250514',
      'claude-4-sonnet-20250514',
      'claude-4-opus-20250514',
      'anthropic/claude-sonnet-4-20250514',
      'claude-sonnet-4-20250514/anthropic',
    ];

    // Test Claude 3 series
    claude3Models.forEach((model) => {
      client.setOptions({ modelOptions: { model } });
      expect(
        /claude-[3-9]/.test(client.modelOptions.model) ||
          /claude-(?:sonnet|opus|haiku)-[4-9]/.test(client.modelOptions.model),
      ).toBe(true);
    });

    // Test Claude 4 series
    claude4Models.forEach((model) => {
      client.setOptions({ modelOptions: { model } });
      expect(
        /claude-[3-9]/.test(client.modelOptions.model) ||
          /claude-(?:sonnet|opus|haiku)-[4-9]/.test(client.modelOptions.model),
      ).toBe(true);
    });

    // Test non-Claude 3/4 models
    const nonClaudeModels = ['claude-2', 'claude-instant', 'gpt-4', 'gpt-3.5-turbo'];

    nonClaudeModels.forEach((model) => {
      client.setOptions({ modelOptions: { model } });
      expect(
        /claude-[3-9]/.test(client.modelOptions.model) ||
          /claude-(?:sonnet|opus|haiku)-[4-9]/.test(client.modelOptions.model),
      ).toBe(false);
    });
  });
});
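The header tests above pin the expected beta header value for Claude 4 prompt caching. A condensed illustration of the idea being tested, assuming the `defaultHeaders` option of `@anthropic-ai/sdk`; LibreChat's actual `getClient` wiring is more involved than this:

```js
const { Anthropic } = require('@anthropic-ai/sdk');

// Sketch: when prompt caching is enabled for a Claude 3/4 model, the SDK client is
// constructed with the beta header the tests above assert on.
function buildClient(apiKey, { promptCache } = {}) {
  const defaultHeaders = promptCache
    ? { 'anthropic-beta': 'prompt-caching-2024-07-31' }
    : undefined;
  return new Anthropic({ apiKey, defaultHeaders });
}

const client = buildClient('test-api-key', { promptCache: true });
```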
@@ -1,7 +1,13 @@
const { SerpAPI } = require('@langchain/community/tools/serpapi');
const { Calculator } = require('@langchain/community/tools/calculator');
const { createCodeExecutionTool, EnvVar } = require('@librechat/agents');
const { Tools, Constants, EToolResources } = require('librechat-data-provider');
const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents');
const {
  Tools,
  Constants,
  EToolResources,
  loadWebSearchAuth,
  replaceSpecialVars,
} = require('librechat-data-provider');
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
const {
  availableTools,

@@ -138,7 +144,6 @@ const loadTools = async ({
  agent,
  model,
  endpoint,
  useSpecs,
  tools = [],
  options = {},
  functions = true,

@@ -263,6 +268,33 @@ const loadTools = async ({
        return createFileSearchTool({ req: options.req, files, entity_id: agent?.id });
      };
      continue;
    } else if (tool === Tools.web_search) {
      const webSearchConfig = options?.req?.app?.locals?.webSearch;
      const result = await loadWebSearchAuth({
        userId: user,
        loadAuthValues,
        webSearchConfig,
      });
      const { onSearchResults, onGetHighlights } = options?.[Tools.web_search] ?? {};
      requestedTools[tool] = async () => {
        toolContextMap[tool] = `# \`${tool}\`:
Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
1. **Execute immediately without preface** when using \`${tool}\`.
2. **After the search, begin with a brief summary** that directly addresses the query without headers or explaining your process.
3. **Structure your response clearly** using Markdown formatting (Level 2 headers for sections, lists for multiple points, tables for comparisons).
4. **Cite sources properly** according to the citation anchor format, utilizing group anchors when appropriate.
5. **Tailor your approach to the query type** (academic, news, coding, etc.) while maintaining an expert, journalistic, unbiased tone.
6. **Provide comprehensive information** with specific details, examples, and as much relevant context as possible from search results.
7. **Avoid moralizing language.**
`.trim();
        return createSearchTool({
          ...result.authResult,
          onSearchResults,
          onGetHighlights,
          logger,
        });
      };
      continue;
    } else if (tool && appTools[tool] && mcpToolPattern.test(tool)) {
      requestedTools[tool] = async () =>
        createMCPTool({
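The `web_search` branch above resolves provider, scraper, and reranker credentials through `loadWebSearchAuth` and registers an async factory that builds the tool with `createSearchTool`. A rough sketch of how such deferred loaders might be driven (hypothetical caller, not from this PR; it mirrors only the shapes visible in the diff):

```js
// Hypothetical driver: each requestedTools entry is an async factory returning a tool instance.
async function initRequestedTools(requestedTools) {
  const tools = [];
  for (const [name, loadTool] of Object.entries(requestedTools)) {
    const tool = await loadTool(); // builds the tool (e.g., the search tool) on demand
    if (tool) {
      tools.push(tool);
      console.log(`loaded tool: ${name}`);
    }
  }
  return tools;
}
```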
api/cache/getLogStores.js (5 changed lines, vendored)

@@ -61,6 +61,10 @@ const abortKeys = isRedisEnabled
  ? new Keyv({ store: keyvRedis })
  : new Keyv({ namespace: CacheKeys.ABORT_KEYS, ttl: Time.TEN_MINUTES });

const openIdExchangedTokensCache = isRedisEnabled
  ? new Keyv({ store: keyvRedis, ttl: Time.TEN_MINUTES })
  : new Keyv({ namespace: CacheKeys.OPENID_EXCHANGED_TOKENS, ttl: Time.TEN_MINUTES });

const namespaces = {
  [CacheKeys.ROLES]: roles,
  [CacheKeys.CONFIG_STORE]: config,

@@ -98,6 +102,7 @@ const namespaces = {
  [CacheKeys.AUDIO_RUNS]: audioRuns,
  [CacheKeys.MESSAGES]: messages,
  [CacheKeys.FLOWS]: flows,
  [CacheKeys.OPENID_EXCHANGED_TOKENS]: openIdExchangedTokensCache,
};

/**
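The new `openIdExchangedTokensCache` is simply a Keyv store with a ten-minute TTL. A minimal sketch of the access pattern it enables (illustrative only; the key format and payload shape are assumptions, not taken from the PR):

```js
const Keyv = require('keyv');

// In-memory fallback with a 10-minute TTL, matching the non-Redis branch above.
const exchangedTokens = new Keyv({ namespace: 'OPENID_EXCHANGED_TOKENS', ttl: 10 * 60 * 1000 });

async function demo() {
  // Hypothetical payload: whatever the token-exchange flow returns for a user.
  await exchangedTokens.set('user:123', { access_token: 'abc', expires_in: 600 });
  const cached = await exchangedTokens.get('user:123'); // undefined once the TTL elapses
  console.log(cached);
}

demo();
```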
api/cache/keyvRedis.js (11 changed lines, vendored)

@@ -76,10 +76,13 @@ if (REDIS_URI && isEnabled(USE_REDIS)) {
    keyvRedis = new KeyvRedis(REDIS_URI, keyvOpts);
  }

  const pingInterval = setInterval(() => {
    logger.debug('KeyvRedis ping');
    keyvRedis.client.ping().catch(err => logger.error('Redis keep-alive ping failed:', err));
  }, 5 * 60 * 1000);
  const pingInterval = setInterval(
    () => {
      logger.debug('KeyvRedis ping');
      keyvRedis.client.ping().catch((err) => logger.error('Redis keep-alive ping failed:', err));
    },
    5 * 60 * 1000,
  );

  keyvRedis.on('ready', () => {
    logger.info('KeyvRedis connection ready');
@@ -11,5 +11,8 @@ module.exports = {
  moduleNameMapper: {
    '~/(.*)': '<rootDir>/$1',
    '~/data/auth.json': '<rootDir>/__mocks__/auth.mock.json',
    '^openid-client/passport$': '<rootDir>/test/__mocks__/openid-client-passport.js', // Mock for the passport strategy part
    '^openid-client$': '<rootDir>/test/__mocks__/openid-client.js',
  },
  transformIgnorePatterns: ['/node_modules/(?!(openid-client|oauth4webapi|jose)/).*/'],
};
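The mapper above redirects `openid-client` imports to local manual mocks, and `transformIgnorePatterns` lets Jest transform the ESM-only `openid-client`, `oauth4webapi`, and `jose` packages. The PR's actual mock files are not shown in this diff; a bare-bones illustration of what such a stub could look like:

```js
// test/__mocks__/openid-client.js — illustrative stub only, not the real mock from the PR.
// Export jest.fn() placeholders for whatever the code under test imports from 'openid-client';
// the export name below is an assumption and should match the calls actually used.
module.exports = {
  discovery: jest.fn(async () => ({})),
};
```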
@@ -21,7 +21,19 @@ const Agent = mongoose.model('agent', agentSchema);
 * @throws {Error} If the agent creation fails.
 */
const createAgent = async (agentData) => {
  return (await Agent.create(agentData)).toObject();
  const { author, ...versionData } = agentData;
  const timestamp = new Date();
  const initialAgentData = {
    ...agentData,
    versions: [
      {
        ...versionData,
        createdAt: timestamp,
        updatedAt: timestamp,
      },
    ],
  };
  return (await Agent.create(initialAgentData)).toObject();
};

/**

@@ -48,12 +60,17 @@ const loadEphemeralAgent = ({ req, agent_id, endpoint, model_parameters: _m }) =
  const { model, ...model_parameters } = _m;
  /** @type {Record<string, FunctionTool>} */
  const availableTools = req.app.locals.availableTools;
  const mcpServers = new Set(req.body.ephemeralAgent?.mcp);
  /** @type {TEphemeralAgent | null} */
  const ephemeralAgent = req.body.ephemeralAgent;
  const mcpServers = new Set(ephemeralAgent?.mcp);
  /** @type {string[]} */
  const tools = [];
  if (req.body.ephemeralAgent?.execute_code === true) {
  if (ephemeralAgent?.execute_code === true) {
    tools.push(Tools.execute_code);
  }
  if (ephemeralAgent?.web_search === true) {
    tools.push(Tools.web_search);
  }

  if (mcpServers.size > 0) {
    for (const toolName of Object.keys(availableTools)) {

@@ -103,6 +120,8 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
    return null;
  }

  agent.version = agent.versions ? agent.versions.length : 0;

  if (agent.author.toString() === req.user.id) {
    return agent;
  }

@@ -127,18 +146,155 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
  }
};

/**
 * Check if a version already exists in the versions array, excluding timestamp and author fields
 * @param {Object} updateData - The update data to compare
 * @param {Array} versions - The existing versions array
 * @returns {Object|null} - The matching version if found, null otherwise
 */
const isDuplicateVersion = (updateData, currentData, versions) => {
  if (!versions || versions.length === 0) {
    return null;
  }

  const excludeFields = [
    '_id',
    'id',
    'createdAt',
    'updatedAt',
    'author',
    'updatedBy',
    'created_at',
    'updated_at',
    '__v',
    'agent_ids',
    'versions',
  ];

  const { $push, $pull, $addToSet, ...directUpdates } = updateData;

  if (Object.keys(directUpdates).length === 0) {
    return null;
  }

  const wouldBeVersion = { ...currentData, ...directUpdates };
  const lastVersion = versions[versions.length - 1];

  const allFields = new Set([...Object.keys(wouldBeVersion), ...Object.keys(lastVersion)]);

  const importantFields = Array.from(allFields).filter((field) => !excludeFields.includes(field));

  let isMatch = true;
  for (const field of importantFields) {
    if (!wouldBeVersion[field] && !lastVersion[field]) {
      continue;
    }

    if (Array.isArray(wouldBeVersion[field]) && Array.isArray(lastVersion[field])) {
      if (wouldBeVersion[field].length !== lastVersion[field].length) {
        isMatch = false;
        break;
      }

      // Special handling for projectIds (MongoDB ObjectIds)
      if (field === 'projectIds') {
        const wouldBeIds = wouldBeVersion[field].map((id) => id.toString()).sort();
        const versionIds = lastVersion[field].map((id) => id.toString()).sort();

        if (!wouldBeIds.every((id, i) => id === versionIds[i])) {
          isMatch = false;
          break;
        }
      }
      // Handle arrays of objects like tool_kwargs
      else if (typeof wouldBeVersion[field][0] === 'object' && wouldBeVersion[field][0] !== null) {
        const sortedWouldBe = [...wouldBeVersion[field]].map((item) => JSON.stringify(item)).sort();
        const sortedVersion = [...lastVersion[field]].map((item) => JSON.stringify(item)).sort();

        if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
          isMatch = false;
          break;
        }
      } else {
        const sortedWouldBe = [...wouldBeVersion[field]].sort();
        const sortedVersion = [...lastVersion[field]].sort();

        if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
          isMatch = false;
          break;
        }
      }
    } else if (field === 'model_parameters') {
      const wouldBeParams = wouldBeVersion[field] || {};
      const lastVersionParams = lastVersion[field] || {};
      if (JSON.stringify(wouldBeParams) !== JSON.stringify(lastVersionParams)) {
        isMatch = false;
        break;
      }
    } else if (wouldBeVersion[field] !== lastVersion[field]) {
      isMatch = false;
      break;
    }
  }

  return isMatch ? lastVersion : null;
};

/**
 * Update an agent with new data without overwriting existing
 * properties, or create a new agent if it doesn't exist.
 * When an agent is updated, a copy of the current state will be saved to the versions array.
 *
 * @param {Object} searchParameter - The search parameters to find the agent to update.
 * @param {string} searchParameter.id - The ID of the agent to update.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {Object} updateData - An object containing the properties to update.
 * @param {string} [updatingUserId] - The ID of the user performing the update (used for tracking non-author updates).
 * @returns {Promise<Agent>} The updated or newly created agent document as a plain object.
 * @throws {Error} If the update would create a duplicate version
 */
const updateAgent = async (searchParameter, updateData) => {
const updateAgent = async (searchParameter, updateData, updatingUserId = null) => {
  const options = { new: true, upsert: false };

  const currentAgent = await Agent.findOne(searchParameter);
  if (currentAgent) {
    const { __v, _id, id, versions, author, ...versionData } = currentAgent.toObject();
    const { $push, $pull, $addToSet, ...directUpdates } = updateData;

    if (Object.keys(directUpdates).length > 0 && versions && versions.length > 0) {
      const duplicateVersion = isDuplicateVersion(updateData, versionData, versions);
      if (duplicateVersion) {
        const error = new Error(
          'Duplicate version: This would create a version identical to an existing one',
        );
        error.statusCode = 409;
        error.details = {
          duplicateVersion,
          versionIndex: versions.findIndex(
            (v) => JSON.stringify(duplicateVersion) === JSON.stringify(v),
          ),
        };
        throw error;
      }
    }

    const versionEntry = {
      ...versionData,
      ...directUpdates,
      updatedAt: new Date(),
    };

    // Always store updatedBy field to track who made the change
    if (updatingUserId) {
      versionEntry.updatedBy = new mongoose.Types.ObjectId(updatingUserId);
    }

    updateData.$push = {
      ...($push || {}),
      versions: versionEntry,
    };
  }

  return Agent.findOneAndUpdate(searchParameter, updateData, options).lean();
};

@@ -151,7 +307,7 @@ const updateAgent = async (searchParameter, updateData) => {
 * @param {string} params.file_id
 * @returns {Promise<Agent>} The updated agent.
 */
const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
const addAgentResourceFile = async ({ req, agent_id, tool_resource, file_id }) => {
  const searchParameter = { id: agent_id };
  let agent = await getAgent(searchParameter);
  if (!agent) {

@@ -177,7 +333,7 @@ const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
    },
  };

  const updatedAgent = await updateAgent(searchParameter, updateData);
  const updatedAgent = await updateAgent(searchParameter, updateData, req?.user?.id);
  if (updatedAgent) {
    return updatedAgent;
  } else {

@@ -341,7 +497,7 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
    delete updateQuery.author;
  }

  const updatedAgent = await updateAgent(updateQuery, updateOps);
  const updatedAgent = await updateAgent(updateQuery, updateOps, user.id);
  if (updatedAgent) {
    return updatedAgent;
  }

@@ -358,6 +514,40 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
  return await getAgent({ id: agentId });
};

/**
 * Reverts an agent to a specific version in its version history.
 * @param {Object} searchParameter - The search parameters to find the agent to revert.
 * @param {string} searchParameter.id - The ID of the agent to revert.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {number} versionIndex - The index of the version to revert to in the versions array.
 * @returns {Promise<MongoAgent>} The updated agent document after reverting.
 * @throws {Error} If the agent is not found or the specified version does not exist.
 */
const revertAgentVersion = async (searchParameter, versionIndex) => {
  const agent = await Agent.findOne(searchParameter);
  if (!agent) {
    throw new Error('Agent not found');
  }

  if (!agent.versions || !agent.versions[versionIndex]) {
    throw new Error(`Version ${versionIndex} not found`);
  }

  const revertToVersion = agent.versions[versionIndex];

  const updateData = {
    ...revertToVersion,
  };

  delete updateData._id;
  delete updateData.id;
  delete updateData.versions;
  delete updateData.author;
  delete updateData.updatedBy;

  return Agent.findOneAndUpdate(searchParameter, updateData, { new: true }).lean();
};

module.exports = {
  Agent,
  getAgent,

@@ -369,4 +559,5 @@ module.exports = {
  updateAgentProjects,
  addAgentResourceFile,
  removeAgentResourceFiles,
  revertAgentVersion,
};
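Taken together, `updateAgent` now appends a version entry on every change, records who made it, and rejects no-op updates with a 409-style error. A short sketch of how a caller might use the new third argument and handle the duplicate-version rejection (hypothetical Express-style handler, not part of this PR; the require path assumes the repo's `~` alias):

```js
const { updateAgent } = require('~/models/Agent'); // path assumed from the repo's '~' alias

async function patchAgent(req, res) {
  try {
    const updated = await updateAgent({ id: req.params.agent_id }, req.body, req.user.id);
    res.json(updated);
  } catch (err) {
    if (err.statusCode === 409) {
      // The update matched the latest stored version, so no new version was created.
      return res.status(409).json({ message: err.message, details: err.details });
    }
    throw err;
  }
}
```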
@@ -1,7 +1,25 @@
const originalEnv = {
  CREDS_KEY: process.env.CREDS_KEY,
  CREDS_IV: process.env.CREDS_IV,
};

process.env.CREDS_KEY = '0123456789abcdef0123456789abcdef';
process.env.CREDS_IV = '0123456789abcdef';

const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { Agent, addAgentResourceFile, removeAgentResourceFiles } = require('./Agent');
const {
  Agent,
  addAgentResourceFile,
  removeAgentResourceFiles,
  createAgent,
  updateAgent,
  getAgent,
  deleteAgent,
  getListAgents,
  updateAgentProjects,
} = require('./Agent');

describe('Agent Resource File Operations', () => {
  let mongoServer;

@@ -15,6 +33,8 @@ describe('Agent Resource File Operations', () => {
  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
    process.env.CREDS_KEY = originalEnv.CREDS_KEY;
    process.env.CREDS_IV = originalEnv.CREDS_IV;
  });

  beforeEach(async () => {

@@ -332,3 +352,674 @@ describe('Agent Resource File Operations', () => {
    expect(finalFileIds).toHaveLength(0);
  });
});
describe('Agent CRUD Operations', () => {
  let mongoServer;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    await Agent.deleteMany({});
  });

  test('should create and get an agent', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    const newAgent = await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      description: 'Test description',
    });

    expect(newAgent).toBeDefined();
    expect(newAgent.id).toBe(agentId);
    expect(newAgent.name).toBe('Test Agent');

    const retrievedAgent = await getAgent({ id: agentId });
    expect(retrievedAgent).toBeDefined();
    expect(retrievedAgent.id).toBe(agentId);
    expect(retrievedAgent.name).toBe('Test Agent');
    expect(retrievedAgent.description).toBe('Test description');
  });

  test('should delete an agent', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Agent To Delete',
      provider: 'test',
      model: 'test-model',
      author: authorId,
    });

    const agentBeforeDelete = await getAgent({ id: agentId });
    expect(agentBeforeDelete).toBeDefined();

    await deleteAgent({ id: agentId });

    const agentAfterDelete = await getAgent({ id: agentId });
    expect(agentAfterDelete).toBeNull();
  });

  test('should list agents by author', async () => {
    const authorId = new mongoose.Types.ObjectId();
    const otherAuthorId = new mongoose.Types.ObjectId();

    const agentIds = [];
    for (let i = 0; i < 5; i++) {
      const id = `agent_${uuidv4()}`;
      agentIds.push(id);
      await createAgent({
        id,
        name: `Agent ${i}`,
        provider: 'test',
        model: 'test-model',
        author: authorId,
      });
    }

    for (let i = 0; i < 3; i++) {
      await createAgent({
        id: `other_agent_${uuidv4()}`,
        name: `Other Agent ${i}`,
        provider: 'test',
        model: 'test-model',
        author: otherAuthorId,
      });
    }

    const result = await getListAgents({ author: authorId.toString() });

    expect(result).toBeDefined();
    expect(result.data).toBeDefined();
    expect(result.data).toHaveLength(5);
    expect(result.has_more).toBe(true);

    for (const agent of result.data) {
      expect(agent.author).toBe(authorId.toString());
    }
  });

  test('should update agent projects', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();
    const projectId1 = new mongoose.Types.ObjectId();
    const projectId2 = new mongoose.Types.ObjectId();
    const projectId3 = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Project Test Agent',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      projectIds: [projectId1],
    });

    await updateAgent(
      { id: agentId },
      { $addToSet: { projectIds: { $each: [projectId2, projectId3] } } },
    );

    await updateAgent({ id: agentId }, { $pull: { projectIds: projectId1 } });

    await updateAgent({ id: agentId }, { projectIds: [projectId2, projectId3] });

    const updatedAgent = await getAgent({ id: agentId });
    expect(updatedAgent.projectIds).toHaveLength(2);
    expect(updatedAgent.projectIds.map((id) => id.toString())).toContain(projectId2.toString());
    expect(updatedAgent.projectIds.map((id) => id.toString())).toContain(projectId3.toString());
    expect(updatedAgent.projectIds.map((id) => id.toString())).not.toContain(projectId1.toString());

    await updateAgent({ id: agentId }, { projectIds: [] });

    const emptyProjectsAgent = await getAgent({ id: agentId });
    expect(emptyProjectsAgent.projectIds).toHaveLength(0);

    const nonExistentId = `agent_${uuidv4()}`;
    await expect(
      updateAgentProjects({
        id: nonExistentId,
        projectIds: [projectId1],
      }),
    ).rejects.toThrow();
  });

  test('should handle ephemeral agent loading', async () => {
    const agentId = 'ephemeral_test';
    const endpoint = 'openai';

    const originalModule = jest.requireActual('librechat-data-provider');

    const mockDataProvider = {
      ...originalModule,
      Constants: {
        ...originalModule.Constants,
        EPHEMERAL_AGENT_ID: 'ephemeral_test',
      },
    };

    jest.doMock('librechat-data-provider', () => mockDataProvider);

    const mockReq = {
      user: { id: 'user123' },
      body: {
        promptPrefix: 'This is a test instruction',
        ephemeralAgent: {
          execute_code: true,
          mcp: ['server1', 'server2'],
        },
      },
      app: {
        locals: {
          availableTools: {
            tool__server1: {},
            tool__server2: {},
            another_tool: {},
          },
        },
      },
    };

    const params = {
      req: mockReq,
      agent_id: agentId,
      endpoint,
      model_parameters: {
        model: 'gpt-4',
        temperature: 0.7,
      },
    };

    expect(agentId).toBeDefined();
    expect(endpoint).toBeDefined();

    jest.dontMock('librechat-data-provider');
  });

  test('should handle loadAgent functionality and errors', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Test Load Agent',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      tools: ['tool1', 'tool2'],
    });

    const agent = await getAgent({ id: agentId });

    expect(agent).toBeDefined();
    expect(agent.id).toBe(agentId);
    expect(agent.name).toBe('Test Load Agent');
    expect(agent.tools).toEqual(expect.arrayContaining(['tool1', 'tool2']));

    const mockLoadAgent = jest.fn().mockResolvedValue(agent);
    const loadedAgent = await mockLoadAgent();
    expect(loadedAgent).toBeDefined();
    expect(loadedAgent.id).toBe(agentId);

    const nonExistentId = `agent_${uuidv4()}`;
    const nonExistentAgent = await getAgent({ id: nonExistentId });
    expect(nonExistentAgent).toBeNull();

    const mockLoadAgentError = jest.fn().mockRejectedValue(new Error('No agent found with ID'));
    await expect(mockLoadAgentError()).rejects.toThrow('No agent found with ID');
  });
});
describe('Agent Version History', () => {
  let mongoServer;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    await Agent.deleteMany({});
  });

  test('should create an agent with a single entry in versions array', async () => {
    const agentId = `agent_${uuidv4()}`;
    const agent = await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: new mongoose.Types.ObjectId(),
    });

    expect(agent.versions).toBeDefined();
    expect(Array.isArray(agent.versions)).toBe(true);
    expect(agent.versions).toHaveLength(1);
    expect(agent.versions[0].name).toBe('Test Agent');
    expect(agent.versions[0].provider).toBe('test');
    expect(agent.versions[0].model).toBe('test-model');
  });

  test('should accumulate version history across multiple updates', async () => {
    const agentId = `agent_${uuidv4()}`;
    const author = new mongoose.Types.ObjectId();
    await createAgent({
      id: agentId,
      name: 'First Name',
      provider: 'test',
      model: 'test-model',
      author,
      description: 'First description',
    });

    await updateAgent({ id: agentId }, { name: 'Second Name', description: 'Second description' });
    await updateAgent({ id: agentId }, { name: 'Third Name', model: 'new-model' });
    const finalAgent = await updateAgent({ id: agentId }, { description: 'Final description' });

    expect(finalAgent.versions).toBeDefined();
    expect(Array.isArray(finalAgent.versions)).toBe(true);
    expect(finalAgent.versions).toHaveLength(4);

    expect(finalAgent.versions[0].name).toBe('First Name');
    expect(finalAgent.versions[0].description).toBe('First description');
    expect(finalAgent.versions[0].model).toBe('test-model');

    expect(finalAgent.versions[1].name).toBe('Second Name');
    expect(finalAgent.versions[1].description).toBe('Second description');
    expect(finalAgent.versions[1].model).toBe('test-model');

    expect(finalAgent.versions[2].name).toBe('Third Name');
    expect(finalAgent.versions[2].description).toBe('Second description');
    expect(finalAgent.versions[2].model).toBe('new-model');

    expect(finalAgent.versions[3].name).toBe('Third Name');
    expect(finalAgent.versions[3].description).toBe('Final description');
    expect(finalAgent.versions[3].model).toBe('new-model');

    expect(finalAgent.name).toBe('Third Name');
    expect(finalAgent.description).toBe('Final description');
    expect(finalAgent.model).toBe('new-model');
  });

  test('should not include metadata fields in version history', async () => {
    const agentId = `agent_${uuidv4()}`;
    await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: new mongoose.Types.ObjectId(),
    });

    const updatedAgent = await updateAgent({ id: agentId }, { description: 'New description' });

    expect(updatedAgent.versions).toHaveLength(2);
    expect(updatedAgent.versions[0]._id).toBeUndefined();
    expect(updatedAgent.versions[0].__v).toBeUndefined();
    expect(updatedAgent.versions[0].name).toBe('Test Agent');
    expect(updatedAgent.versions[0].author).toBeUndefined();

    expect(updatedAgent.versions[1]._id).toBeUndefined();
    expect(updatedAgent.versions[1].__v).toBeUndefined();
  });

  test('should not recursively include previous versions', async () => {
    const agentId = `agent_${uuidv4()}`;
    await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: new mongoose.Types.ObjectId(),
    });

    await updateAgent({ id: agentId }, { name: 'Updated Name 1' });
    await updateAgent({ id: agentId }, { name: 'Updated Name 2' });
    const finalAgent = await updateAgent({ id: agentId }, { name: 'Updated Name 3' });

    expect(finalAgent.versions).toHaveLength(4);

    finalAgent.versions.forEach((version) => {
      expect(version.versions).toBeUndefined();
    });
  });

  test('should handle MongoDB operators and field updates correctly', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();
    const projectId = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'MongoDB Operator Test',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      tools: ['tool1'],
    });

    await updateAgent(
      { id: agentId },
      {
        description: 'Updated description',
        $push: { tools: 'tool2' },
        $addToSet: { projectIds: projectId },
      },
    );

    const firstUpdate = await getAgent({ id: agentId });
    expect(firstUpdate.description).toBe('Updated description');
    expect(firstUpdate.tools).toContain('tool1');
    expect(firstUpdate.tools).toContain('tool2');
    expect(firstUpdate.projectIds.map((id) => id.toString())).toContain(projectId.toString());
    expect(firstUpdate.versions).toHaveLength(2);

    await updateAgent(
      { id: agentId },
      {
        tools: ['tool2', 'tool3'],
      },
    );

    const secondUpdate = await getAgent({ id: agentId });
    expect(secondUpdate.tools).toHaveLength(2);
    expect(secondUpdate.tools).toContain('tool2');
    expect(secondUpdate.tools).toContain('tool3');
    expect(secondUpdate.tools).not.toContain('tool1');
    expect(secondUpdate.versions).toHaveLength(3);

    await updateAgent(
      { id: agentId },
      {
        $push: { tools: 'tool3' },
      },
    );

    const thirdUpdate = await getAgent({ id: agentId });
    const toolCount = thirdUpdate.tools.filter((t) => t === 'tool3').length;
    expect(toolCount).toBe(2);
    expect(thirdUpdate.versions).toHaveLength(4);
  });

  test('should handle parameter objects correctly', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Parameters Test',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      model_parameters: { temperature: 0.7 },
    });

    const updatedAgent = await updateAgent(
      { id: agentId },
      { model_parameters: { temperature: 0.8 } },
    );

    expect(updatedAgent.versions).toHaveLength(2);
    expect(updatedAgent.model_parameters.temperature).toBe(0.8);

    await updateAgent(
      { id: agentId },
      {
        model_parameters: {
          temperature: 0.8,
          max_tokens: 1000,
        },
      },
    );

    const complexAgent = await getAgent({ id: agentId });
    expect(complexAgent.versions).toHaveLength(3);
    expect(complexAgent.model_parameters.temperature).toBe(0.8);
    expect(complexAgent.model_parameters.max_tokens).toBe(1000);

    await updateAgent({ id: agentId }, { model_parameters: {} });

    const emptyParamsAgent = await getAgent({ id: agentId });
    expect(emptyParamsAgent.versions).toHaveLength(4);
    expect(emptyParamsAgent.model_parameters).toEqual({});
  });
test('should detect duplicate versions and reject updates', async () => {
|
||||
const originalConsoleError = console.error;
|
||||
console.error = jest.fn();
|
||||
|
||||
try {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const authorId = new mongoose.Types.ObjectId();
|
||||
const projectId1 = new mongoose.Types.ObjectId();
|
||||
const projectId2 = new mongoose.Types.ObjectId();
|
||||
|
||||
const testCases = [
|
||||
{
|
||||
name: 'simple field update',
|
||||
initial: {
|
||||
name: 'Test Agent',
|
||||
description: 'Initial description',
|
||||
},
|
||||
update: { name: 'Updated Name' },
|
||||
duplicate: { name: 'Updated Name' },
|
||||
},
|
||||
{
|
||||
name: 'object field update',
|
||||
initial: {
|
||||
model_parameters: { temperature: 0.7 },
|
||||
},
|
||||
update: { model_parameters: { temperature: 0.8 } },
|
||||
duplicate: { model_parameters: { temperature: 0.8 } },
|
||||
},
|
||||
{
|
||||
name: 'array field update',
|
||||
initial: {
|
||||
tools: ['tool1', 'tool2'],
|
||||
},
|
||||
update: { tools: ['tool2', 'tool3'] },
|
||||
duplicate: { tools: ['tool2', 'tool3'] },
|
||||
},
|
||||
{
|
||||
name: 'projectIds update',
|
||||
initial: {
|
||||
projectIds: [projectId1],
|
||||
},
|
||||
update: { projectIds: [projectId1, projectId2] },
|
||||
duplicate: { projectIds: [projectId2, projectId1] },
|
||||
},
|
||||
];
|
||||
|
||||
for (const testCase of testCases) {
|
||||
const testAgentId = `agent_${uuidv4()}`;
|
||||
|
||||
await createAgent({
|
||||
id: testAgentId,
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: authorId,
|
||||
...testCase.initial,
|
||||
});
|
||||
|
||||
await updateAgent({ id: testAgentId }, testCase.update);
|
||||
|
||||
let error;
|
||||
try {
|
||||
await updateAgent({ id: testAgentId }, testCase.duplicate);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
|
||||
expect(error).toBeDefined();
|
||||
expect(error.message).toContain('Duplicate version');
|
||||
expect(error.statusCode).toBe(409);
|
||||
expect(error.details).toBeDefined();
|
||||
expect(error.details.duplicateVersion).toBeDefined();
|
||||
|
||||
const agent = await getAgent({ id: testAgentId });
|
||||
expect(agent.versions).toHaveLength(2);
|
||||
}
|
||||
} finally {
|
||||
console.error = originalConsoleError;
|
||||
}
|
||||
});
|
||||
|
||||
test('should track updatedBy when a different user updates an agent', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const originalAuthor = new mongoose.Types.ObjectId();
|
||||
const updatingUser = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
        name: 'Original Agent',
        provider: 'test',
        model: 'test-model',
        author: originalAuthor,
        description: 'Original description',
      });

      const updatedAgent = await updateAgent(
        { id: agentId },
        { name: 'Updated Agent', description: 'Updated description' },
        updatingUser.toString(),
      );

      expect(updatedAgent.versions).toHaveLength(2);
      expect(updatedAgent.versions[1].updatedBy.toString()).toBe(updatingUser.toString());
      expect(updatedAgent.author.toString()).toBe(originalAuthor.toString());
    });

    test('should include updatedBy even when the original author updates the agent', async () => {
      const agentId = `agent_${uuidv4()}`;
      const originalAuthor = new mongoose.Types.ObjectId();

      await createAgent({
        id: agentId,
        name: 'Original Agent',
        provider: 'test',
        model: 'test-model',
        author: originalAuthor,
        description: 'Original description',
      });

      const updatedAgent = await updateAgent(
        { id: agentId },
        { name: 'Updated Agent', description: 'Updated description' },
        originalAuthor.toString(),
      );

      expect(updatedAgent.versions).toHaveLength(2);
      expect(updatedAgent.versions[1].updatedBy.toString()).toBe(originalAuthor.toString());
      expect(updatedAgent.author.toString()).toBe(originalAuthor.toString());
    });

    test('should track multiple different users updating the same agent', async () => {
      const agentId = `agent_${uuidv4()}`;
      const originalAuthor = new mongoose.Types.ObjectId();
      const user1 = new mongoose.Types.ObjectId();
      const user2 = new mongoose.Types.ObjectId();
      const user3 = new mongoose.Types.ObjectId();

      await createAgent({
        id: agentId,
        name: 'Original Agent',
        provider: 'test',
        model: 'test-model',
        author: originalAuthor,
        description: 'Original description',
      });

      // User 1 makes an update
      await updateAgent(
        { id: agentId },
        { name: 'Updated by User 1', description: 'First update' },
        user1.toString(),
      );

      // Original author makes an update
      await updateAgent(
        { id: agentId },
        { description: 'Updated by original author' },
        originalAuthor.toString(),
      );

      // User 2 makes an update
      await updateAgent(
        { id: agentId },
        { name: 'Updated by User 2', model: 'new-model' },
        user2.toString(),
      );

      // User 3 makes an update
      const finalAgent = await updateAgent(
        { id: agentId },
        { description: 'Final update by User 3' },
        user3.toString(),
      );

      expect(finalAgent.versions).toHaveLength(5);
      expect(finalAgent.author.toString()).toBe(originalAuthor.toString());

      // Check that each version has the correct updatedBy
      expect(finalAgent.versions[0].updatedBy).toBeUndefined(); // Initial creation has no updatedBy
      expect(finalAgent.versions[1].updatedBy.toString()).toBe(user1.toString());
      expect(finalAgent.versions[2].updatedBy.toString()).toBe(originalAuthor.toString());
      expect(finalAgent.versions[3].updatedBy.toString()).toBe(user2.toString());
      expect(finalAgent.versions[4].updatedBy.toString()).toBe(user3.toString());

      // Verify the final state
      expect(finalAgent.name).toBe('Updated by User 2');
      expect(finalAgent.description).toBe('Final update by User 3');
      expect(finalAgent.model).toBe('new-model');
    });

    test('should preserve original author during agent restoration', async () => {
      const agentId = `agent_${uuidv4()}`;
      const originalAuthor = new mongoose.Types.ObjectId();
      const updatingUser = new mongoose.Types.ObjectId();

      await createAgent({
        id: agentId,
        name: 'Original Agent',
        provider: 'test',
        model: 'test-model',
        author: originalAuthor,
        description: 'Original description',
      });

      await updateAgent(
        { id: agentId },
        { name: 'Updated Agent', description: 'Updated description' },
        updatingUser.toString(),
      );

      const { revertAgentVersion } = require('./Agent');
      const revertedAgent = await revertAgentVersion({ id: agentId }, 0);

      expect(revertedAgent.author.toString()).toBe(originalAuthor.toString());
      expect(revertedAgent.name).toBe('Original Agent');
      expect(revertedAgent.description).toBe('Original description');
    });
  });

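The tests above exercise the new third argument to `updateAgent`, which records who performed each change in `versions[n].updatedBy` while leaving `agent.author` untouched. A minimal usage sketch for orientation (the helper comes from the same `./Agent` model module the tests use; everything else here is assumed context):

```js
// Minimal sketch, not part of the diff. Assumes updateAgent is the model helper
// exercised above and that `updatingUserId` is a Mongo ObjectId.
const { updateAgent } = require('./Agent');

async function renameAgentAsUser(agentId, updatingUserId) {
  // Each call appends a new entry to agent.versions; versions[n].updatedBy records
  // who made that change, while agent.author remains the original creator.
  return updateAgent({ id: agentId }, { name: 'Renamed Agent' }, updatingUserId.toString());
}

module.exports = { renameAgentAsUser };
```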
@@ -100,6 +100,8 @@ const tokenValues = Object.assign(
  'claude-3-5-haiku': { prompt: 0.8, completion: 4 },
  'claude-3.5-haiku': { prompt: 0.8, completion: 4 },
  'claude-3-haiku': { prompt: 0.25, completion: 1.25 },
  'claude-sonnet-4': { prompt: 3, completion: 15 },
  'claude-opus-4': { prompt: 15, completion: 75 },
  'claude-2.1': { prompt: 8, completion: 24 },
  'claude-2': { prompt: 8, completion: 24 },
  'claude-instant': { prompt: 0.8, completion: 2.4 },
@@ -162,6 +164,8 @@ const cacheTokenValues = {
  'claude-3.5-haiku': { write: 1, read: 0.08 },
  'claude-3-5-haiku': { write: 1, read: 0.08 },
  'claude-3-haiku': { write: 0.3, read: 0.03 },
  'claude-sonnet-4': { write: 3.75, read: 0.3 },
  'claude-opus-4': { write: 18.75, read: 1.5 },
};

/**

@@ -664,3 +664,97 @@ describe('Grok Model Tests - Pricing', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Claude Model Tests', () => {
|
||||
it('should return correct prompt and completion rates for Claude 4 models', () => {
|
||||
expect(getMultiplier({ model: 'claude-sonnet-4', tokenType: 'prompt' })).toBe(
|
||||
tokenValues['claude-sonnet-4'].prompt,
|
||||
);
|
||||
expect(getMultiplier({ model: 'claude-sonnet-4', tokenType: 'completion' })).toBe(
|
||||
tokenValues['claude-sonnet-4'].completion,
|
||||
);
|
||||
expect(getMultiplier({ model: 'claude-opus-4', tokenType: 'prompt' })).toBe(
|
||||
tokenValues['claude-opus-4'].prompt,
|
||||
);
|
||||
expect(getMultiplier({ model: 'claude-opus-4', tokenType: 'completion' })).toBe(
|
||||
tokenValues['claude-opus-4'].completion,
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle Claude 4 model name variations with different prefixes and suffixes', () => {
|
||||
const modelVariations = [
|
||||
'claude-sonnet-4',
|
||||
'claude-sonnet-4-20240229',
|
||||
'claude-sonnet-4-latest',
|
||||
'anthropic/claude-sonnet-4',
|
||||
'claude-sonnet-4/anthropic',
|
||||
'claude-sonnet-4-preview',
|
||||
'claude-sonnet-4-20240229-preview',
|
||||
'claude-opus-4',
|
||||
'claude-opus-4-20240229',
|
||||
'claude-opus-4-latest',
|
||||
'anthropic/claude-opus-4',
|
||||
'claude-opus-4/anthropic',
|
||||
'claude-opus-4-preview',
|
||||
'claude-opus-4-20240229-preview',
|
||||
];
|
||||
|
||||
modelVariations.forEach((model) => {
|
||||
const valueKey = getValueKey(model);
|
||||
const isSonnet = model.includes('sonnet');
|
||||
const expectedKey = isSonnet ? 'claude-sonnet-4' : 'claude-opus-4';
|
||||
|
||||
expect(valueKey).toBe(expectedKey);
|
||||
expect(getMultiplier({ model, tokenType: 'prompt' })).toBe(tokenValues[expectedKey].prompt);
|
||||
expect(getMultiplier({ model, tokenType: 'completion' })).toBe(
|
||||
tokenValues[expectedKey].completion,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should return correct cache rates for Claude 4 models', () => {
|
||||
expect(getCacheMultiplier({ model: 'claude-sonnet-4', cacheType: 'write' })).toBe(
|
||||
cacheTokenValues['claude-sonnet-4'].write,
|
||||
);
|
||||
expect(getCacheMultiplier({ model: 'claude-sonnet-4', cacheType: 'read' })).toBe(
|
||||
cacheTokenValues['claude-sonnet-4'].read,
|
||||
);
|
||||
expect(getCacheMultiplier({ model: 'claude-opus-4', cacheType: 'write' })).toBe(
|
||||
cacheTokenValues['claude-opus-4'].write,
|
||||
);
|
||||
expect(getCacheMultiplier({ model: 'claude-opus-4', cacheType: 'read' })).toBe(
|
||||
cacheTokenValues['claude-opus-4'].read,
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle Claude 4 model cache rates with different prefixes and suffixes', () => {
|
||||
const modelVariations = [
|
||||
'claude-sonnet-4',
|
||||
'claude-sonnet-4-20240229',
|
||||
'claude-sonnet-4-latest',
|
||||
'anthropic/claude-sonnet-4',
|
||||
'claude-sonnet-4/anthropic',
|
||||
'claude-sonnet-4-preview',
|
||||
'claude-sonnet-4-20240229-preview',
|
||||
'claude-opus-4',
|
||||
'claude-opus-4-20240229',
|
||||
'claude-opus-4-latest',
|
||||
'anthropic/claude-opus-4',
|
||||
'claude-opus-4/anthropic',
|
||||
'claude-opus-4-preview',
|
||||
'claude-opus-4-20240229-preview',
|
||||
];
|
||||
|
||||
modelVariations.forEach((model) => {
|
||||
const isSonnet = model.includes('sonnet');
|
||||
const expectedKey = isSonnet ? 'claude-sonnet-4' : 'claude-opus-4';
|
||||
|
||||
expect(getCacheMultiplier({ model, cacheType: 'write' })).toBe(
|
||||
cacheTokenValues[expectedKey].write,
|
||||
);
|
||||
expect(getCacheMultiplier({ model, cacheType: 'read' })).toBe(
|
||||
cacheTokenValues[expectedKey].read,
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -43,12 +43,12 @@
    "@google/generative-ai": "^0.23.0",
    "@googleapis/youtube": "^20.0.0",
    "@keyv/redis": "^4.3.3",
    "@langchain/community": "^0.3.42",
    "@langchain/core": "^0.3.55",
    "@langchain/google-genai": "^0.2.8",
    "@langchain/google-vertexai": "^0.2.8",
    "@langchain/community": "^0.3.44",
    "@langchain/core": "^0.3.57",
    "@langchain/google-genai": "^0.2.9",
    "@langchain/google-vertexai": "^0.2.9",
    "@langchain/textsplitters": "^0.1.0",
    "@librechat/agents": "^2.4.317",
    "@librechat/agents": "^2.4.37",
    "@librechat/data-schemas": "*",
    "@waylaidwanderer/fetch-event-source": "^3.0.1",
    "axios": "^1.8.2",
@@ -75,6 +75,7 @@
    "ioredis": "^5.3.2",
    "js-yaml": "^4.1.0",
    "jsonwebtoken": "^9.0.0",
    "jwks-rsa": "^3.2.0",
    "keyv": "^5.3.2",
    "keyv-file": "^5.1.2",
    "klona": "^2.0.6",
@@ -92,7 +93,7 @@
    "ollama": "^0.5.0",
    "openai": "^4.96.2",
    "openai-chat-tokens": "^0.2.8",
    "openid-client": "^5.4.2",
    "openid-client": "^6.5.0",
    "passport": "^0.6.0",
    "passport-apple": "^2.0.2",
    "passport-discord": "^0.1.4",

@@ -16,17 +16,17 @@ const FinalizationRegistry = global.FinalizationRegistry || null;
|
||||
*/
|
||||
const clientRegistry = FinalizationRegistry
|
||||
? new FinalizationRegistry((heldValue) => {
|
||||
try {
|
||||
// This will run when the client is garbage collected
|
||||
if (heldValue && heldValue.userId) {
|
||||
logger.debug(`[FinalizationRegistry] Cleaning up client for user ${heldValue.userId}`);
|
||||
} else {
|
||||
logger.debug('[FinalizationRegistry] Cleaning up client');
|
||||
try {
|
||||
// This will run when the client is garbage collected
|
||||
if (heldValue && heldValue.userId) {
|
||||
logger.debug(`[FinalizationRegistry] Cleaning up client for user ${heldValue.userId}`);
|
||||
} else {
|
||||
logger.debug('[FinalizationRegistry] Cleaning up client');
|
||||
}
|
||||
} catch (e) {
|
||||
// Ignore errors
|
||||
}
|
||||
} catch (e) {
|
||||
// Ignore errors
|
||||
}
|
||||
})
|
||||
})
|
||||
: null;
|
||||
|
||||
/**
|
||||
@@ -134,8 +134,8 @@ function disposeClient(client) {
|
||||
if (client.message_delta) {
|
||||
client.message_delta = null;
|
||||
}
|
||||
if (client.isClaude3 !== undefined) {
|
||||
client.isClaude3 = null;
|
||||
if (client.isClaudeLatest !== undefined) {
|
||||
client.isClaudeLatest = null;
|
||||
}
|
||||
if (client.useMessages !== undefined) {
|
||||
client.useMessages = null;
|
||||
|
||||
@@ -1,3 +1,4 @@
const openIdClient = require('openid-client');
const cookies = require('cookie');
const jwt = require('jsonwebtoken');
const {
@@ -5,9 +6,12 @@ const {
  resetPassword,
  setAuthTokens,
  requestPasswordReset,
  setOpenIDAuthTokens,
} = require('~/server/services/AuthService');
const { findSession, getUserById, deleteAllUserSessions } = require('~/models');
const { findSession, getUserById, deleteAllUserSessions, findUser } = require('~/models');
const { getOpenIdConfig } = require('~/strategies');
const { logger } = require('~/config');
const { isEnabled } = require('~/server/utils');

const registrationController = async (req, res) => {
  try {
@@ -55,10 +59,28 @@ const resetPasswordController = async (req, res) => {

const refreshController = async (req, res) => {
  const refreshToken = req.headers.cookie ? cookies.parse(req.headers.cookie).refreshToken : null;
  const token_provider = req.headers.cookie
    ? cookies.parse(req.headers.cookie).token_provider
    : null;
  if (!refreshToken) {
    return res.status(200).send('Refresh token not provided');
  }

  if (token_provider === 'openid' && isEnabled(process.env.OPENID_REUSE_TOKENS) === true) {
    try {
      const openIdConfig = getOpenIdConfig();
      const tokenset = await openIdClient.refreshTokenGrant(openIdConfig, refreshToken);
      const claims = tokenset.claims();
      const user = await findUser({ email: claims.email });
      if (!user) {
        return res.status(401).redirect('/login');
      }
      const token = setOpenIDAuthTokens(tokenset, res);
      return res.status(200).send({ token, user });
    } catch (error) {
      logger.error('[refreshController] OpenID token refresh error', error);
      return res.status(403).send('Invalid OpenID refresh token');
    }
  }
  try {
    const payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
    const user = await getUserById(payload.id, '-password -__v -totpSecret');

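For reviewers: the new branch above only runs when the request carries both a `refreshToken` cookie and a `token_provider=openid` cookie and `OPENID_REUSE_TOKENS` is enabled; otherwise the existing JWT refresh path is unchanged. A minimal sketch of exercising it with supertest (the HTTP method and mount path are assumptions, not shown in this diff):

```js
// Sketch only: assumes the controller is mounted at POST /api/auth/refresh and that
// OPENID_REUSE_TOKENS=true is set, so the OpenID branch runs the refresh_token grant.
const request = require('supertest');

async function exerciseOpenIdRefresh(app, openIdRefreshToken) {
  return request(app)
    .post('/api/auth/refresh')
    // Both cookies must be present for the OpenID path to be taken.
    .set('Cookie', [`refreshToken=${openIdRefreshToken}`, 'token_provider=openid'])
    .expect(200); // On success the controller responds with { token, user }.
}

module.exports = { exerciseOpenIdRefresh };
```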
@@ -1,4 +1,10 @@
|
||||
const { FileSources } = require('librechat-data-provider');
|
||||
const {
|
||||
Tools,
|
||||
Constants,
|
||||
FileSources,
|
||||
webSearchKeys,
|
||||
extractWebSearchEnvVars,
|
||||
} = require('librechat-data-provider');
|
||||
const {
|
||||
Balance,
|
||||
getFiles,
|
||||
@@ -83,7 +89,6 @@ const deleteUserFiles = async (req) => {
|
||||
const updateUserPluginsController = async (req, res) => {
|
||||
const { user } = req;
|
||||
const { pluginKey, action, auth, isEntityTool } = req.body;
|
||||
let authService;
|
||||
try {
|
||||
if (!isEntityTool) {
|
||||
const userPluginsService = await updateUserPluginsService(user, pluginKey, action);
|
||||
@@ -95,32 +100,55 @@ const updateUserPluginsController = async (req, res) => {
|
||||
}
|
||||
}
|
||||
|
||||
if (auth) {
|
||||
const keys = Object.keys(auth);
|
||||
const values = Object.values(auth);
|
||||
if (action === 'install' && keys.length > 0) {
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
authService = await updateUserPluginAuth(user.id, keys[i], pluginKey, values[i]);
|
||||
if (authService instanceof Error) {
|
||||
logger.error('[authService]', authService);
|
||||
const { status, message } = authService;
|
||||
res.status(status).send({ message });
|
||||
}
|
||||
if (auth == null) {
|
||||
return res.status(200).send();
|
||||
}
|
||||
|
||||
let keys = Object.keys(auth);
|
||||
if (keys.length === 0 && pluginKey !== Tools.web_search) {
|
||||
return res.status(200).send();
|
||||
}
|
||||
const values = Object.values(auth);
|
||||
|
||||
/** @type {number} */
|
||||
let status = 200;
|
||||
/** @type {string} */
|
||||
let message;
|
||||
/** @type {IPluginAuth | Error} */
|
||||
let authService;
|
||||
|
||||
if (pluginKey === Tools.web_search) {
|
||||
/** @type {TCustomConfig['webSearch']} */
|
||||
const webSearchConfig = req.app.locals?.webSearch;
|
||||
keys = extractWebSearchEnvVars({
|
||||
keys: action === 'install' ? keys : webSearchKeys,
|
||||
config: webSearchConfig,
|
||||
});
|
||||
}
|
||||
|
||||
if (action === 'install') {
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
authService = await updateUserPluginAuth(user.id, keys[i], pluginKey, values[i]);
|
||||
if (authService instanceof Error) {
|
||||
logger.error('[authService]', authService);
|
||||
({ status, message } = authService);
|
||||
}
|
||||
}
|
||||
if (action === 'uninstall' && keys.length > 0) {
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
authService = await deleteUserPluginAuth(user.id, keys[i]);
|
||||
if (authService instanceof Error) {
|
||||
logger.error('[authService]', authService);
|
||||
const { status, message } = authService;
|
||||
res.status(status).send({ message });
|
||||
}
|
||||
} else if (action === 'uninstall') {
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
authService = await deleteUserPluginAuth(user.id, keys[i]);
|
||||
if (authService instanceof Error) {
|
||||
logger.error('[authService]', authService);
|
||||
({ status, message } = authService);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
res.status(200).send();
|
||||
if (status === 200) {
|
||||
return res.status(status).send();
|
||||
}
|
||||
|
||||
res.status(status).send({ message });
|
||||
} catch (err) {
|
||||
logger.error('[updateUserPluginsController]', err);
|
||||
return res.status(500).json({ message: 'Something went wrong.' });
|
||||
|
||||
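The `updateUserPluginsController` changes above special-case `web_search`: on install, the submitted auth keys are filtered through `extractWebSearchEnvVars` against the deployment's webSearch config before being stored with `updateUserPluginAuth`. A hedged sketch of the request body this implies (the endpoint path and concrete key names are illustrative):

```js
// Illustrative only: the auth keys must be the environment-variable names your
// librechat.yaml webSearch config references (SERPER_API_KEY / JINA_API_KEY appear
// elsewhere in this diff); the endpoint path below is assumed, not shown here.
const installWebSearchBody = {
  pluginKey: 'web_search', // Tools.web_search
  action: 'install',
  auth: {
    SERPER_API_KEY: 'user-provided-serper-key',
    JINA_API_KEY: 'user-provided-jina-key',
  },
};

// e.g. await axios.post('/api/user/plugins', installWebSearchBody);
```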
@@ -237,6 +237,30 @@ function createToolEndCallback({ req, res, artifactPromises }) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (output.artifact[Tools.web_search]) {
|
||||
artifactPromises.push(
|
||||
(async () => {
|
||||
const name = `${output.name}_${output.tool_call_id}_${nanoid()}`;
|
||||
const attachment = {
|
||||
name,
|
||||
type: Tools.web_search,
|
||||
messageId: metadata.run_id,
|
||||
toolCallId: output.tool_call_id,
|
||||
conversationId: metadata.thread_id,
|
||||
[Tools.web_search]: { ...output.artifact[Tools.web_search] },
|
||||
};
|
||||
if (!res.headersSent) {
|
||||
return attachment;
|
||||
}
|
||||
res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
|
||||
return attachment;
|
||||
})().catch((error) => {
|
||||
logger.error('Error processing artifact content:', error);
|
||||
return null;
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (output.artifact.content) {
|
||||
/** @type {FormattedContent[]} */
|
||||
const content = output.artifact.content;
|
||||
|
||||
@@ -39,9 +39,6 @@ const BaseClient = require('~/app/clients/BaseClient');
|
||||
const { logger, sendEvent } = require('~/config');
|
||||
const { createRun } = require('./run');
|
||||
|
||||
/** @typedef {import('@librechat/agents').MessageContentComplex} MessageContentComplex */
|
||||
/** @typedef {import('@langchain/core/runnables').RunnableConfig} RunnableConfig */
|
||||
|
||||
/**
|
||||
* @param {ServerRequest} req
|
||||
* @param {Agent} agent
|
||||
@@ -543,7 +540,7 @@ class AgentClient extends BaseClient {
|
||||
}
|
||||
|
||||
async chatCompletion({ payload, abortController = null }) {
|
||||
/** @type {Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string; streamMode: string }} */
|
||||
/** @type {Partial<GraphRunnableConfig>} */
|
||||
let config;
|
||||
/** @type {ReturnType<createRun>} */
|
||||
let run;
|
||||
|
||||
@@ -23,6 +23,7 @@ const { updateAction, getActions } = require('~/models/Action');
|
||||
const { updateAgentProjects } = require('~/models/Agent');
|
||||
const { getProjectByName } = require('~/models/Project');
|
||||
const { deleteFileByFilter } = require('~/models/File');
|
||||
const { revertAgentVersion } = require('~/models/Agent');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const systemTools = {
|
||||
@@ -104,11 +105,13 @@ const getAgentHandler = async (req, res) => {
|
||||
return res.status(404).json({ error: 'Agent not found' });
|
||||
}
|
||||
|
||||
agent.version = agent.versions ? agent.versions.length : 0;
|
||||
|
||||
if (agent.avatar && agent.avatar?.source === FileSources.s3) {
|
||||
const originalUrl = agent.avatar.filepath;
|
||||
agent.avatar.filepath = await refreshS3Url(agent.avatar);
|
||||
if (originalUrl !== agent.avatar.filepath) {
|
||||
await updateAgent({ id }, { avatar: agent.avatar });
|
||||
await updateAgent({ id }, { avatar: agent.avatar }, req.user.id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -127,6 +130,7 @@ const getAgentHandler = async (req, res) => {
|
||||
author: agent.author,
|
||||
projectIds: agent.projectIds,
|
||||
isCollaborative: agent.isCollaborative,
|
||||
version: agent.version,
|
||||
});
|
||||
}
|
||||
return res.status(200).json(agent);
|
||||
@@ -165,7 +169,9 @@ const updateAgentHandler = async (req, res) => {
|
||||
}
|
||||
|
||||
let updatedAgent =
|
||||
Object.keys(updateData).length > 0 ? await updateAgent({ id }, updateData) : existingAgent;
|
||||
Object.keys(updateData).length > 0
|
||||
? await updateAgent({ id }, updateData, req.user.id)
|
||||
: existingAgent;
|
||||
|
||||
if (projectIds || removeProjectIds) {
|
||||
updatedAgent = await updateAgentProjects({
|
||||
@@ -187,6 +193,14 @@ const updateAgentHandler = async (req, res) => {
|
||||
return res.json(updatedAgent);
|
||||
} catch (error) {
|
||||
logger.error('[/Agents/:id] Error updating Agent', error);
|
||||
|
||||
if (error.statusCode === 409) {
|
||||
return res.status(409).json({
|
||||
error: error.message,
|
||||
details: error.details,
|
||||
});
|
||||
}
|
||||
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
};
|
||||
@@ -393,7 +407,7 @@ const uploadAgentAvatarHandler = async (req, res) => {
|
||||
},
|
||||
};
|
||||
|
||||
promises.push(await updateAgent({ id: agent_id, author: req.user.id }, data));
|
||||
promises.push(await updateAgent({ id: agent_id, author: req.user.id }, data, req.user.id));
|
||||
|
||||
const resolved = await Promise.all(promises);
|
||||
res.status(201).json(resolved[0]);
|
||||
@@ -411,6 +425,66 @@ const uploadAgentAvatarHandler = async (req, res) => {
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Reverts an agent to a previous version from its version history.
|
||||
* @route PATCH /agents/:id/revert
|
||||
* @param {object} req - Express Request object
|
||||
* @param {object} req.params - Request parameters
|
||||
* @param {string} req.params.id - The ID of the agent to revert
|
||||
* @param {object} req.body - Request body
|
||||
* @param {number} req.body.version_index - The index of the version to revert to
|
||||
* @param {object} req.user - Authenticated user information
|
||||
* @param {string} req.user.id - User ID
|
||||
* @param {string} req.user.role - User role
|
||||
* @param {ServerResponse} res - Express Response object
|
||||
* @returns {Promise<Agent>} 200 - The updated agent after reverting to the specified version
|
||||
* @throws {Error} 400 - If version_index is missing
|
||||
* @throws {Error} 403 - If user doesn't have permission to modify the agent
|
||||
* @throws {Error} 404 - If agent not found
|
||||
* @throws {Error} 500 - If there's an internal server error during the reversion process
|
||||
*/
|
||||
const revertAgentVersionHandler = async (req, res) => {
|
||||
try {
|
||||
const { id } = req.params;
|
||||
const { version_index } = req.body;
|
||||
|
||||
if (version_index === undefined) {
|
||||
return res.status(400).json({ error: 'version_index is required' });
|
||||
}
|
||||
|
||||
const isAdmin = req.user.role === SystemRoles.ADMIN;
|
||||
const existingAgent = await getAgent({ id });
|
||||
|
||||
if (!existingAgent) {
|
||||
return res.status(404).json({ error: 'Agent not found' });
|
||||
}
|
||||
|
||||
const isAuthor = existingAgent.author.toString() === req.user.id;
|
||||
const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;
|
||||
|
||||
if (!hasEditPermission) {
|
||||
return res.status(403).json({
|
||||
error: 'You do not have permission to modify this non-collaborative agent',
|
||||
});
|
||||
}
|
||||
|
||||
const updatedAgent = await revertAgentVersion({ id }, version_index);
|
||||
|
||||
if (updatedAgent.author) {
|
||||
updatedAgent.author = updatedAgent.author.toString();
|
||||
}
|
||||
|
||||
if (updatedAgent.author !== req.user.id) {
|
||||
delete updatedAgent.author;
|
||||
}
|
||||
|
||||
return res.json(updatedAgent);
|
||||
} catch (error) {
|
||||
logger.error('[/agents/:id/revert] Error reverting Agent version', error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
createAgent: createAgentHandler,
|
||||
getAgent: getAgentHandler,
|
||||
@@ -419,4 +493,5 @@ module.exports = {
|
||||
deleteAgent: deleteAgentHandler,
|
||||
getListAgents: getListAgentsHandler,
|
||||
uploadAgentAvatar: uploadAgentAvatarHandler,
|
||||
revertAgentVersion: revertAgentVersionHandler,
|
||||
};
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const cookies = require('cookie');
|
||||
const { Issuer } = require('openid-client');
|
||||
const { getOpenIdConfig } = require('~/strategies');
|
||||
const { logoutUser } = require('~/server/services/AuthService');
|
||||
const { isEnabled } = require('~/server/utils');
|
||||
const { logger } = require('~/config');
|
||||
@@ -10,20 +10,29 @@ const logoutController = async (req, res) => {
|
||||
const logout = await logoutUser(req, refreshToken);
|
||||
const { status, message } = logout;
|
||||
res.clearCookie('refreshToken');
|
||||
res.clearCookie('token_provider');
|
||||
const response = { message };
|
||||
if (
|
||||
req.user.openidId != null &&
|
||||
isEnabled(process.env.OPENID_USE_END_SESSION_ENDPOINT) &&
|
||||
process.env.OPENID_ISSUER
|
||||
) {
|
||||
const issuer = await Issuer.discover(process.env.OPENID_ISSUER);
|
||||
const redirect = issuer.metadata.end_session_endpoint;
|
||||
if (!redirect) {
|
||||
const openIdConfig = getOpenIdConfig();
|
||||
if (!openIdConfig) {
|
||||
logger.warn(
|
||||
'[logoutController] end_session_endpoint not found in OpenID issuer metadata. Please verify that the issuer is correct.',
|
||||
'[logoutController] OpenID config not found. Please verify that the open id configuration and initialization are correct.',
|
||||
);
|
||||
} else {
|
||||
response.redirect = redirect;
|
||||
const endSessionEndpoint = openIdConfig
|
||||
? openIdConfig.serverMetadata().end_session_endpoint
|
||||
: null;
|
||||
if (endSessionEndpoint) {
|
||||
response.redirect = endSessionEndpoint;
|
||||
} else {
|
||||
logger.warn(
|
||||
'[logoutController] end_session_endpoint not found in OpenID issuer metadata. Please verify that the issuer is correct.',
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
return res.status(status).send(response);
|
||||
|
||||
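Note that the server only returns the discovered `end_session_endpoint` in `response.redirect`; following it is left to the caller. A hedged sketch of client-side handling (none of this client code is part of the diff):

```js
// Assumption: logout is exposed at POST /api/auth/logout and responds with { message, redirect? }.
async function logoutAndEndOpenIdSession() {
  const res = await fetch('/api/auth/logout', { method: 'POST', credentials: 'include' });
  const data = await res.json();
  if (data.redirect) {
    // Present only when OPENID_USE_END_SESSION_ENDPOINT is enabled and the
    // provider metadata exposes an end_session_endpoint.
    window.location.href = data.redirect;
  }
}
```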
@@ -6,6 +6,7 @@ const {
|
||||
Permissions,
|
||||
ToolCallTypes,
|
||||
PermissionTypes,
|
||||
loadWebSearchAuth,
|
||||
} = require('librechat-data-provider');
|
||||
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
|
||||
const { processCodeOutput } = require('~/server/services/Files/Code/process');
|
||||
@@ -24,6 +25,36 @@ const toolAccessPermType = {
|
||||
[Tools.execute_code]: PermissionTypes.RUN_CODE,
|
||||
};
|
||||
|
||||
/**
|
||||
* Verifies web search authentication, ensuring each category has at least
|
||||
* one fully authenticated service.
|
||||
*
|
||||
* @param {ServerRequest} req - The request object
|
||||
* @param {ServerResponse} res - The response object
|
||||
* @returns {Promise<void>} A promise that resolves when the function has completed
|
||||
*/
|
||||
const verifyWebSearchAuth = async (req, res) => {
|
||||
try {
|
||||
const userId = req.user.id;
|
||||
/** @type {TCustomConfig['webSearch']} */
|
||||
const webSearchConfig = req.app.locals?.webSearch || {};
|
||||
const result = await loadWebSearchAuth({
|
||||
userId,
|
||||
loadAuthValues,
|
||||
webSearchConfig,
|
||||
throwError: false,
|
||||
});
|
||||
|
||||
return res.status(200).json({
|
||||
authenticated: result.authenticated,
|
||||
authTypes: result.authTypes,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error in verifyWebSearchAuth:', error);
|
||||
return res.status(500).json({ message: error.message });
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {ServerRequest} req - The request object, containing information about the HTTP request.
|
||||
* @param {ServerResponse} res - The response object, used to send back the desired HTTP response.
|
||||
@@ -32,6 +63,9 @@ const toolAccessPermType = {
|
||||
const verifyToolAuth = async (req, res) => {
|
||||
try {
|
||||
const { toolId } = req.params;
|
||||
if (toolId === Tools.web_search) {
|
||||
return await verifyWebSearchAuth(req, res);
|
||||
}
|
||||
const authFields = fieldsMap[toolId];
|
||||
if (!authFields) {
|
||||
res.status(404).json({ message: 'Tool not found' });
|
||||
|
||||
@@ -75,7 +75,7 @@ const startServer = async () => {
|
||||
|
||||
/* OAUTH */
|
||||
app.use(passport.initialize());
|
||||
passport.use(await jwtLogin());
|
||||
passport.use(jwtLogin());
|
||||
passport.use(passportLogin());
|
||||
|
||||
/* LDAP Auth */
|
||||
@@ -84,7 +84,7 @@ const startServer = async () => {
|
||||
}
|
||||
|
||||
if (isEnabled(ALLOW_SOCIAL_LOGIN)) {
|
||||
configureSocialLogins(app);
|
||||
await configureSocialLogins(app);
|
||||
}
|
||||
|
||||
app.use('/oauth', routes.oauth);
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
const cookies = require('cookie');
|
||||
const { isEnabled } = require('~/server/utils');
|
||||
const passport = require('passport');
|
||||
|
||||
// This middleware does not require authentication,
|
||||
// but if the user is authenticated, it will set the user object.
|
||||
const optionalJwtAuth = (req, res, next) => {
|
||||
passport.authenticate('jwt', { session: false }, (err, user) => {
|
||||
const cookieHeader = req.headers.cookie;
|
||||
const tokenProvider = cookieHeader ? cookies.parse(cookieHeader).token_provider : null;
|
||||
const callback = (err, user) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
@@ -11,7 +15,11 @@ const optionalJwtAuth = (req, res, next) => {
|
||||
req.user = user;
|
||||
}
|
||||
next();
|
||||
})(req, res, next);
|
||||
};
|
||||
if (tokenProvider === 'openid' && isEnabled(process.env.OPENID_REUSE_TOKENS)) {
|
||||
return passport.authenticate('openidJwt', { session: false }, callback)(req, res, next);
|
||||
}
|
||||
passport.authenticate('jwt', { session: false }, callback)(req, res, next);
|
||||
};
|
||||
|
||||
module.exports = optionalJwtAuth;
|
||||
|
||||
@@ -1,5 +1,23 @@
const passport = require('passport');
const cookies = require('cookie');
const { isEnabled } = require('~/server/utils');

const requireJwtAuth = passport.authenticate('jwt', { session: false });
/**
 * Custom Middleware to handle JWT authentication, with support for OpenID token reuse
 * Switches between JWT and OpenID authentication based on cookies and environment settings
 */
const requireJwtAuth = (req, res, next) => {
  // Check if token provider is specified in cookies
  const cookieHeader = req.headers.cookie;
  const tokenProvider = cookieHeader ? cookies.parse(cookieHeader).token_provider : null;

  // Use OpenID authentication if token provider is OpenID and OPENID_REUSE_TOKENS is enabled
  if (tokenProvider === 'openid' && isEnabled(process.env.OPENID_REUSE_TOKENS)) {
    return passport.authenticate('openidJwt', { session: false })(req, res, next);
  }

  // Default to standard JWT authentication
  return passport.authenticate('jwt', { session: false })(req, res, next);
};

module.exports = requireJwtAuth;

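Since `requireJwtAuth` keeps the standard `(req, res, next)` middleware shape, existing routes need no changes; the OpenID-vs-JWT decision happens per request from the `token_provider` cookie. A small wiring sketch (the route and import path are illustrative):

```js
const express = require('express');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth'); // import path assumed

const router = express.Router();

// Authenticates with the LibreChat JWT by default, or with the 'openidJwt' strategy
// when token_provider=openid is set and OPENID_REUSE_TOKENS is enabled.
router.get('/me', requireJwtAuth, (req, res) => {
  res.json({ id: req.user.id, email: req.user.email });
});

module.exports = router;
```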
@@ -1,11 +1,11 @@
|
||||
jest.mock('~/cache/getLogStores');
|
||||
const request = require('supertest');
|
||||
const express = require('express');
|
||||
const routes = require('../');
|
||||
const configRoute = require('../config');
|
||||
// file deepcode ignore UseCsurfForExpress/test: test
|
||||
const app = express();
|
||||
app.disable('x-powered-by');
|
||||
app.use('/api/config', routes.config);
|
||||
app.use('/api/config', configRoute);
|
||||
|
||||
afterEach(() => {
|
||||
delete process.env.APP_TITLE;
|
||||
|
||||
@@ -107,7 +107,7 @@ router.post('/:agent_id', async (req, res) => {
|
||||
.filter((tool) => !(tool && (tool.includes(domain) || tool.includes(action_id))))
|
||||
.concat(functions.map((tool) => `${tool.function.name}${actionDelimiter}${domain}`));
|
||||
|
||||
const updatedAgent = await updateAgent(agentQuery, { tools, actions });
|
||||
const updatedAgent = await updateAgent(agentQuery, { tools, actions }, req.user.id);
|
||||
|
||||
// Only update user field for new actions
|
||||
const actionUpdateData = { metadata, agent_id };
|
||||
@@ -172,7 +172,7 @@ router.delete('/:agent_id/:action_id', async (req, res) => {
|
||||
|
||||
const updatedTools = tools.filter((tool) => !(tool && tool.includes(domain)));
|
||||
|
||||
await updateAgent(agentQuery, { tools: updatedTools, actions: updatedActions });
|
||||
await updateAgent(agentQuery, { tools: updatedTools, actions: updatedActions }, req.user.id);
|
||||
// If admin, can delete any action, otherwise only user's actions
|
||||
const actionQuery = admin ? { action_id } : { action_id, user: req.user.id };
|
||||
await deleteAction(actionQuery);
|
||||
|
||||
@@ -78,6 +78,15 @@ router.post('/:id/duplicate', checkAgentCreate, v1.duplicateAgent);
|
||||
*/
|
||||
router.delete('/:id', checkAgentCreate, v1.deleteAgent);
|
||||
|
||||
/**
|
||||
* Reverts an agent to a previous version.
|
||||
* @route POST /agents/:id/revert
|
||||
* @param {string} req.params.id - Agent identifier.
|
||||
* @param {number} req.body.version_index - Index of the version to revert to.
|
||||
* @returns {Agent} 200 - success response - application/json
|
||||
*/
|
||||
router.post('/:id/revert', checkGlobalAgentShare, v1.revertAgentVersion);
|
||||
|
||||
/**
|
||||
* Returns a list of agents.
|
||||
* @route GET /agents
|
||||
|
||||
@@ -85,6 +85,26 @@ router.get('/', async function (req, res) {
|
||||
bundlerURL: process.env.SANDPACK_BUNDLER_URL,
|
||||
staticBundlerURL: process.env.SANDPACK_STATIC_BUNDLER_URL,
|
||||
};
|
||||
/** @type {TCustomConfig['webSearch']} */
|
||||
const webSearchConfig = req.app.locals.webSearch;
|
||||
if (
|
||||
webSearchConfig != null &&
|
||||
(webSearchConfig.searchProvider ||
|
||||
webSearchConfig.scraperType ||
|
||||
webSearchConfig.rerankerType)
|
||||
) {
|
||||
payload.webSearch = {};
|
||||
}
|
||||
|
||||
if (webSearchConfig?.searchProvider) {
|
||||
payload.webSearch.searchProvider = webSearchConfig.searchProvider;
|
||||
}
|
||||
if (webSearchConfig?.scraperType) {
|
||||
payload.webSearch.scraperType = webSearchConfig.scraperType;
|
||||
}
|
||||
if (webSearchConfig?.rerankerType) {
|
||||
payload.webSearch.rerankerType = webSearchConfig.rerankerType;
|
||||
}
|
||||
|
||||
if (ldap) {
|
||||
payload.ldap = ldap;
|
||||
|
||||
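For reference, the shape this block adds to the `/api/config` payload; only the three keys are defined by the code, and the string values below are purely illustrative:

```js
// Illustrative fragment only: actual values depend on the deployment's
// librechat.yaml webSearch settings (provider/scraper/reranker names are examples).
const exampleConfigPayload = {
  // ...existing /api/config fields...
  webSearch: {
    searchProvider: 'serper',
    scraperType: 'firecrawl',
    rerankerType: 'jina',
  },
};
```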
@@ -74,7 +74,7 @@ router.post('/gen_title', async (req, res) => {
|
||||
res.status(200).json({ title });
|
||||
} else {
|
||||
res.status(404).json({
|
||||
message: 'Title not found or method not implemented for the conversation\'s endpoint',
|
||||
message: "Title not found or method not implemented for the conversation's endpoint",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
// file deepcode ignore NoRateLimitingForLogin: Rate limiting is handled by the `loginLimiter` middleware
|
||||
const express = require('express');
|
||||
const jwt = require('jsonwebtoken');
|
||||
const passport = require('passport');
|
||||
const client = require('openid-client');
|
||||
const {
|
||||
checkBan,
|
||||
logHeaders,
|
||||
@@ -8,8 +10,9 @@ const {
|
||||
setBalanceConfig,
|
||||
checkDomainAllowed,
|
||||
} = require('~/server/middleware');
|
||||
const { setAuthTokens } = require('~/server/services/AuthService');
|
||||
const { setAuthTokens, setOpenIDAuthTokens } = require('~/server/services/AuthService');
|
||||
const { logger } = require('~/config');
|
||||
const { isEnabled } = require('~/server/utils');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -18,6 +21,8 @@ const domains = {
|
||||
server: process.env.DOMAIN_SERVER,
|
||||
};
|
||||
|
||||
const JWT_SECRET = process.env.JWT_SECRET || process.env.OPENID_SESSION_SECRET;
|
||||
|
||||
router.use(logHeaders);
|
||||
router.use(loginLimiter);
|
||||
|
||||
@@ -28,7 +33,15 @@ const oauthHandler = async (req, res) => {
|
||||
if (req.banned) {
|
||||
return;
|
||||
}
|
||||
await setAuthTokens(req.user._id, res);
|
||||
if (
|
||||
req.user &&
|
||||
req.user.provider == 'openid' &&
|
||||
isEnabled(process.env.OPENID_REUSE_TOKENS) === true
|
||||
) {
|
||||
setOpenIDAuthTokens(req.user.tokenset, res);
|
||||
} else {
|
||||
await setAuthTokens(req.user._id, res);
|
||||
}
|
||||
res.redirect(domains.client);
|
||||
} catch (err) {
|
||||
logger.error('Error in setting authentication tokens:', err);
|
||||
@@ -94,20 +107,71 @@ router.get(
|
||||
/**
|
||||
* OpenID Routes
|
||||
*/
|
||||
router.get(
|
||||
'/openid',
|
||||
passport.authenticate('openid', {
|
||||
session: false,
|
||||
}),
|
||||
);
|
||||
router.get('/openid', (req, res, next) => {
|
||||
const state = client.randomState();
|
||||
|
||||
try {
|
||||
const stateToken = jwt.sign(
|
||||
{
|
||||
state: state,
|
||||
timestamp: Date.now(),
|
||||
},
|
||||
JWT_SECRET,
|
||||
{ expiresIn: '10m' },
|
||||
);
|
||||
|
||||
res.cookie('oauth_state', stateToken, {
|
||||
httpOnly: true,
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
signed: false,
|
||||
maxAge: 10 * 60 * 1000,
|
||||
sameSite: 'lax',
|
||||
});
|
||||
passport.authenticate('openid', {
|
||||
session: false,
|
||||
state: state,
|
||||
})(req, res, next);
|
||||
} catch (error) {
|
||||
logger.error('Error creating state token for OpenID authentication', error);
|
||||
return res.redirect(`${domains.client}/oauth/error`);
|
||||
}
|
||||
});
|
||||
|
||||
router.get(
|
||||
'/openid/callback',
|
||||
passport.authenticate('openid', {
|
||||
failureRedirect: `${domains.client}/oauth/error`,
|
||||
failureMessage: true,
|
||||
session: false,
|
||||
}),
|
||||
(req, res, next) => {
|
||||
if (!req.query.state) {
|
||||
logger.error('Missing state parameter in OpenID callback');
|
||||
return res.redirect(`${domains.client}/oauth/error`);
|
||||
}
|
||||
|
||||
const stateToken = req.cookies.oauth_state;
|
||||
if (!stateToken) {
|
||||
logger.error('No state cookie found for OpenID callback');
|
||||
return res.redirect(`${domains.client}/oauth/error`);
|
||||
}
|
||||
|
||||
try {
|
||||
const decodedState = jwt.verify(stateToken, JWT_SECRET);
|
||||
if (req.query.state !== decodedState.state) {
|
||||
logger.error('Invalid state parameter in OpenID callback', {
|
||||
received: req.query.state,
|
||||
expected: decodedState.state,
|
||||
});
|
||||
return res.redirect(`${domains.client}/oauth/error`);
|
||||
}
|
||||
res.clearCookie('oauth_state');
|
||||
passport.authenticate('openid', {
|
||||
failureRedirect: `${domains.client}/oauth/error`,
|
||||
failureMessage: true,
|
||||
session: false,
|
||||
})(req, res, next);
|
||||
} catch (error) {
|
||||
logger.error('Invalid or expired state token in OpenID callback', error);
|
||||
res.clearCookie('oauth_state');
|
||||
return res.redirect(`${domains.client}/oauth/error`);
|
||||
}
|
||||
},
|
||||
setBalanceConfig,
|
||||
oauthHandler,
|
||||
);
|
||||
|
||||
@@ -25,6 +25,7 @@ jest.mock('./start/checks', () => ({
|
||||
checkHealth: jest.fn(),
|
||||
checkConfig: jest.fn(),
|
||||
checkAzureVariables: jest.fn(),
|
||||
checkWebSearchConfig: jest.fn(),
|
||||
}));
|
||||
|
||||
const AppService = require('./AppService');
|
||||
|
||||
@@ -1,11 +1,18 @@
|
||||
const {
|
||||
FileSources,
|
||||
EModelEndpoint,
|
||||
loadOCRConfig,
|
||||
processMCPEnv,
|
||||
EModelEndpoint,
|
||||
getConfigDefaults,
|
||||
loadWebSearchConfig,
|
||||
} = require('librechat-data-provider');
|
||||
const { checkVariables, checkHealth, checkConfig, checkAzureVariables } = require('./start/checks');
|
||||
const {
|
||||
checkHealth,
|
||||
checkConfig,
|
||||
checkVariables,
|
||||
checkAzureVariables,
|
||||
checkWebSearchConfig,
|
||||
} = require('./start/checks');
|
||||
const { azureAssistantsDefaults, assistantsConfigSetup } = require('./start/assistants');
|
||||
const { initializeAzureBlobService } = require('./Files/Azure/initialize');
|
||||
const { initializeFirebase } = require('./Files/Firebase/initialize');
|
||||
@@ -35,6 +42,8 @@ const AppService = async (app) => {
|
||||
const configDefaults = getConfigDefaults();
|
||||
|
||||
const ocr = loadOCRConfig(config.ocr);
|
||||
const webSearch = loadWebSearchConfig(config.webSearch);
|
||||
checkWebSearchConfig(webSearch);
|
||||
const filteredTools = config.filteredTools;
|
||||
const includedTools = config.includedTools;
|
||||
const fileStrategy = config.fileStrategy ?? configDefaults.fileStrategy;
|
||||
@@ -79,6 +88,7 @@ const AppService = async (app) => {
|
||||
const defaultLocals = {
|
||||
ocr,
|
||||
paths,
|
||||
webSearch,
|
||||
fileStrategy,
|
||||
socialLogins,
|
||||
filteredTools,
|
||||
|
||||
@@ -141,6 +141,14 @@ describe('AppService', () => {
|
||||
balance: { enabled: true },
|
||||
filteredTools: undefined,
|
||||
includedTools: undefined,
|
||||
webSearch: {
|
||||
cohereApiKey: '${COHERE_API_KEY}',
|
||||
firecrawlApiKey: '${FIRECRAWL_API_KEY}',
|
||||
firecrawlApiUrl: '${FIRECRAWL_API_URL}',
|
||||
jinaApiKey: '${JINA_API_KEY}',
|
||||
safeSearch: 1,
|
||||
serperApiKey: '${SERPER_API_KEY}',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
@@ -537,7 +545,7 @@ describe('AppService updating app.locals and issuing warnings', () => {
|
||||
const { logger } = require('~/config');
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining(
|
||||
'The \'assistants\' endpoint has both \'supportedIds\' and \'excludedIds\' defined.',
|
||||
"The 'assistants' endpoint has both 'supportedIds' and 'excludedIds' defined.",
|
||||
),
|
||||
);
|
||||
});
|
||||
@@ -559,7 +567,7 @@ describe('AppService updating app.locals and issuing warnings', () => {
|
||||
const { logger } = require('~/config');
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining(
|
||||
'The \'assistants\' endpoint has both \'privateAssistants\' and \'supportedIds\' or \'excludedIds\' defined.',
|
||||
"The 'assistants' endpoint has both 'privateAssistants' and 'supportedIds' or 'excludedIds' defined.",
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
@@ -377,13 +377,62 @@ const setAuthTokens = async (userId, res, sessionId = null) => {
      secure: isProduction,
      sameSite: 'strict',
    });

    res.cookie('token_provider', 'librechat', {
      expires: new Date(refreshTokenExpires),
      httpOnly: true,
      secure: isProduction,
      sameSite: 'strict',
    });
    return token;
  } catch (error) {
    logger.error('[setAuthTokens] Error in setting authentication tokens:', error);
    throw error;
  }
};
/**
 * @function setOpenIDAuthTokens
 * Set OpenID Authentication Tokens
 * //type tokenset from openid-client
 * @param {import('openid-client').TokenEndpointResponse & import('openid-client').TokenEndpointResponseHelpers} tokenset
 * - The tokenset object containing access and refresh tokens
 * @param {Object} res - response object
 * @returns {String} - access token
 */
const setOpenIDAuthTokens = (tokenset, res) => {
  try {
    if (!tokenset) {
      logger.error('[setOpenIDAuthTokens] No tokenset found in request');
      return;
    }
    const { REFRESH_TOKEN_EXPIRY } = process.env ?? {};
    const expiryInMilliseconds = eval(REFRESH_TOKEN_EXPIRY) ?? 1000 * 60 * 60 * 24 * 7; // 7 days default
    const expirationDate = new Date(Date.now() + expiryInMilliseconds);
    if (tokenset == null) {
      logger.error('[setOpenIDAuthTokens] No tokenset found in request');
      return;
    }
    if (!tokenset.access_token || !tokenset.refresh_token) {
      logger.error('[setOpenIDAuthTokens] No access or refresh token found in tokenset');
      return;
    }
    res.cookie('refreshToken', tokenset.refresh_token, {
      expires: expirationDate,
      httpOnly: true,
      secure: isProduction,
      sameSite: 'strict',
    });
    res.cookie('token_provider', 'openid', {
      expires: expirationDate,
      httpOnly: true,
      secure: isProduction,
      sameSite: 'strict',
    });
    return tokenset.access_token;
  } catch (error) {
    logger.error('[setOpenIDAuthTokens] Error in setting authentication tokens:', error);
    throw error;
  }
};

/**
 * Resend Verification Email
@@ -452,4 +501,5 @@ module.exports = {
  resetPassword,
  requestPasswordReset,
  resendVerificationEmail,
  setOpenIDAuthTokens,
};

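One follow-up reviewers may want to raise: `REFRESH_TOKEN_EXPIRY` is evaluated with `eval`, mirroring the existing `setAuthTokens` code. A sketch of an eval-free alternative, offered as a suggestion rather than a description of what this diff does:

```js
// Suggested alternative, not part of this diff: accept a plain number of milliseconds
// or a simple product like "1000 * 60 * 60 * 24 * 7" without handing the value to eval().
function parseRefreshTokenExpiry(raw, fallbackMs = 1000 * 60 * 60 * 24 * 7) {
  if (!raw) {
    return fallbackMs;
  }
  // Only digits, whitespace and '*' are accepted; anything else falls back to the default.
  if (!/^[\d\s*]+$/.test(raw)) {
    return fallbackMs;
  }
  const product = raw
    .split('*')
    .map((part) => Number(part.trim()))
    .reduce((acc, n) => acc * n, 1);
  return Number.isFinite(product) && product > 0 ? product : fallbackMs;
}

// Usage: const expiryInMilliseconds = parseRefreshTokenExpiry(process.env.REFRESH_TOKEN_EXPIRY);
```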
@@ -15,20 +15,14 @@ function checkPromptCacheSupport(modelName) {
    return false;
  }

  if (
    modelMatch === 'claude-3-7-sonnet' ||
    modelMatch === 'claude-3-5-sonnet' ||
    modelMatch === 'claude-3-5-haiku' ||
    modelMatch === 'claude-3-haiku' ||
    modelMatch === 'claude-3-opus' ||
    modelMatch === 'claude-3.7-sonnet' ||
    modelMatch === 'claude-3.5-sonnet' ||
    modelMatch === 'claude-3.5-haiku'
  ) {
    return true;
  }

  return false;
  return (
    /claude-3[-.]7/.test(modelMatch) ||
    /claude-3[-.]5-(?:sonnet|haiku)/.test(modelMatch) ||
    /claude-3-(?:sonnet|haiku|opus)?/.test(modelMatch) ||
    /claude-(?:sonnet|opus|haiku)-[4-9]/.test(modelMatch) ||
    /claude-[4-9]-(?:sonnet|opus|haiku)?/.test(modelMatch) ||
    /claude-4(?:-(?:sonnet|opus|haiku))?/.test(modelMatch)
  );
}

/**
@@ -51,6 +45,14 @@ function getClaudeHeaders(model, supportsCacheControl) {
      'anthropic-beta':
        'token-efficient-tools-2025-02-19,output-128k-2025-02-19,prompt-caching-2024-07-31',
    };
  } else if (
    /claude-(?:sonnet|opus|haiku)-[4-9]/.test(model) ||
    /claude-[4-9]-(?:sonnet|opus|haiku)?/.test(model) ||
    /claude-4(?:-(?:sonnet|opus|haiku))?/.test(model)
  ) {
    return {
      'anthropic-beta': 'prompt-caching-2024-07-31',
    };
  } else {
    return {
      'anthropic-beta': 'prompt-caching-2024-07-31',
@@ -72,7 +74,8 @@ function configureReasoning(anthropicInput, extendedOptions = {}) {
  if (
    extendedOptions.thinking &&
    updatedOptions?.model &&
    /claude-3[-.]7/.test(updatedOptions.model)
    (/claude-3[-.]7/.test(updatedOptions.model) ||
      /claude-(?:sonnet|opus|haiku)-[4-9]/.test(updatedOptions.model))
  ) {
    updatedOptions.thinking = {
      type: 'enabled',

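The widened matching above is easiest to read through an example: per the new regexes, Claude 4 family IDs should now report prompt-cache support and receive the prompt-caching beta header (the function names are from the hunks; the specific model ID is just an example):

```js
// Sketch only; in the real module these helpers live alongside the hunks above.
// const { checkPromptCacheSupport, getClaudeHeaders } = require('./helpers'); // path assumed

const model = 'claude-sonnet-4-20250514';
const supportsCache = checkPromptCacheSupport(model); // expected: true (matches /claude-(?:sonnet|opus|haiku)-[4-9]/)
const headers = getClaudeHeaders(model, supportsCache);
// expected: { 'anthropic-beta': 'prompt-caching-2024-07-31' }
console.log(supportsCache, headers);
```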
@@ -25,10 +25,10 @@ const getOptions = async ({ req, overrideModel, endpointOption }) => {
|
||||
let credentials = isUserProvided
|
||||
? await getUserKey({ userId: req.user.id, name: EModelEndpoint.bedrock })
|
||||
: {
|
||||
accessKeyId: BEDROCK_AWS_ACCESS_KEY_ID,
|
||||
secretAccessKey: BEDROCK_AWS_SECRET_ACCESS_KEY,
|
||||
...(BEDROCK_AWS_SESSION_TOKEN && { sessionToken: BEDROCK_AWS_SESSION_TOKEN }),
|
||||
};
|
||||
accessKeyId: BEDROCK_AWS_ACCESS_KEY_ID,
|
||||
secretAccessKey: BEDROCK_AWS_SECRET_ACCESS_KEY,
|
||||
...(BEDROCK_AWS_SESSION_TOKEN && { sessionToken: BEDROCK_AWS_SESSION_TOKEN }),
|
||||
};
|
||||
|
||||
if (!credentials) {
|
||||
throw new Error('Bedrock credentials not provided. Please provide them again.');
|
||||
|
||||
@@ -2,7 +2,12 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const FormData = require('form-data');
|
||||
const { FileSources, envVarRegex, extractEnvVariable } = require('librechat-data-provider');
|
||||
const {
|
||||
FileSources,
|
||||
envVarRegex,
|
||||
extractEnvVariable,
|
||||
extractVariableName,
|
||||
} = require('librechat-data-provider');
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
const { logger, createAxiosInstance } = require('~/config');
|
||||
const { logAxiosError } = require('~/utils/axios');
|
||||
@@ -108,11 +113,6 @@ async function performOCR({
|
||||
});
|
||||
}
|
||||
|
||||
function extractVariableName(str) {
|
||||
const match = str.match(envVarRegex);
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Uploads a file to the Mistral OCR API and processes the OCR result.
|
||||
*
|
||||
|
||||
@@ -66,16 +66,26 @@ const getUserPluginAuthValue = async (userId, authField, throwError = true) => {
|
||||
// }
|
||||
// };
|
||||
|
||||
/**
|
||||
*
|
||||
* @async
|
||||
* @param {string} userId
|
||||
* @param {string} authField
|
||||
* @param {string} pluginKey
|
||||
* @param {string} value
|
||||
* @returns {Promise<IPluginAuth>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const updateUserPluginAuth = async (userId, authField, pluginKey, value) => {
|
||||
try {
|
||||
const encryptedValue = await encrypt(value);
|
||||
const pluginAuth = await PluginAuth.findOne({ userId, authField }).lean();
|
||||
if (pluginAuth) {
|
||||
const pluginAuth = await PluginAuth.updateOne(
|
||||
return await PluginAuth.findOneAndUpdate(
|
||||
{ userId, authField },
|
||||
{ $set: { value: encryptedValue } },
|
||||
);
|
||||
return pluginAuth;
|
||||
{ new: true, upsert: true },
|
||||
).lean();
|
||||
} else {
|
||||
const newPluginAuth = await new PluginAuth({
|
||||
userId,
|
||||
@@ -84,7 +94,7 @@ const updateUserPluginAuth = async (userId, authField, pluginKey, value) => {
|
||||
pluginKey,
|
||||
});
|
||||
await newPluginAuth.save();
|
||||
return newPluginAuth;
|
||||
return newPluginAuth.toObject();
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('[updateUserPluginAuth]', err);
|
||||
@@ -92,6 +102,14 @@ const updateUserPluginAuth = async (userId, authField, pluginKey, value) => {
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @async
|
||||
* @param {string} userId
|
||||
* @param {string} authField
|
||||
* @param {boolean} [all]
|
||||
* @returns {Promise<import('mongoose').DeleteResult>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
const deleteUserPluginAuth = async (userId, authField, all = false) => {
|
||||
if (all) {
|
||||
try {
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { zodToJsonSchema } = require('zod-to-json-schema');
|
||||
const { tool: toolFn, Tool, DynamicStructuredTool } = require('@langchain/core/tools');
|
||||
const { Calculator } = require('@langchain/community/tools/calculator');
|
||||
const { tool: toolFn, Tool, DynamicStructuredTool } = require('@langchain/core/tools');
|
||||
const {
|
||||
Tools,
|
||||
ErrorTypes,
|
||||
@@ -29,6 +29,7 @@ const {
|
||||
toolkits,
|
||||
} = require('~/app/clients/tools');
|
||||
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
|
||||
const { createOnSearchResults } = require('~/server/services/Tools/search');
|
||||
const { isActionDomainAllowed } = require('~/server/services/domains');
|
||||
const { getEndpointsConfig } = require('~/server/services/Config');
|
||||
const { recordUsage } = require('~/server/services/Threads');
|
||||
@@ -504,11 +505,15 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
|
||||
const checkCapability = (capability) => enabledCapabilities.has(capability);
|
||||
const areToolsEnabled = checkCapability(AgentCapabilities.tools);
|
||||
|
||||
let includesWebSearch = false;
|
||||
const _agentTools = agent.tools?.filter((tool) => {
|
||||
if (tool === Tools.file_search) {
|
||||
return checkCapability(AgentCapabilities.file_search);
|
||||
} else if (tool === Tools.execute_code) {
|
||||
return checkCapability(AgentCapabilities.execute_code);
|
||||
} else if (tool === Tools.web_search) {
|
||||
includesWebSearch = checkCapability(AgentCapabilities.web_search);
|
||||
return includesWebSearch;
|
||||
} else if (!areToolsEnabled && !tool.includes(actionDelimiter)) {
|
||||
return false;
|
||||
}
|
||||
@@ -518,7 +523,11 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
|
||||
if (!_agentTools || _agentTools.length === 0) {
|
||||
return {};
|
||||
}
|
||||
|
||||
/** @type {ReturnType<createOnSearchResults>} */
|
||||
let webSearchCallbacks;
|
||||
if (includesWebSearch) {
|
||||
webSearchCallbacks = createOnSearchResults(res);
|
||||
}
|
||||
const { loadedTools, toolContextMap } = await loadTools({
|
||||
agent,
|
||||
functions: true,
|
||||
@@ -532,6 +541,7 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
|
||||
uploadImageBuffer,
|
||||
returnMetadata: true,
|
||||
fileStrategy: req.app.locals.fileStrategy,
|
||||
[Tools.web_search]: webSearchCallbacks,
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
122
api/server/services/Tools/search.js
Normal file
@@ -0,0 +1,122 @@
|
||||
const { nanoid } = require('nanoid');
|
||||
const { Tools } = require('librechat-data-provider');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
* Creates a function to handle search results and stream them as attachments
|
||||
* @param {import('http').ServerResponse} res - The HTTP server response object
|
||||
* @returns {{ onSearchResults: function(SearchResult, GraphRunnableConfig): void; onGetHighlights: function(string): void}} - Function that takes search results and returns or streams an attachment
|
||||
*/
|
||||
function createOnSearchResults(res) {
|
||||
const context = {
|
||||
sourceMap: new Map(),
|
||||
searchResultData: undefined,
|
||||
toolCallId: undefined,
|
||||
attachmentName: undefined,
|
||||
messageId: undefined,
|
||||
conversationId: undefined,
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {SearchResult} results
|
||||
* @param {GraphRunnableConfig} runnableConfig
|
||||
*/
|
||||
function onSearchResults(results, runnableConfig) {
|
||||
logger.info(
|
||||
`[onSearchResults] user: ${runnableConfig.metadata.user_id} | thread_id: ${runnableConfig.metadata.thread_id} | run_id: ${runnableConfig.metadata.run_id}`,
|
||||
results,
|
||||
);
|
||||
|
||||
if (!results.success) {
|
||||
logger.error(
|
||||
`[onSearchResults] user: ${runnableConfig.metadata.user_id} | thread_id: ${runnableConfig.metadata.thread_id} | run_id: ${runnableConfig.metadata.run_id} | error: ${results.error}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const turn = runnableConfig.toolCall?.turn ?? 0;
|
||||
const data = { turn, ...structuredClone(results.data ?? {}) };
|
||||
context.searchResultData = data;
|
||||
|
||||
// Map sources to links
|
||||
for (let i = 0; i < data.organic.length; i++) {
|
||||
const source = data.organic[i];
|
||||
if (source.link) {
|
||||
context.sourceMap.set(source.link, {
|
||||
type: 'organic',
|
||||
index: i,
|
||||
turn,
|
||||
});
|
||||
}
|
||||
}
|
||||
for (let i = 0; i < data.topStories.length; i++) {
|
||||
const source = data.topStories[i];
|
||||
if (source.link) {
|
||||
context.sourceMap.set(source.link, {
|
||||
type: 'topStories',
|
||||
index: i,
|
||||
turn,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
context.toolCallId = runnableConfig.toolCall.id;
|
||||
context.messageId = runnableConfig.metadata.run_id;
|
||||
context.conversationId = runnableConfig.metadata.thread_id;
|
||||
context.attachmentName = `${runnableConfig.toolCall.name}_${context.toolCallId}_${nanoid()}`;
|
||||
|
||||
const attachment = buildAttachment(context);
|
||||
|
||||
if (!res.headersSent) {
|
||||
return attachment;
|
||||
}
|
||||
res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} link
|
||||
* @returns {void}
|
||||
*/
|
||||
function onGetHighlights(link) {
|
||||
const source = context.sourceMap.get(link);
|
||||
if (!source) {
|
||||
return;
|
||||
}
|
||||
const { type, index } = source;
|
||||
const data = context.searchResultData;
|
||||
if (!data) {
|
||||
return;
|
||||
}
|
||||
if (data[type][index] != null) {
|
||||
data[type][index].processed = true;
|
||||
}
|
||||
|
||||
const attachment = buildAttachment(context);
|
||||
res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
|
||||
}
|
||||
|
||||
return {
|
||||
onSearchResults,
|
||||
onGetHighlights,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to build an attachment object
|
||||
* @param {object} context - The context containing attachment data
|
||||
* @returns {object} - The attachment object
|
||||
*/
|
||||
function buildAttachment(context) {
|
||||
return {
|
||||
messageId: context.messageId,
|
||||
toolCallId: context.toolCallId,
|
||||
conversationId: context.conversationId,
|
||||
name: context.attachmentName,
|
||||
type: Tools.web_search,
|
||||
[Tools.web_search]: context.searchResultData,
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createOnSearchResults,
|
||||
};
|
||||
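Downstream of this new module, the `loadAgentTools` hunk earlier in the diff creates these callbacks only when the agent has `web_search` enabled and passes them through tool options under `Tools.web_search`. A condensed sketch of that wiring (no new behavior, just the relationship between the two hunks):

```js
const { Tools } = require('librechat-data-provider');
const { createOnSearchResults } = require('~/server/services/Tools/search');

function buildWebSearchToolOptions(req, res, includesWebSearch) {
  // Only create the SSE-emitting callbacks when web_search is actually enabled for the agent.
  const webSearchCallbacks = includesWebSearch ? createOnSearchResults(res) : undefined;

  return {
    fileStrategy: req.app.locals.fileStrategy,
    // onSearchResults / onGetHighlights stream `event: attachment` frames once headers
    // are sent, or return the attachment object directly before that.
    [Tools.web_search]: webSearchCallbacks,
  };
}

module.exports = { buildWebSearchToolOptions };
```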
@@ -1,7 +1,9 @@
const {
  Constants,
  webSearchKeys,
  deprecatedAzureVariables,
  conflictingAzureVariables,
  extractVariableName,
} = require('librechat-data-provider');
const { isEnabled, checkEmailConfig } = require('~/server/utils');
const { logger } = require('~/config');
@@ -141,4 +143,56 @@ function checkPasswordReset() {
  }
}

module.exports = { checkVariables, checkHealth, checkConfig, checkAzureVariables };
/**
 * Checks web search configuration values to ensure they are environment variable references.
 * Warns if actual API keys or URLs are used instead of environment variable references.
 * Logs debug information for properly configured environment variable references.
 * @param {Object} webSearchConfig - The loaded web search configuration object.
 */
function checkWebSearchConfig(webSearchConfig) {
  if (!webSearchConfig) {
    return;
  }

  webSearchKeys.forEach((key) => {
    const value = webSearchConfig[key];

    if (typeof value === 'string') {
      const varName = extractVariableName(value);

      if (varName) {
        // This is a proper environment variable reference
        const actualValue = process.env[varName];
        if (actualValue) {
          logger.debug(`Web search ${key}: Using environment variable ${varName} with value set`);
        } else {
          logger.debug(
            `Web search ${key}: Using environment variable ${varName} (not set in environment, user provided value)`,
          );
        }
      } else {
        // This is not an environment variable reference - warn user
        logger.warn(
          `❗ Web search configuration error: ${key} contains an actual value instead of an environment variable reference.

Current value: "${value.substring(0, 10)}..."

This is incorrect! You should use environment variable references in your librechat.yaml file, such as:
${key}: "\${YOUR_ENV_VAR_NAME}"

Then set the actual API key in your .env file or environment variables.

More info: https://www.librechat.ai/docs/configuration/librechat_yaml/web_search`,
        );
      }
    }
  });
}

module.exports = {
  checkHealth,
  checkConfig,
  checkVariables,
  checkAzureVariables,
  checkWebSearchConfig,
};

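In practice the check distinguishes `${VAR}` references from hard-coded secrets; a short sketch of both cases (values are illustrative and the import path is assumed):

```js
const { checkWebSearchConfig } = require('~/server/services/start/checks'); // path assumed

checkWebSearchConfig({
  serperApiKey: '${SERPER_API_KEY}', // env-var reference → debug log only
  jinaApiKey: 'jina_live_1234567890abc', // hard-coded value → ❗ warning, first 10 chars echoed
});
```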
203
api/server/services/start/checks.spec.js
Normal file
@@ -0,0 +1,203 @@
|
||||
// Mock librechat-data-provider
|
||||
jest.mock('librechat-data-provider', () => ({
|
||||
...jest.requireActual('librechat-data-provider'),
|
||||
extractVariableName: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock the config logger
|
||||
jest.mock('~/config', () => ({
|
||||
logger: {
|
||||
debug: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
const { checkWebSearchConfig } = require('./checks');
|
||||
const { logger } = require('~/config');
|
||||
const { extractVariableName } = require('librechat-data-provider');
|
||||
|
||||
describe('checkWebSearchConfig', () => {
|
||||
let originalEnv;
|
||||
|
||||
beforeEach(() => {
|
||||
// Clear all mocks
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Store original environment
|
||||
originalEnv = process.env;
|
||||
|
||||
// Reset process.env
|
||||
process.env = { ...originalEnv };
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original environment
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
describe('when webSearchConfig is undefined or null', () => {
|
||||
it('should return early without logging when config is undefined', () => {
|
||||
checkWebSearchConfig(undefined);
|
||||
|
||||
expect(logger.debug).not.toHaveBeenCalled();
|
||||
expect(logger.warn).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return early without logging when config is null', () => {
|
||||
checkWebSearchConfig(null);
|
||||
|
||||
expect(logger.debug).not.toHaveBeenCalled();
|
||||
expect(logger.warn).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when config values are proper environment variable references', () => {
|
||||
it('should log debug message for each valid environment variable with value set', () => {
|
||||
const config = {
|
||||
serperApiKey: '${SERPER_API_KEY}',
|
||||
jinaApiKey: '${JINA_API_KEY}',
|
||||
};
|
||||
|
||||
extractVariableName.mockReturnValueOnce('SERPER_API_KEY').mockReturnValueOnce('JINA_API_KEY');
|
||||
|
||||
process.env.SERPER_API_KEY = 'test-serper-key';
|
||||
process.env.JINA_API_KEY = 'test-jina-key';
|
||||
|
||||
checkWebSearchConfig(config);
|
||||
|
||||
expect(extractVariableName).toHaveBeenCalledWith('${SERPER_API_KEY}');
|
||||
expect(extractVariableName).toHaveBeenCalledWith('${JINA_API_KEY}');
|
||||
expect(logger.debug).toHaveBeenCalledWith(
|
||||
'Web search serperApiKey: Using environment variable SERPER_API_KEY with value set',
|
||||
);
|
||||
expect(logger.debug).toHaveBeenCalledWith(
|
||||
'Web search jinaApiKey: Using environment variable JINA_API_KEY with value set',
|
||||
);
|
||||
expect(logger.warn).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should log debug message for environment variables not set in environment', () => {
|
||||
const config = {
|
||||
cohereApiKey: '${COHERE_API_KEY}',
|
||||
};
|
||||
|
||||
extractVariableName.mockReturnValue('COHERE_API_KEY');
|
||||
|
||||
delete process.env.COHERE_API_KEY;
|
||||
|
||||
checkWebSearchConfig(config);
|
||||
|
||||
expect(logger.debug).toHaveBeenCalledWith(
|
||||
'Web search cohereApiKey: Using environment variable COHERE_API_KEY (not set in environment, user provided value)',
|
||||
);
|
||||
expect(logger.warn).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when config values are actual values instead of environment variable references', () => {
|
||||
it('should warn when serperApiKey contains actual API key', () => {
|
||||
const config = {
|
||||
serperApiKey: 'sk-1234567890abcdef',
|
||||
};
|
||||
|
||||
extractVariableName.mockReturnValue(null);
|
||||
|
||||
checkWebSearchConfig(config);
|
||||
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining(
|
||||
'❗ Web search configuration error: serperApiKey contains an actual value',
|
||||
),
|
||||
);
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Current value: "sk-1234567..."'),
|
||||
);
|
||||
expect(logger.debug).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should warn when firecrawlApiUrl contains actual URL', () => {
|
||||
const config = {
|
||||
firecrawlApiUrl: 'https://api.firecrawl.dev',
|
||||
};
|
||||
|
||||
extractVariableName.mockReturnValue(null);
|
||||
|
||||
checkWebSearchConfig(config);
|
||||
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining(
|
||||
'❗ Web search configuration error: firecrawlApiUrl contains an actual value',
|
||||
),
|
||||
);
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Current value: "https://ap..."'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should include documentation link in warning message', () => {
|
||||
const config = {
|
||||
firecrawlApiKey: 'fc-actual-key',
|
||||
};
|
||||
|
||||
extractVariableName.mockReturnValue(null);
|
||||
|
||||
checkWebSearchConfig(config);
|
||||
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining(
|
||||
'More info: https://www.librechat.ai/docs/configuration/librechat_yaml/web_search',
|
||||
),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when config contains mixed value types', () => {
|
||||
it('should only process string values and ignore non-string values', () => {
|
||||
const config = {
|
||||
serperApiKey: '${SERPER_API_KEY}',
|
||||
safeSearch: 1,
|
||||
scraperTimeout: 7500,
|
||||
jinaApiKey: 'actual-key',
|
||||
};
|
||||
|
||||
extractVariableName.mockReturnValueOnce('SERPER_API_KEY').mockReturnValueOnce(null);
|
||||
|
||||
process.env.SERPER_API_KEY = 'test-key';
|
||||
|
||||
checkWebSearchConfig(config);
|
||||
|
||||
expect(extractVariableName).toHaveBeenCalledTimes(2);
|
||||
expect(logger.debug).toHaveBeenCalledTimes(1);
|
||||
expect(logger.warn).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle config with no web search keys', () => {
|
||||
const config = {
|
||||
someOtherKey: 'value',
|
||||
anotherKey: '${SOME_VAR}',
|
||||
};
|
||||
|
||||
checkWebSearchConfig(config);
|
||||
|
||||
expect(extractVariableName).not.toHaveBeenCalled();
|
||||
expect(logger.debug).not.toHaveBeenCalled();
|
||||
expect(logger.warn).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should truncate long values in warning messages', () => {
|
||||
const config = {
|
||||
serperApiKey: 'this-is-a-very-long-api-key-that-should-be-truncated-in-the-warning-message',
|
||||
};
|
||||
|
||||
extractVariableName.mockReturnValue(null);
|
||||
|
||||
checkWebSearchConfig(config);
|
||||
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Current value: "this-is-a-..."'),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -38,6 +38,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
|
||||
agents: interfaceConfig?.agents ?? defaults.agents,
|
||||
temporaryChat: interfaceConfig?.temporaryChat ?? defaults.temporaryChat,
|
||||
runCode: interfaceConfig?.runCode ?? defaults.runCode,
|
||||
webSearch: interfaceConfig?.webSearch ?? defaults.webSearch,
|
||||
customWelcome: interfaceConfig?.customWelcome ?? defaults.customWelcome,
|
||||
});
|
||||
|
||||
@@ -48,6 +49,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: loadedInterface.webSearch },
|
||||
});
|
||||
await updateAccessPermissions(SystemRoles.ADMIN, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },
|
||||
@@ -56,6 +58,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: loadedInterface.webSearch },
|
||||
});
|
||||
|
||||
let i = 0;
|
||||
@@ -74,7 +77,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
|
||||
// warn about config.modelSpecs.prioritize if true and presets are enabled, that default presets will conflict with prioritizing model specs.
|
||||
if (config?.modelSpecs?.prioritize && loadedInterface.presets) {
|
||||
logger.warn(
|
||||
'Note: Prioritizing model specs can conflict with default presets if a default preset is set. It\'s recommended to disable presets from the interface or disable use of a default preset.',
|
||||
"Note: Prioritizing model specs can conflict with default presets if a default preset is set. It's recommended to disable presets from the interface or disable use of a default preset.",
|
||||
);
|
||||
i === 0 && i++;
|
||||
}
|
||||
@@ -88,14 +91,14 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
|
||||
loadedInterface.parameters)
|
||||
) {
|
||||
logger.warn(
|
||||
'Note: Enforcing model specs can conflict with the interface options: endpointsMenu, modelSelect, presets, and parameters. It\'s recommended to disable these options from the interface or disable enforcing model specs.',
|
||||
"Note: Enforcing model specs can conflict with the interface options: endpointsMenu, modelSelect, presets, and parameters. It's recommended to disable these options from the interface or disable enforcing model specs.",
|
||||
);
|
||||
i === 0 && i++;
|
||||
}
|
||||
// warn if enforce is true and prioritize is not, that enforcing model specs without prioritizing them can lead to unexpected behavior.
|
||||
if (config?.modelSpecs?.enforce && !config?.modelSpecs?.prioritize) {
|
||||
logger.warn(
|
||||
'Note: Enforcing model specs without prioritizing them can lead to unexpected behavior. It\'s recommended to enable prioritizing model specs if enforcing them.',
|
||||
"Note: Enforcing model specs without prioritizing them can lead to unexpected behavior. It's recommended to enable prioritizing model specs if enforcing them.",
|
||||
);
|
||||
i === 0 && i++;
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@ describe('loadDefaultInterface', () => {
|
||||
agents: true,
|
||||
temporaryChat: true,
|
||||
runCode: true,
|
||||
webSearch: true,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
@@ -29,6 +30,7 @@ describe('loadDefaultInterface', () => {
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -41,6 +43,7 @@ describe('loadDefaultInterface', () => {
|
||||
agents: false,
|
||||
temporaryChat: false,
|
||||
runCode: false,
|
||||
webSearch: false,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
@@ -54,6 +57,7 @@ describe('loadDefaultInterface', () => {
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: false },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -70,6 +74,7 @@ describe('loadDefaultInterface', () => {
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -82,6 +87,7 @@ describe('loadDefaultInterface', () => {
|
||||
agents: undefined,
|
||||
temporaryChat: undefined,
|
||||
runCode: undefined,
|
||||
webSearch: undefined,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
@@ -95,6 +101,7 @@ describe('loadDefaultInterface', () => {
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -107,6 +114,7 @@ describe('loadDefaultInterface', () => {
|
||||
agents: true,
|
||||
temporaryChat: undefined,
|
||||
runCode: false,
|
||||
webSearch: true,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
@@ -120,6 +128,7 @@ describe('loadDefaultInterface', () => {
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -133,6 +142,7 @@ describe('loadDefaultInterface', () => {
|
||||
agents: true,
|
||||
temporaryChat: true,
|
||||
runCode: true,
|
||||
webSearch: true,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -145,6 +155,7 @@ describe('loadDefaultInterface', () => {
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -161,6 +172,7 @@ describe('loadDefaultInterface', () => {
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -177,6 +189,7 @@ describe('loadDefaultInterface', () => {
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -193,6 +206,7 @@ describe('loadDefaultInterface', () => {
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -218,6 +232,7 @@ describe('loadDefaultInterface', () => {
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -231,6 +246,7 @@ describe('loadDefaultInterface', () => {
|
||||
agents: undefined,
|
||||
temporaryChat: undefined,
|
||||
runCode: undefined,
|
||||
webSearch: undefined,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -243,6 +259,33 @@ describe('loadDefaultInterface', () => {
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call updateAccessPermissions with the correct parameters when WEB_SEARCH is undefined', async () => {
|
||||
const config = {
|
||||
interface: {
|
||||
prompts: true,
|
||||
bookmarks: false,
|
||||
multiConvo: true,
|
||||
agents: false,
|
||||
temporaryChat: true,
|
||||
runCode: false,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
|
||||
await loadDefaultInterface(config, configDefaults);
|
||||
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -10,6 +10,7 @@ const {
|
||||
discordLogin,
|
||||
facebookLogin,
|
||||
appleLogin,
|
||||
openIdJwtLogin,
|
||||
} = require('~/strategies');
|
||||
const { isEnabled } = require('~/server/utils');
|
||||
const keyvRedis = require('~/cache/keyvRedis');
|
||||
@@ -19,7 +20,7 @@ const { logger } = require('~/config');
|
||||
*
|
||||
* @param {Express.Application} app
|
||||
*/
|
||||
const configureSocialLogins = (app) => {
|
||||
const configureSocialLogins = async (app) => {
|
||||
logger.info('Configuring social logins...');
|
||||
|
||||
if (process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET) {
|
||||
@@ -62,8 +63,11 @@ const configureSocialLogins = (app) => {
|
||||
}
|
||||
app.use(session(sessionOptions));
|
||||
app.use(passport.session());
|
||||
setupOpenId();
|
||||
|
||||
const config = await setupOpenId();
|
||||
if (isEnabled(process.env.OPENID_REUSE_TOKENS)) {
|
||||
logger.info('OpenID token reuse is enabled.');
|
||||
passport.use('openidJwt', openIdJwtLogin(config));
|
||||
}
|
||||
logger.info('OpenID Connect configured.');
|
||||
}
|
||||
};
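Because configureSocialLogins is now async, its caller has to await it before the OpenID strategies are usable. A minimal sketch of such a call site follows; the require path and surrounding bootstrap are assumptions, not part of this changeset.

// Hypothetical call site: awaiting the now-async social login setup.
const configureSocialLogins = require('~/server/socialLogins'); // path assumed
const { isEnabled } = require('~/server/utils');

async function bootstrap(app) {
  if (isEnabled(process.env.ALLOW_SOCIAL_LOGIN)) {
    await configureSocialLogins(app); // resolves only after setupOpenId() has finished
  }
}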
|
||||
|
||||
@@ -200,11 +200,12 @@ function generateConfig(key, baseURL, endpoint) {
|
||||
config.capabilities = [
|
||||
AgentCapabilities.execute_code,
|
||||
AgentCapabilities.file_search,
|
||||
AgentCapabilities.web_search,
|
||||
AgentCapabilities.artifacts,
|
||||
AgentCapabilities.actions,
|
||||
AgentCapabilities.tools,
|
||||
AgentCapabilities.ocr,
|
||||
AgentCapabilities.chain,
|
||||
AgentCapabilities.ocr,
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
const fs = require('fs').promises;
|
||||
const { getImporter } = require('./importers');
|
||||
const { indexSync } = require('~/lib/db');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
@@ -15,8 +14,6 @@ const importConversations = async (job) => {
|
||||
const jsonData = JSON.parse(fileData);
|
||||
const importer = getImporter(jsonData);
|
||||
await importer(jsonData, requestUserId);
|
||||
// Sync Meilisearch index
|
||||
await indexSync();
|
||||
logger.debug(`user: ${requestUserId} | Finished importing conversations`);
|
||||
} catch (error) {
|
||||
logger.error(`user: ${requestUserId} | Failed to import conversation: `, error);
|
||||
|
||||
@@ -84,14 +84,14 @@ describe('importChatGptConvo', () => {
|
||||
const { parent } = jsonData[0].mapping[id];
|
||||
|
||||
const expectedParentId = parent
|
||||
? idToUUIDMap.get(parent) ?? Constants.NO_PARENT
|
||||
? (idToUUIDMap.get(parent) ?? Constants.NO_PARENT)
|
||||
: Constants.NO_PARENT;
|
||||
|
||||
const actualMessageId = idToUUIDMap.get(id);
|
||||
const actualParentId = actualMessageId
|
||||
? importBatchBuilder.saveMessage.mock.calls.find(
|
||||
(call) => call[0].messageId === actualMessageId,
|
||||
)[0].parentMessageId
|
||||
(call) => call[0].messageId === actualMessageId,
|
||||
)[0].parentMessageId
|
||||
: Constants.NO_PARENT;
|
||||
|
||||
expect(actualParentId).toBe(expectedParentId);
|
||||
@@ -544,7 +544,7 @@ describe('processAssistantMessage', () => {
|
||||
|
||||
// Expected output should have all citations replaced with markdown links
|
||||
const expectedOutput =
|
||||
'Signal Sciences is a web application security company that was founded on March 10, 2014, by Andrew Peterson, Nick Galbreath, and Zane Lackey. It operates as a for-profit company with its legal name being Signal Sciences Corp. The company has achieved significant growth and is recognized as the fastest-growing web application security company in the world. Signal Sciences developed a next-gen web application firewall (NGWAF) and runtime application self-protection (RASP) technologies designed to increase security and maintain reliability without compromising the performance of modern web applications distributed across cloud, on-premise, edge, or hybrid environments ([Signal Sciences - Crunchbase Company Profile & Funding](https://www.crunchbase.com/organization/signal-sciences)) ([Demand More from Your WAF - Signal Sciences now part of Fastly](https://www.signalsciences.com/)).\n\nIn a major development, Fastly, Inc., a provider of an edge cloud platform, announced the completion of its acquisition of Signal Sciences on October 1, 2020. This acquisition was valued at approximately $775 million in cash and stock. By integrating Signal Sciences\' powerful web application and API security solutions with Fastly\'s edge cloud platform and existing security offerings, they aimed to form a unified suite of security solutions. The merger was aimed at expanding Fastly\'s security portfolio, particularly at a time when digital security has become paramount for businesses operating online ([Fastly Completes Acquisition of Signal Sciences | Fastly](https://www.fastly.com/press/press-releases/fastly-completes-acquisition-signal-sciences)) ([Fastly Agrees to Acquire Signal Sciences for $775 Million - Cooley](https://www.cooley.com/news/coverage/2020/2020-08-27-fastly-agrees-to-acquire-signal-sciences-for-775-million)).';
|
||||
"Signal Sciences is a web application security company that was founded on March 10, 2014, by Andrew Peterson, Nick Galbreath, and Zane Lackey. It operates as a for-profit company with its legal name being Signal Sciences Corp. The company has achieved significant growth and is recognized as the fastest-growing web application security company in the world. Signal Sciences developed a next-gen web application firewall (NGWAF) and runtime application self-protection (RASP) technologies designed to increase security and maintain reliability without compromising the performance of modern web applications distributed across cloud, on-premise, edge, or hybrid environments ([Signal Sciences - Crunchbase Company Profile & Funding](https://www.crunchbase.com/organization/signal-sciences)) ([Demand More from Your WAF - Signal Sciences now part of Fastly](https://www.signalsciences.com/)).\n\nIn a major development, Fastly, Inc., a provider of an edge cloud platform, announced the completion of its acquisition of Signal Sciences on October 1, 2020. This acquisition was valued at approximately $775 million in cash and stock. By integrating Signal Sciences' powerful web application and API security solutions with Fastly's edge cloud platform and existing security offerings, they aimed to form a unified suite of security solutions. The merger was aimed at expanding Fastly's security portfolio, particularly at a time when digital security has become paramount for businesses operating online ([Fastly Completes Acquisition of Signal Sciences | Fastly](https://www.fastly.com/press/press-releases/fastly-completes-acquisition-signal-sciences)) ([Fastly Agrees to Acquire Signal Sciences for $775 Million - Cooley](https://www.cooley.com/news/coverage/2020/2020-08-27-fastly-agrees-to-acquire-signal-sciences-for-775-million)).";
|
||||
|
||||
const result = processAssistantMessage(assistantMessage, messageText);
|
||||
expect(result).toBe(expectedOutput);
|
||||
@@ -603,7 +603,7 @@ describe('processAssistantMessage', () => {
|
||||
// In a ReDoS vulnerability, time would roughly double with each size increase
|
||||
for (let i = 1; i < results.length; i++) {
|
||||
const ratio = results[i] / results[i - 1];
|
||||
expect(ratio).toBeLessThan(2); // Processing time should not double
|
||||
expect(ratio).toBeLessThan(3); // Allow for CI environment variability while still catching ReDoS
|
||||
console.log(`Size ${sizes[i]} processing time ratio: ${ratio}`);
|
||||
}
|
||||
|
||||
|
||||
@@ -4,9 +4,10 @@ const googleLogin = require('./googleStrategy');
|
||||
const githubLogin = require('./githubStrategy');
|
||||
const discordLogin = require('./discordStrategy');
|
||||
const facebookLogin = require('./facebookStrategy');
|
||||
const setupOpenId = require('./openidStrategy');
|
||||
const { setupOpenId, getOpenIdConfig } = require('./openidStrategy');
|
||||
const jwtLogin = require('./jwtStrategy');
|
||||
const ldapLogin = require('./ldapStrategy');
|
||||
const openIdJwtLogin = require('./openIdJwtStrategy');
|
||||
|
||||
module.exports = {
|
||||
appleLogin,
|
||||
@@ -17,5 +18,7 @@ module.exports = {
|
||||
jwtLogin,
|
||||
facebookLogin,
|
||||
setupOpenId,
|
||||
getOpenIdConfig,
|
||||
ldapLogin,
|
||||
};
|
||||
openIdJwtLogin,
|
||||
};
|
||||
|
||||
@@ -4,7 +4,7 @@ const { getUserById, updateUser } = require('~/models');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
// JWT strategy
|
||||
const jwtLogin = async () =>
|
||||
const jwtLogin = () =>
|
||||
new JwtStrategy(
|
||||
{
|
||||
jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
|
||||
|
||||
api/strategies/openIdJwtStrategy.js (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
const { SystemRoles } = require('librechat-data-provider');
|
||||
const { Strategy: JwtStrategy, ExtractJwt } = require('passport-jwt');
|
||||
const { updateUser, findUser } = require('~/models');
|
||||
const { logger } = require('~/config');
|
||||
const jwksRsa = require('jwks-rsa');
|
||||
const { isEnabled } = require('~/server/utils');
|
||||
/**
|
||||
* @function openIdJwtLogin
|
||||
* @param {import('openid-client').Configuration} openIdConfig - Configuration object for the JWT strategy.
|
||||
* @returns {JwtStrategy}
|
||||
* @description This function creates a JWT strategy for OpenID authentication.
|
||||
* It uses the jwks-rsa library to retrieve the signing key from a JWKS endpoint.
|
||||
* The strategy extracts the JWT from the Authorization header as a Bearer token.
|
||||
* The JWT is then verified using the signing key, and the user is retrieved from the database.
|
||||
*/
|
||||
const openIdJwtLogin = (openIdConfig) =>
|
||||
new JwtStrategy(
|
||||
{
|
||||
jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
|
||||
secretOrKeyProvider: jwksRsa.passportJwtSecret({
|
||||
cache: isEnabled(process.env.OPENID_JWKS_URL_CACHE_ENABLED) || true,
|
||||
cacheMaxAge: process.env.OPENID_JWKS_URL_CACHE_TIME
|
||||
? eval(process.env.OPENID_JWKS_URL_CACHE_TIME)
|
||||
: 60000,
|
||||
jwksUri: openIdConfig.serverMetadata().jwks_uri,
|
||||
}),
|
||||
},
|
||||
async (payload, done) => {
|
||||
try {
|
||||
const user = await findUser({ openidId: payload?.sub });
|
||||
|
||||
if (user) {
|
||||
user.id = user._id.toString();
|
||||
if (!user.role) {
|
||||
user.role = SystemRoles.USER;
|
||||
await updateUser(user.id, { role: user.role });
|
||||
}
|
||||
done(null, user);
|
||||
} else {
|
||||
logger.warn(
|
||||
'[openIdJwtLogin] openId JwtStrategy => no user found with the sub claims: ' +
|
||||
payload?.sub,
|
||||
);
|
||||
done(null, false);
|
||||
}
|
||||
} catch (err) {
|
||||
done(err, false);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
module.exports = openIdJwtLogin;
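A sketch of how the strategy registered as 'openidJwt' in configureSocialLogins could guard a route; the route itself is illustrative and not part of this changeset.

// Illustrative route guard; assumes passport.use('openidJwt', openIdJwtLogin(config)) ran at startup.
const express = require('express');
const passport = require('passport');

const router = express.Router();
router.get(
  '/me',
  passport.authenticate('openidJwt', { session: false }),
  (req, res) => res.json({ id: req.user.id, role: req.user.role }),
);

module.exports = router;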
|
||||
@@ -1,28 +1,112 @@
|
||||
const { CacheKeys } = require('librechat-data-provider');
|
||||
const fetch = require('node-fetch');
|
||||
const passport = require('passport');
|
||||
const jwtDecode = require('jsonwebtoken/decode');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { Issuer, Strategy: OpenIDStrategy, custom } = require('openid-client');
|
||||
const client = require('openid-client');
|
||||
const { Strategy: OpenIDStrategy } = require('openid-client/passport');
|
||||
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
||||
const { findUser, createUser, updateUser } = require('~/models/userMethods');
|
||||
const { hashToken } = require('~/server/utils/crypto');
|
||||
const { isEnabled } = require('~/server/utils');
|
||||
const { logger } = require('~/config');
|
||||
const getLogStores = require('~/cache/getLogStores');
|
||||
|
||||
let crypto;
|
||||
try {
|
||||
crypto = require('node:crypto');
|
||||
} catch (err) {
|
||||
logger.error('[openidStrategy] crypto support is disabled!', err);
|
||||
/**
|
||||
* @typedef {import('openid-client').ClientMetadata} ClientMetadata
|
||||
* @typedef {import('openid-client').Configuration} Configuration
|
||||
**/
|
||||
|
||||
/** @typedef {Configuration | null} */
|
||||
let openidConfig = null;
|
||||
|
||||
//overload currenturl function because of express version 4 buggy req.host doesn't include port
|
||||
//More info https://github.com/panva/openid-client/pull/713
|
||||
|
||||
class CustomOpenIDStrategy extends OpenIDStrategy {
|
||||
currentUrl(req) {
|
||||
const hostAndProtocol = process.env.DOMAIN_SERVER;
|
||||
return new URL(`${hostAndProtocol}${req.originalUrl ?? req.url}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Override to ensure proper authorization request parameters
|
||||
*/
|
||||
authorizationRequestParams(req, options) {
|
||||
const params = super.authorizationRequestParams?.(req, options) || {};
|
||||
if (options?.state != null && options.state && !params.has('state')) {
|
||||
params.set('state', options.state);
|
||||
}
|
||||
return params;
|
||||
}
|
||||
}
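An illustrative-only snippet of what the currentUrl override composes when Express 4 drops the port from req.host; the callback path shown is an assumed OPENID_CALLBACK_URL value.

// Illustration of the override's effect (values assumed).
process.env.DOMAIN_SERVER = 'http://localhost:3080';
const req = { originalUrl: '/oauth/openid/callback?code=abc&state=xyz' };
const url = new URL(`${process.env.DOMAIN_SERVER}${req.originalUrl}`);
console.log(url.href); // http://localhost:3080/oauth/openid/callback?code=abc&state=xyz
// openid-client then validates against the full host:port rather than Express 4's port-less req.host.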
|
||||
|
||||
/**
|
||||
* Exchange the access token for a new access token using the on-behalf-of flow if required.
|
||||
* @param {Configuration} config
|
||||
* @param {string} accessToken access token to be exchanged if necessary
|
||||
* @param {string} sub - The subject identifier of the user. usually found as "sub" in the claims of the token
|
||||
* @param {boolean} fromCache - Indicates whether to use cached tokens.
|
||||
* @returns {Promise<string>} The new access token if exchanged, otherwise the original access token.
|
||||
*/
|
||||
const exchangeAccessTokenIfNeeded = async (config, accessToken, sub, fromCache = false) => {
|
||||
const tokensCache = getLogStores(CacheKeys.OPENID_EXCHANGED_TOKENS);
|
||||
const onBehalfFlowRequired = isEnabled(process.env.OPENID_ON_BEHALF_FLOW_FOR_USERINFRO_REQUIRED);
|
||||
if (onBehalfFlowRequired) {
|
||||
if (fromCache) {
|
||||
const cachedToken = await tokensCache.get(sub);
|
||||
if (cachedToken) {
|
||||
return cachedToken.access_token;
|
||||
}
|
||||
}
|
||||
const grantResponse = await client.genericGrantRequest(
|
||||
config,
|
||||
'urn:ietf:params:oauth:grant-type:jwt-bearer',
|
||||
{
|
||||
scope: process.env.OPENID_ON_BEHALF_FLOW_USERINFRO_SCOPE || 'user.read',
|
||||
assertion: accessToken,
|
||||
requested_token_use: 'on_behalf_of',
|
||||
},
|
||||
);
|
||||
await tokensCache.set(
|
||||
sub,
|
||||
{
|
||||
access_token: grantResponse.access_token,
|
||||
},
|
||||
grantResponse.expires_in * 1000,
|
||||
);
|
||||
return grantResponse.access_token;
|
||||
}
|
||||
return accessToken;
|
||||
};
|
||||
|
||||
/**
|
||||
* get user info from openid provider
|
||||
* @param {Configuration} config
|
||||
* @param {string} accessToken access token
|
||||
* @param {string} sub - The subject identifier of the user. usually found as "sub" in the claims of the token
|
||||
* @returns {Promise<Object|null>}
|
||||
*/
|
||||
const getUserInfo = async (config, accessToken, sub) => {
|
||||
try {
|
||||
const exchangedAccessToken = await exchangeAccessTokenIfNeeded(config, accessToken, sub);
|
||||
return await client.fetchUserInfo(config, exchangedAccessToken, sub);
|
||||
} catch (error) {
|
||||
logger.warn(`[openidStrategy] getUserInfo: Error fetching user info: ${error}`);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Downloads an image from a URL using an access token.
|
||||
* @param {string} url
|
||||
* @param {string} accessToken
|
||||
* @returns {Promise<Buffer>}
|
||||
* @param {Configuration} config
|
||||
* @param {string} accessToken access token
|
||||
* @param {string} sub - The subject identifier of the user. usually found as "sub" in the claims of the token
|
||||
* @returns {Promise<Buffer | string>} The image buffer or an empty string if the download fails.
|
||||
*/
|
||||
const downloadImage = async (url, accessToken) => {
|
||||
const downloadImage = async (url, config, accessToken, sub) => {
|
||||
const exchangedAccessToken = await exchangeAccessTokenIfNeeded(config, accessToken, sub, true);
|
||||
if (!url) {
|
||||
return '';
|
||||
}
|
||||
@@ -31,7 +115,7 @@ const downloadImage = async (url, accessToken) => {
|
||||
const options = {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Authorization: `Bearer ${exchangedAccessToken}`,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -105,63 +189,68 @@ function convertToUsername(input, defaultValue = '') {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up the OpenID strategy for authentication.
|
||||
* This function configures the OpenID client, handles proxy settings,
|
||||
* and defines the OpenID strategy for Passport.js.
|
||||
*
|
||||
* @async
|
||||
* @function setupOpenId
|
||||
* @returns {Promise<Configuration | null>} A promise that resolves when the OpenID strategy is set up and returns the openid client config object.
|
||||
* @throws {Error} If an error occurs during the setup process.
|
||||
*/
|
||||
async function setupOpenId() {
|
||||
try {
|
||||
if (process.env.PROXY) {
|
||||
const proxyAgent = new HttpsProxyAgent(process.env.PROXY);
|
||||
custom.setHttpOptionsDefaults({
|
||||
agent: proxyAgent,
|
||||
});
|
||||
logger.info(`[openidStrategy] proxy agent added: ${process.env.PROXY}`);
|
||||
}
|
||||
const issuer = await Issuer.discover(process.env.OPENID_ISSUER);
|
||||
/* Supported Algorithms, openid-client v5 doesn't set it automatically as discovered from server.
|
||||
- id_token_signed_response_alg // defaults to 'RS256'
|
||||
- request_object_signing_alg // defaults to 'RS256'
|
||||
- userinfo_signed_response_alg // not in v5
|
||||
- introspection_signed_response_alg // not in v5
|
||||
- authorization_signed_response_alg // not in v5
|
||||
*/
|
||||
/** @type {import('openid-client').ClientMetadata} */
|
||||
/** @type {ClientMetadata} */
|
||||
const clientMetadata = {
|
||||
client_id: process.env.OPENID_CLIENT_ID,
|
||||
client_secret: process.env.OPENID_CLIENT_SECRET,
|
||||
redirect_uris: [process.env.DOMAIN_SERVER + process.env.OPENID_CALLBACK_URL],
|
||||
};
|
||||
if (isEnabled(process.env.OPENID_SET_FIRST_SUPPORTED_ALGORITHM)) {
|
||||
clientMetadata.id_token_signed_response_alg =
|
||||
issuer.id_token_signing_alg_values_supported?.[0] || 'RS256';
|
||||
|
||||
/** @type {Configuration} */
|
||||
openidConfig = await client.discovery(
|
||||
new URL(process.env.OPENID_ISSUER),
|
||||
process.env.OPENID_CLIENT_ID,
|
||||
clientMetadata,
|
||||
);
|
||||
if (process.env.PROXY) {
|
||||
const proxyAgent = new HttpsProxyAgent(process.env.PROXY);
|
||||
openidConfig[client.customFetch] = (...args) => {
|
||||
return fetch(args[0], { ...args[1], agent: proxyAgent });
|
||||
};
|
||||
logger.info(`[openidStrategy] proxy agent added: ${process.env.PROXY}`);
|
||||
}
|
||||
const client = new issuer.Client(clientMetadata);
|
||||
const requiredRole = process.env.OPENID_REQUIRED_ROLE;
|
||||
const requiredRoleParameterPath = process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH;
|
||||
const requiredRoleTokenKind = process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND;
|
||||
const openidLogin = new OpenIDStrategy(
|
||||
const usePKCE = isEnabled(process.env.OPENID_USE_PKCE);
|
||||
const openidLogin = new CustomOpenIDStrategy(
|
||||
{
|
||||
client,
|
||||
params: {
|
||||
scope: process.env.OPENID_SCOPE,
|
||||
},
|
||||
config: openidConfig,
|
||||
scope: process.env.OPENID_SCOPE,
|
||||
callbackURL: process.env.DOMAIN_SERVER + process.env.OPENID_CALLBACK_URL,
|
||||
usePKCE,
|
||||
},
|
||||
async (tokenset, userinfo, done) => {
|
||||
async (tokenset, done) => {
|
||||
try {
|
||||
logger.info(`[openidStrategy] verify login openidId: ${userinfo.sub}`);
|
||||
logger.debug('[openidStrategy] very login tokenset and userinfo', { tokenset, userinfo });
|
||||
|
||||
let user = await findUser({ openidId: userinfo.sub });
|
||||
const claims = tokenset.claims();
|
||||
let user = await findUser({ openidId: claims.sub });
|
||||
logger.info(
|
||||
`[openidStrategy] user ${user ? 'found' : 'not found'} with openidId: ${userinfo.sub}`,
|
||||
`[openidStrategy] user ${user ? 'found' : 'not found'} with openidId: ${claims.sub}`,
|
||||
);
|
||||
|
||||
if (!user) {
|
||||
user = await findUser({ email: userinfo.email });
|
||||
user = await findUser({ email: claims.email });
|
||||
logger.info(
|
||||
`[openidStrategy] user ${user ? 'found' : 'not found'} with email: ${
|
||||
userinfo.email
|
||||
} for openidId: ${userinfo.sub}`,
|
||||
claims.email
|
||||
} for openidId: ${claims.sub}`,
|
||||
);
|
||||
}
|
||||
|
||||
const userinfo = {
|
||||
...claims,
|
||||
...(await getUserInfo(openidConfig, tokenset.access_token, claims.sub)),
|
||||
};
|
||||
const fullName = getFullName(userinfo);
|
||||
|
||||
if (requiredRole) {
|
||||
@@ -220,7 +309,7 @@ async function setupOpenId() {
|
||||
user.name = fullName;
|
||||
}
|
||||
|
||||
if (userinfo.picture && !user.avatar?.includes('manual=true')) {
|
||||
if (!!userinfo && userinfo.picture && !user.avatar?.includes('manual=true')) {
|
||||
/** @type {string | undefined} */
|
||||
const imageUrl = userinfo.picture;
|
||||
|
||||
@@ -231,7 +320,12 @@ async function setupOpenId() {
|
||||
fileName = userinfo.sub + '.png';
|
||||
}
|
||||
|
||||
const imageBuffer = await downloadImage(imageUrl, tokenset.access_token);
|
||||
const imageBuffer = await downloadImage(
|
||||
imageUrl,
|
||||
openidConfig,
|
||||
tokenset.access_token,
|
||||
userinfo.sub,
|
||||
);
|
||||
if (imageBuffer) {
|
||||
const { saveBuffer } = getStrategyFunctions(process.env.CDN_PROVIDER);
|
||||
const imagePath = await saveBuffer({
|
||||
@@ -257,18 +351,34 @@ async function setupOpenId() {
|
||||
},
|
||||
);
|
||||
|
||||
done(null, user);
|
||||
done(null, { ...user, tokenset });
|
||||
} catch (err) {
|
||||
logger.error('[openidStrategy] login failed', err);
|
||||
done(err);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
passport.use('openid', openidLogin);
|
||||
return openidConfig;
|
||||
} catch (err) {
|
||||
logger.error('[openidStrategy]', err);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @function getOpenIdConfig
|
||||
* @description Returns the OpenID client instance.
|
||||
* @throws {Error} If the OpenID client is not initialized.
|
||||
* @returns {Configuration}
|
||||
*/
|
||||
function getOpenIdConfig() {
|
||||
if (!openidConfig) {
|
||||
throw new Error('OpenID client is not initialized. Please call setupOpenId first.');
|
||||
}
|
||||
return openidConfig;
|
||||
}
|
||||
|
||||
module.exports = setupOpenId;
|
||||
module.exports = {
|
||||
setupOpenId,
|
||||
getOpenIdConfig,
|
||||
};
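A minimal sketch of a consumer for the new getOpenIdConfig export, for example building a provider logout URL with openid-client v6's buildEndSessionUrl; the redirect target and this usage are assumptions, not shown in this changeset.

// Hypothetical consumer of getOpenIdConfig (illustrative only).
const client = require('openid-client');
const { getOpenIdConfig } = require('~/strategies');

function buildProviderLogoutUrl(idToken) {
  const config = getOpenIdConfig(); // throws if setupOpenId() has not run yet
  return client.buildEndSessionUrl(config, {
    id_token_hint: idToken,
    post_logout_redirect_uri: process.env.DOMAIN_CLIENT,
  }).href;
}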
|
||||
|
||||
@@ -1,16 +1,13 @@
|
||||
const fetch = require('node-fetch');
|
||||
const jwtDecode = require('jsonwebtoken/decode');
|
||||
const { Issuer, Strategy: OpenIDStrategy } = require('openid-client');
|
||||
const { findUser, createUser, updateUser } = require('~/models/userMethods');
|
||||
const setupOpenId = require('./openidStrategy');
|
||||
const { setupOpenId } = require('./openidStrategy');
|
||||
|
||||
// --- Mocks ---
|
||||
jest.mock('node-fetch');
|
||||
jest.mock('openid-client');
|
||||
jest.mock('jsonwebtoken/decode');
|
||||
jest.mock('~/server/services/Files/strategies', () => ({
|
||||
getStrategyFunctions: jest.fn(() => ({
|
||||
// You can modify this mock as needed (here returning a dummy function)
|
||||
saveBuffer: jest.fn().mockResolvedValue('/fake/path/to/avatar.png'),
|
||||
})),
|
||||
}));
|
||||
@@ -23,38 +20,73 @@ jest.mock('~/server/utils/crypto', () => ({
|
||||
hashToken: jest.fn().mockResolvedValue('hashed-token'),
|
||||
}));
|
||||
jest.mock('~/server/utils', () => ({
|
||||
isEnabled: jest.fn(() => false), // default to false, override per test if needed
|
||||
isEnabled: jest.fn(() => false),
|
||||
}));
|
||||
jest.mock('~/config', () => ({
|
||||
logger: {
|
||||
info: jest.fn(),
|
||||
debug: jest.fn(),
|
||||
error: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
},
|
||||
}));
|
||||
jest.mock('~/cache/getLogStores', () =>
|
||||
jest.fn(() => ({
|
||||
get: jest.fn(),
|
||||
set: jest.fn(),
|
||||
})),
|
||||
);
|
||||
jest.mock('librechat-data-provider', () => ({
|
||||
CacheKeys: {
|
||||
OPENID_EXCHANGED_TOKENS: 'openid-exchanged-tokens',
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock Issuer.discover so that setupOpenId gets a fake issuer and client
|
||||
Issuer.discover = jest.fn().mockResolvedValue({
|
||||
id_token_signing_alg_values_supported: ['RS256'],
|
||||
Client: jest.fn().mockImplementation((clientMetadata) => {
|
||||
return {
|
||||
metadata: clientMetadata,
|
||||
};
|
||||
}),
|
||||
// Mock the openid-client module and all its dependencies
|
||||
jest.mock('openid-client', () => {
|
||||
return {
|
||||
discovery: jest.fn().mockResolvedValue({
|
||||
clientId: 'fake_client_id',
|
||||
clientSecret: 'fake_client_secret',
|
||||
issuer: 'https://fake-issuer.com',
|
||||
// Add any other properties needed by the implementation
|
||||
}),
|
||||
fetchUserInfo: jest.fn().mockImplementation((config, accessToken, sub) => {
|
||||
// Only return additional properties, but don't override any claims
|
||||
return Promise.resolve({
|
||||
preferred_username: 'preferred_username',
|
||||
});
|
||||
}),
|
||||
customFetch: Symbol('customFetch'),
|
||||
};
|
||||
});
|
||||
|
||||
// To capture the verify callback from the strategy, we grab it from the mock constructor
|
||||
let verifyCallback;
|
||||
OpenIDStrategy.mockImplementation((options, verify) => {
|
||||
verifyCallback = verify;
|
||||
return { name: 'openid', options, verify };
|
||||
jest.mock('openid-client/passport', () => {
|
||||
let verifyCallback;
|
||||
const mockStrategy = jest.fn((options, verify) => {
|
||||
verifyCallback = verify;
|
||||
return { name: 'openid', options, verify };
|
||||
});
|
||||
|
||||
return {
|
||||
Strategy: mockStrategy,
|
||||
__getVerifyCallback: () => verifyCallback,
|
||||
};
|
||||
});
|
||||
|
||||
// Mock passport
|
||||
jest.mock('passport', () => ({
|
||||
use: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('setupOpenId', () => {
|
||||
// Store a reference to the verify callback once it's set up
|
||||
let verifyCallback;
|
||||
|
||||
// Helper to wrap the verify callback in a promise
|
||||
const validate = (tokenset, userinfo) =>
|
||||
const validate = (tokenset) =>
|
||||
new Promise((resolve, reject) => {
|
||||
verifyCallback(tokenset, userinfo, (err, user, details) => {
|
||||
verifyCallback(tokenset, (err, user, details) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
@@ -66,17 +98,16 @@ describe('setupOpenId', () => {
|
||||
const tokenset = {
|
||||
id_token: 'fake_id_token',
|
||||
access_token: 'fake_access_token',
|
||||
};
|
||||
|
||||
const baseUserinfo = {
|
||||
sub: '1234',
|
||||
email: 'test@example.com',
|
||||
email_verified: true,
|
||||
given_name: 'First',
|
||||
family_name: 'Last',
|
||||
name: 'My Full',
|
||||
username: 'flast',
|
||||
picture: 'https://example.com/avatar.png',
|
||||
claims: () => ({
|
||||
sub: '1234',
|
||||
email: 'test@example.com',
|
||||
email_verified: true,
|
||||
given_name: 'First',
|
||||
family_name: 'Last',
|
||||
name: 'My Full',
|
||||
username: 'flast',
|
||||
picture: 'https://example.com/avatar.png',
|
||||
}),
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
@@ -96,6 +127,7 @@ describe('setupOpenId', () => {
|
||||
delete process.env.OPENID_USERNAME_CLAIM;
|
||||
delete process.env.OPENID_NAME_CLAIM;
|
||||
delete process.env.PROXY;
|
||||
delete process.env.OPENID_USE_PKCE;
|
||||
|
||||
// Default jwtDecode mock returns a token that includes the required role.
|
||||
jwtDecode.mockReturnValue({
|
||||
@@ -120,16 +152,17 @@ describe('setupOpenId', () => {
|
||||
};
|
||||
fetch.mockResolvedValue(fakeResponse);
|
||||
|
||||
// Finally, call the setup function so that passport.use gets called
|
||||
// Call the setup function and capture the verify callback
|
||||
await setupOpenId();
|
||||
verifyCallback = require('openid-client/passport').__getVerifyCallback();
|
||||
});
|
||||
|
||||
it('should create a new user with correct username when username claim exists', async () => {
|
||||
// Arrange – our userinfo already has username 'flast'
|
||||
const userinfo = { ...baseUserinfo };
|
||||
const userinfo = tokenset.claims();
|
||||
|
||||
// Act
|
||||
const { user } = await validate(tokenset, userinfo);
|
||||
const { user } = await validate(tokenset);
|
||||
|
||||
// Assert
|
||||
expect(user.username).toBe(userinfo.username);
|
||||
@@ -148,13 +181,13 @@ describe('setupOpenId', () => {
|
||||
|
||||
it('should use given_name as username when username claim is missing', async () => {
|
||||
// Arrange – remove username from userinfo
|
||||
const userinfo = { ...baseUserinfo };
|
||||
const userinfo = { ...tokenset.claims() };
|
||||
delete userinfo.username;
|
||||
// Expect the username to be the given name (unchanged case)
|
||||
const expectUsername = userinfo.given_name;
|
||||
|
||||
// Act
|
||||
const { user } = await validate(tokenset, userinfo);
|
||||
const { user } = await validate({ ...tokenset, claims: () => userinfo });
|
||||
|
||||
// Assert
|
||||
expect(user.username).toBe(expectUsername);
|
||||
@@ -167,13 +200,13 @@ describe('setupOpenId', () => {
|
||||
|
||||
it('should use email as username when username and given_name are missing', async () => {
|
||||
// Arrange – remove username and given_name
|
||||
const userinfo = { ...baseUserinfo };
|
||||
const userinfo = { ...tokenset.claims() };
|
||||
delete userinfo.username;
|
||||
delete userinfo.given_name;
|
||||
const expectUsername = userinfo.email;
|
||||
|
||||
// Act
|
||||
const { user } = await validate(tokenset, userinfo);
|
||||
const { user } = await validate({ ...tokenset, claims: () => userinfo });
|
||||
|
||||
// Assert
|
||||
expect(user.username).toBe(expectUsername);
|
||||
@@ -187,10 +220,10 @@ describe('setupOpenId', () => {
|
||||
it('should override username with OPENID_USERNAME_CLAIM when set', async () => {
|
||||
// Arrange – set OPENID_USERNAME_CLAIM so that the sub claim is used
|
||||
process.env.OPENID_USERNAME_CLAIM = 'sub';
|
||||
const userinfo = { ...baseUserinfo };
|
||||
const userinfo = tokenset.claims();
|
||||
|
||||
// Act
|
||||
const { user } = await validate(tokenset, userinfo);
|
||||
const { user } = await validate(tokenset);
|
||||
|
||||
// Assert – username should equal the sub (converted as-is)
|
||||
expect(user.username).toBe(userinfo.sub);
|
||||
@@ -203,11 +236,11 @@ describe('setupOpenId', () => {
|
||||
|
||||
it('should set the full name correctly when given_name and family_name exist', async () => {
|
||||
// Arrange
|
||||
const userinfo = { ...baseUserinfo };
|
||||
const userinfo = tokenset.claims();
|
||||
const expectedFullName = `${userinfo.given_name} ${userinfo.family_name}`;
|
||||
|
||||
// Act
|
||||
const { user } = await validate(tokenset, userinfo);
|
||||
const { user } = await validate(tokenset);
|
||||
|
||||
// Assert
|
||||
expect(user.name).toBe(expectedFullName);
|
||||
@@ -216,10 +249,10 @@ describe('setupOpenId', () => {
|
||||
it('should override full name with OPENID_NAME_CLAIM when set', async () => {
|
||||
// Arrange – use the name claim as the full name
|
||||
process.env.OPENID_NAME_CLAIM = 'name';
|
||||
const userinfo = { ...baseUserinfo, name: 'Custom Name' };
|
||||
const userinfo = { ...tokenset.claims(), name: 'Custom Name' };
|
||||
|
||||
// Act
|
||||
const { user } = await validate(tokenset, userinfo);
|
||||
const { user } = await validate({ ...tokenset, claims: () => userinfo });
|
||||
|
||||
// Assert
|
||||
expect(user.name).toBe('Custom Name');
|
||||
@@ -230,31 +263,31 @@ describe('setupOpenId', () => {
|
||||
const existingUser = {
|
||||
_id: 'existingUserId',
|
||||
provider: 'local',
|
||||
email: baseUserinfo.email,
|
||||
email: tokenset.claims().email,
|
||||
openidId: '',
|
||||
username: '',
|
||||
name: '',
|
||||
};
|
||||
findUser.mockImplementation(async (query) => {
|
||||
if (query.openidId === baseUserinfo.sub || query.email === baseUserinfo.email) {
|
||||
if (query.openidId === tokenset.claims().sub || query.email === tokenset.claims().email) {
|
||||
return existingUser;
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
const userinfo = { ...baseUserinfo };
|
||||
const userinfo = tokenset.claims();
|
||||
|
||||
// Act
|
||||
await validate(tokenset, userinfo);
|
||||
await validate(tokenset);
|
||||
|
||||
// Assert – updateUser should be called and the user object updated
|
||||
expect(updateUser).toHaveBeenCalledWith(
|
||||
existingUser._id,
|
||||
expect.objectContaining({
|
||||
provider: 'openid',
|
||||
openidId: baseUserinfo.sub,
|
||||
username: baseUserinfo.username,
|
||||
name: `${baseUserinfo.given_name} ${baseUserinfo.family_name}`,
|
||||
openidId: userinfo.sub,
|
||||
username: userinfo.username,
|
||||
name: `${userinfo.given_name} ${userinfo.family_name}`,
|
||||
}),
|
||||
);
|
||||
});
|
||||
@@ -264,10 +297,10 @@ describe('setupOpenId', () => {
|
||||
jwtDecode.mockReturnValue({
|
||||
roles: ['SomeOtherRole'],
|
||||
});
|
||||
const userinfo = { ...baseUserinfo };
|
||||
const userinfo = tokenset.claims();
|
||||
|
||||
// Act
|
||||
const { user, details } = await validate(tokenset, userinfo);
|
||||
const { user, details } = await validate(tokenset);
|
||||
|
||||
// Assert – verify that the strategy rejects login
|
||||
expect(user).toBe(false);
|
||||
@@ -276,10 +309,10 @@ describe('setupOpenId', () => {
|
||||
|
||||
it('should attempt to download and save the avatar if picture is provided', async () => {
|
||||
// Arrange – ensure userinfo contains a picture URL
|
||||
const userinfo = { ...baseUserinfo };
|
||||
const userinfo = tokenset.claims();
|
||||
|
||||
// Act
|
||||
const { user } = await validate(tokenset, userinfo);
|
||||
const { user } = await validate(tokenset);
|
||||
|
||||
// Assert – verify that download was attempted and the avatar field was set via updateUser
|
||||
expect(fetch).toHaveBeenCalled();
|
||||
@@ -289,14 +322,25 @@ describe('setupOpenId', () => {
|
||||
|
||||
it('should not attempt to download avatar if picture is not provided', async () => {
|
||||
// Arrange – remove picture
|
||||
const userinfo = { ...baseUserinfo };
|
||||
const userinfo = { ...tokenset.claims() };
|
||||
delete userinfo.picture;
|
||||
|
||||
// Act
|
||||
await validate(tokenset, userinfo);
|
||||
await validate({ ...tokenset, claims: () => userinfo });
|
||||
|
||||
// Assert – fetch should not be called and avatar should remain undefined or empty
|
||||
expect(fetch).not.toHaveBeenCalled();
|
||||
// Depending on your implementation, user.avatar may be undefined or an empty string.
|
||||
});
|
||||
|
||||
it('should default to usePKCE false when OPENID_USE_PKCE is not defined', async () => {
|
||||
const OpenIDStrategy = require('openid-client/passport').Strategy;
|
||||
|
||||
delete process.env.OPENID_USE_PKCE;
|
||||
await setupOpenId();
|
||||
|
||||
const callOptions = OpenIDStrategy.mock.calls[OpenIDStrategy.mock.calls.length - 1][0];
|
||||
expect(callOptions.usePKCE).toBe(false);
|
||||
expect(callOptions.params?.code_challenge_method).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -7,7 +7,8 @@ const socialLogin =
|
||||
(provider, getProfileDetails) => async (accessToken, refreshToken, idToken, profile, cb) => {
|
||||
try {
|
||||
const { email, id, avatarUrl, username, name, emailVerified } = getProfileDetails({
|
||||
idToken, profile,
|
||||
idToken,
|
||||
profile,
|
||||
});
|
||||
|
||||
const oldUser = await findUser({ email: email.trim() });
|
||||
|
||||
api/test/__mocks__/openid-client-passport.js (new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
// api/test/__mocks__/openid-client-passport.js
|
||||
const Strategy = jest.fn().mockImplementation((options, verify) => {
|
||||
return { name: 'mocked-openid-passport-strategy', options, verify };
|
||||
});
|
||||
|
||||
module.exports = { Strategy };
|
||||
api/test/__mocks__/openid-client.js (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
// api/test/__mocks__/openid-client.js
|
||||
module.exports = {
|
||||
Issuer: {
|
||||
discover: jest.fn().mockResolvedValue({
|
||||
Client: jest.fn().mockImplementation(() => ({
|
||||
authorizationUrl: jest.fn().mockReturnValue('mock_auth_url'),
|
||||
callback: jest.fn().mockResolvedValue({
|
||||
access_token: 'mock_access_token',
|
||||
id_token: 'mock_id_token',
|
||||
claims: () => ({
|
||||
sub: 'mock_sub',
|
||||
email: 'mock@example.com',
|
||||
}),
|
||||
}),
|
||||
userinfo: jest.fn().mockResolvedValue({
|
||||
sub: 'mock_sub',
|
||||
email: 'mock@example.com',
|
||||
}),
|
||||
})),
|
||||
}),
|
||||
},
|
||||
Strategy: jest.fn().mockImplementation((options, verify) => {
|
||||
// Store verify to call it if needed, or just mock the strategy behavior
|
||||
return { name: 'openid-mock-strategy' };
|
||||
}),
|
||||
custom: {
|
||||
setHttpOptionsDefaults: jest.fn(),
|
||||
},
|
||||
// Add any other exports from openid-client that are used directly
|
||||
// For example, if your code uses `client.Issuer.discover`, then mock `Issuer`
|
||||
// If it uses `new Strategy()`, then mock `Strategy`
|
||||
// Based on openidStrategy.js, it uses:
|
||||
// const client = require('openid-client'); -> client.discovery, client.fetchUserInfo, client.genericGrantRequest
|
||||
// const { Strategy: OpenIDStrategy } = require('openid-client/passport');
|
||||
// So the mock needs to cover these.
|
||||
// The provided mock in openidStrategy.spec.js is a good reference.
|
||||
|
||||
// Simpler mock based on the spec file:
|
||||
discovery: jest.fn().mockResolvedValue({
|
||||
clientId: 'fake_client_id',
|
||||
clientSecret: 'fake_client_secret',
|
||||
issuer: 'https://fake-issuer.com',
|
||||
Client: jest.fn().mockImplementation(() => ({
|
||||
authorizationUrl: jest.fn().mockReturnValue('mock_auth_url'),
|
||||
callback: jest.fn().mockResolvedValue({
|
||||
access_token: 'mock_access_token',
|
||||
id_token: 'mock_id_token',
|
||||
claims: () => ({
|
||||
sub: 'mock_sub',
|
||||
email: 'mock@example.com',
|
||||
}),
|
||||
}),
|
||||
userinfo: jest.fn().mockResolvedValue({
|
||||
sub: 'mock_sub',
|
||||
email: 'mock@example.com',
|
||||
}),
|
||||
grant: jest.fn().mockResolvedValue({ access_token: 'mock_grant_token' }), // For genericGrantRequest
|
||||
})),
|
||||
}),
|
||||
fetchUserInfo: jest.fn().mockResolvedValue({
|
||||
preferred_username: 'preferred_username',
|
||||
}),
|
||||
genericGrantRequest: jest
|
||||
.fn()
|
||||
.mockResolvedValue({ access_token: 'mock_grant_access_token', expires_in: 3600 }),
|
||||
customFetch: Symbol('customFetch'),
|
||||
};
|
||||
@@ -55,6 +55,12 @@
 * @memberof typedefs
 */

/**
 * @exports MessageContentComplex
 * @typedef {import('@librechat/agents').MessageContentComplex} MessageContentComplex
 * @memberof typedefs
 */

/**
 * @exports EventHandler
 * @typedef {import('@librechat/agents').EventHandler} EventHandler
@@ -186,6 +192,8 @@
 * agent_index: number;
 * last_agent_index: number;
 * hide_sequential_outputs: boolean;
 * version?: 'v1' | 'v2';
 * streamMode?: string
 * }> & {
 * toolCall?: LangChainToolCall & { stepId?: string };
 * }} GraphRunnableConfig
@@ -473,6 +481,25 @@
 * @typedef {import('librechat-data-provider').Agents.MessageContentImageUrl} MessageContentImageUrl
 * @memberof typedefs
 */
/** Web Search */

/**
 * @exports SearchResult
 * @typedef {import('@librechat/agents').SearchResult} SearchResult
 * @memberof typedefs
 */

/**
 * @exports SearchResultData
 * @typedef {import('@librechat/agents').SearchResultData} SearchResultData
 * @memberof typedefs
 */

/**
 * @exports ValidSource
 * @typedef {import('librechat-data-provider').ValidSource} ValidSource
 * @memberof typedefs
 */

/** Prompts */
/**
@@ -848,6 +875,12 @@
 * @memberof typedefs
 */

/**
 * @exports IPluginAuth
 * @typedef {import('@librechat/data-schemas').IPluginAuth} IPluginAuth
 * @memberof typedefs
 */

/**
 * @exports ObjectId
 * @typedef {import('mongoose').Types.ObjectId} ObjectId
@@ -990,6 +1023,18 @@
 * @memberof typedefs
 */

/**
 * @exports TEphemeralAgent
 * @typedef {import('librechat-data-provider').TEphemeralAgent} TEphemeralAgent
 * @memberof typedefs
 */

/**
 * @exports TWebSearchKeys
 * @typedef {import('librechat-data-provider').TWebSearchKeys} TWebSearchKeys
 * @memberof typedefs
 */

/**
 * @exports AgentToolResources
 * @typedef {import('librechat-data-provider').AgentToolResources} AgentToolResources
@@ -105,6 +105,9 @@ const anthropicModels = {
  'claude-3.7-sonnet': 200000,
  'claude-3-5-sonnet-latest': 200000,
  'claude-3.5-sonnet-latest': 200000,
  'claude-sonnet-4': 200000,
  'claude-opus-4': 200000,
  'claude-4': 200000,
};

const deepseekModels = {
@@ -246,6 +249,8 @@ const anthropicMaxOutputs = {
  'claude-3-haiku': 4096,
  'claude-3-sonnet': 4096,
  'claude-3-opus': 4096,
  'claude-opus-4': 32000,
  'claude-sonnet-4': 64000,
  'claude-3.5-sonnet': 8192,
  'claude-3-5-sonnet': 8192,
  'claude-3.7-sonnet': 128000,
@@ -649,3 +649,58 @@ describe('Grok Model Tests - Tokens', () => {
    });
  });
});

describe('Claude Model Tests', () => {
  it('should return correct context length for Claude 4 models', () => {
    expect(getModelMaxTokens('claude-sonnet-4')).toBe(200000);
    expect(getModelMaxTokens('claude-opus-4')).toBe(200000);
  });

  it('should handle Claude 4 model name variations with different prefixes and suffixes', () => {
    const modelVariations = [
      'claude-sonnet-4',
      'claude-sonnet-4-20240229',
      'claude-sonnet-4-latest',
      'anthropic/claude-sonnet-4',
      'claude-sonnet-4/anthropic',
      'claude-sonnet-4-preview',
      'claude-sonnet-4-20240229-preview',
      'claude-opus-4',
      'claude-opus-4-20240229',
      'claude-opus-4-latest',
      'anthropic/claude-opus-4',
      'claude-opus-4/anthropic',
      'claude-opus-4-preview',
      'claude-opus-4-20240229-preview',
    ];

    modelVariations.forEach((model) => {
      expect(getModelMaxTokens(model)).toBe(200000);
    });
  });

  it('should match model names correctly for Claude 4 models', () => {
    const modelVariations = [
      'claude-sonnet-4',
      'claude-sonnet-4-20240229',
      'claude-sonnet-4-latest',
      'anthropic/claude-sonnet-4',
      'claude-sonnet-4/anthropic',
      'claude-sonnet-4-preview',
      'claude-sonnet-4-20240229-preview',
      'claude-opus-4',
      'claude-opus-4-20240229',
      'claude-opus-4-latest',
      'anthropic/claude-opus-4',
      'claude-opus-4/anthropic',
      'claude-opus-4-preview',
      'claude-opus-4-20240229-preview',
    ];

    modelVariations.forEach((model) => {
      const isSonnet = model.includes('sonnet');
      const expectedModel = isSonnet ? 'claude-sonnet-4' : 'claude-opus-4';
      expect(matchModelName(model, EModelEndpoint.anthropic)).toBe(expectedModel);
    });
  });
});
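The expectations above rely on partial key matching: any variation that contains a base key falls back to that key's limits. A minimal sketch of such a lookup, assuming a "longest contained key wins" rule (the real getModelMaxTokens/matchModelName implementations may differ in their precedence details):

```ts
// Minimal sketch of partial-key matching against a token map.
const anthropicContext: Record<string, number> = {
  'claude-sonnet-4': 200000,
  'claude-opus-4': 200000,
  'claude-3.7-sonnet': 200000,
};

function lookupMaxTokens(model: string, map: Record<string, number>): number | undefined {
  // Exact key first, then the longest partial key contained in the model name.
  if (map[model] != null) {
    return map[model];
  }
  const match = Object.keys(map)
    .filter((key) => model.includes(key))
    .sort((a, b) => b.length - a.length)[0];
  return match ? map[match] : undefined;
}

// 'anthropic/claude-opus-4-20240229-preview' contains 'claude-opus-4' -> 200000
console.log(lookupMaxTokens('anthropic/claude-opus-4-20240229-preview', anthropicContext));
```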
@@ -1,23 +0,0 @@
|
||||
# Patterns to ignore when building packages.
|
||||
# This supports shell glob matching, relative path matching, and
|
||||
# negation (prefixed with !). Only one pattern per line.
|
||||
.DS_Store
|
||||
# Common VCS dirs
|
||||
.git/
|
||||
.gitignore
|
||||
.bzr/
|
||||
.bzrignore
|
||||
.hg/
|
||||
.hgignore
|
||||
.svn/
|
||||
# Common backup files
|
||||
*.swp
|
||||
*.bak
|
||||
*.tmp
|
||||
*.orig
|
||||
*~
|
||||
# Various IDEs
|
||||
.project
|
||||
.idea/
|
||||
*.tmproj
|
||||
.vscode/
|
||||
@@ -1,4 +0,0 @@
|
||||
apiVersion: v2
|
||||
name: librechat
|
||||
type: application
|
||||
version: 1.0.0
|
||||
@@ -1,22 +0,0 @@
|
||||
1. Get the application URL by running these commands:
|
||||
{{- if .Values.ingress.enabled }}
|
||||
{{- range $host := .Values.ingress.hosts }}
|
||||
{{- range .paths }}
|
||||
http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- else if contains "NodePort" .Values.service.type }}
|
||||
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "librechat.fullname" . }})
|
||||
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
|
||||
echo http://$NODE_IP:$NODE_PORT
|
||||
{{- else if contains "LoadBalancer" .Values.service.type }}
|
||||
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
|
||||
You can watch the status of it by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "librechat.fullname" . }}'
|
||||
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "librechat.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
|
||||
echo http://$SERVICE_IP:{{ .Values.service.port }}
|
||||
{{- else if contains "ClusterIP" .Values.service.type }}
|
||||
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "librechat.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
|
||||
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
|
||||
echo "Visit http://127.0.0.1:8080 to use your application"
|
||||
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
|
||||
{{- end }}
|
||||
@@ -1,74 +0,0 @@
|
||||
{{/*
|
||||
Expand the name of the chart.
|
||||
*/}}
|
||||
{{- define "librechat.name" -}}
|
||||
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create a default fully qualified app name.
|
||||
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
|
||||
If release name contains chart name it will be used as a full name.
|
||||
*/}}
|
||||
{{- define "librechat.fullname" -}}
|
||||
{{- if .Values.fullnameOverride }}
|
||||
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- $name := default .Chart.Name .Values.nameOverride }}
|
||||
{{- if contains $name .Release.Name }}
|
||||
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create chart name and version as used by the chart label.
|
||||
*/}}
|
||||
{{- define "librechat.chart" -}}
|
||||
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Common labels
|
||||
*/}}
|
||||
{{- define "librechat.labels" -}}
|
||||
helm.sh/chart: {{ include "librechat.chart" . }}
|
||||
{{ include "librechat.selectorLabels" . }}
|
||||
{{- if .Chart.AppVersion }}
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
{{- end }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Selector labels
|
||||
*/}}
|
||||
{{- define "librechat.selectorLabels" -}}
|
||||
app.kubernetes.io/name: {{ include "librechat.name" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create the name of the service account to use
|
||||
*/}}
|
||||
{{- define "librechat.serviceAccountName" -}}
|
||||
{{- if .Values.serviceAccount.create }}
|
||||
{{- default (include "librechat.fullname" .) .Values.serviceAccount.name }}
|
||||
{{- else }}
|
||||
{{- default "default" .Values.serviceAccount.name }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Print string from list split by ,
|
||||
*/}}
|
||||
{{- define "model.list" -}}
|
||||
{{- range $idx, $val := $.Values.configEndpoint.models -}}
|
||||
{{- if $idx }}
|
||||
{{- print ", " -}}
|
||||
{{- end -}}
|
||||
{{- $val -}}
|
||||
{{- end -}}
|
||||
{{- end -}}
|
||||
@@ -1,10 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: {{ include "librechat.fullname" . }}-env
|
||||
labels:
|
||||
{{- include "librechat.labels" . | nindent 4 }}
|
||||
data:
|
||||
{{- range $key, $val := .Values.config.env }}
|
||||
{{ $key }}: {{ $val | quote }}
|
||||
{{- end }}
|
||||
@@ -1,81 +0,0 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ include "librechat.fullname" . }}
|
||||
labels:
|
||||
{{- include "librechat.labels" . | nindent 4 }}
|
||||
spec:
|
||||
{{- if not .Values.autoscaling.enabled }}
|
||||
replicas: {{ .Values.replicaCount }}
|
||||
{{- end }}
|
||||
selector:
|
||||
matchLabels:
|
||||
{{- include "librechat.selectorLabels" . | nindent 6 }}
|
||||
template:
|
||||
metadata:
|
||||
{{- with .Values.podAnnotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
labels:
|
||||
{{- include "librechat.selectorLabels" . | nindent 8 }}
|
||||
{{- with .Values.podLabels }}
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
{{- with .Values.imagePullSecrets }}
|
||||
imagePullSecrets:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
serviceAccountName: {{ include "librechat.serviceAccountName" . }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.podSecurityContext | nindent 8 }}
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.securityContext | nindent 12 }}
|
||||
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
envFrom:
|
||||
{{ if .Values.config.envSecrets.secretRef }}
|
||||
- secretRef:
|
||||
name: {{ .Values.config.envSecrets.secretRef }}
|
||||
{{- end }}
|
||||
- configMapRef:
|
||||
name: {{ include "librechat.fullname" . }}-env
|
||||
env:
|
||||
{{- range $secretKeyRef := .Values.config.envSecrets.secretKeyRef }}
|
||||
- name: {{ $secretKeyRef.name }}
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ $secretKeyRef.secretName }}
|
||||
key: {{ $secretKeyRef.secretKey }}
|
||||
{{- end }}
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 3080
|
||||
protocol: TCP
|
||||
livenessProbe:
|
||||
initialDelaySeconds: 5
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
readinessProbe:
|
||||
initialDelaySeconds: 5
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
resources:
|
||||
{{- toYaml .Values.resources | nindent 12 }}
|
||||
{{- with .Values.nodeSelector }}
|
||||
nodeSelector:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
@@ -1,28 +0,0 @@
|
||||
{{- if .Values.autoscaling.enabled }}
|
||||
apiVersion: autoscaling/v2beta1
|
||||
kind: HorizontalPodAutoscaler
|
||||
metadata:
|
||||
name: {{ include "librechat.fullname" . }}
|
||||
labels:
|
||||
{{- include "librechat.labels" . | nindent 4 }}
|
||||
spec:
|
||||
scaleTargetRef:
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
name: {{ include "librechat.fullname" . }}
|
||||
minReplicas: {{ .Values.autoscaling.minReplicas }}
|
||||
maxReplicas: {{ .Values.autoscaling.maxReplicas }}
|
||||
metrics:
|
||||
{{- if .Values.autoscaling.targetCPUUtilizationPercentage }}
|
||||
- type: Resource
|
||||
resource:
|
||||
name: cpu
|
||||
targetAverageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }}
|
||||
{{- end }}
|
||||
{{- if .Values.autoscaling.targetMemoryUtilizationPercentage }}
|
||||
- type: Resource
|
||||
resource:
|
||||
name: memory
|
||||
targetAverageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
@@ -1,61 +0,0 @@
|
||||
{{- if .Values.ingress.enabled -}}
|
||||
{{- $fullName := include "librechat.fullname" . -}}
|
||||
{{- $svcPort := .Values.service.port -}}
|
||||
{{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }}
|
||||
{{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }}
|
||||
{{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}}
|
||||
apiVersion: networking.k8s.io/v1
|
||||
{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}}
|
||||
apiVersion: networking.k8s.io/v1beta1
|
||||
{{- else -}}
|
||||
apiVersion: extensions/v1beta1
|
||||
{{- end }}
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: {{ $fullName }}
|
||||
labels:
|
||||
{{- include "librechat.labels" . | nindent 4 }}
|
||||
{{- with .Values.ingress.annotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
{{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }}
|
||||
ingressClassName: {{ .Values.ingress.className }}
|
||||
{{- end }}
|
||||
{{- if .Values.ingress.tls }}
|
||||
tls:
|
||||
{{- range .Values.ingress.tls }}
|
||||
- hosts:
|
||||
{{- range .hosts }}
|
||||
- {{ . | quote }}
|
||||
{{- end }}
|
||||
secretName: {{ .secretName }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
rules:
|
||||
{{- range .Values.ingress.hosts }}
|
||||
- host: {{ .host | quote }}
|
||||
http:
|
||||
paths:
|
||||
{{- range .paths }}
|
||||
- path: {{ .path }}
|
||||
{{- if and .pathType (semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion) }}
|
||||
pathType: {{ .pathType }}
|
||||
{{- end }}
|
||||
backend:
|
||||
{{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }}
|
||||
service:
|
||||
name: {{ $fullName }}
|
||||
port:
|
||||
number: {{ $svcPort }}
|
||||
{{- else }}
|
||||
serviceName: {{ $fullName }}
|
||||
servicePort: {{ $svcPort }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
@@ -1,15 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: {{ include "librechat.fullname" . }}
|
||||
labels:
|
||||
{{- include "librechat.labels" . | nindent 4 }}
|
||||
spec:
|
||||
type: {{ .Values.service.type }}
|
||||
ports:
|
||||
- port: {{ .Values.service.port }}
|
||||
targetPort: http
|
||||
protocol: TCP
|
||||
name: http
|
||||
selector:
|
||||
{{- include "librechat.selectorLabels" . | nindent 4 }}
|
||||
@@ -1,12 +0,0 @@
|
||||
{{- if .Values.serviceAccount.create -}}
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: {{ include "librechat.serviceAccountName" . }}
|
||||
labels:
|
||||
{{- include "librechat.labels" . | nindent 4 }}
|
||||
{{- with .Values.serviceAccount.annotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
@@ -1,112 +0,0 @@
|
||||
# Default values for librechat.
|
||||
# This is a YAML-formatted file.
|
||||
# Declare variables to be passed into your templates.
|
||||
|
||||
replicaCount: 1
|
||||
|
||||
image:
|
||||
repository: ghcr.io/danny-avila/librechat
|
||||
pullPolicy: IfNotPresent
|
||||
# Overrides the image tag whose default is the chart appVersion.
|
||||
tag: "latest"
|
||||
|
||||
imagePullSecrets: []
|
||||
nameOverride: ""
|
||||
fullnameOverride: ""
|
||||
|
||||
serviceAccount:
|
||||
# Specifies whether a service account should be created
|
||||
create: true
|
||||
# Annotations to add to the service account
|
||||
annotations: {}
|
||||
# The name of the service account to use.
|
||||
# If not set and create is true, a name is generated using the fullname template
|
||||
name: ""
|
||||
|
||||
podAnnotations: {}
|
||||
|
||||
podLabels: {}
|
||||
|
||||
podSecurityContext: {}
|
||||
# fsGroup: 2000
|
||||
|
||||
securityContext: {}
|
||||
# capabilities:
|
||||
# drop:
|
||||
# - ALL
|
||||
# readOnlyRootFilesystem: true
|
||||
# runAsNonRoot: true
|
||||
# runAsUser: 1000
|
||||
|
||||
networkPolicies:
|
||||
enabled: true
|
||||
|
||||
service:
|
||||
type: LoadBalancer
|
||||
port: 80
|
||||
|
||||
ingress:
|
||||
enabled: true
|
||||
className: "nginx"
|
||||
annotations: {}
|
||||
# kubernetes.io/ingress.class: nginx
|
||||
# kubernetes.io/tls-acme: "true"
|
||||
hosts:
|
||||
- host: chat.example.com
|
||||
paths:
|
||||
- path: /
|
||||
pathType: ImplementationSpecific
|
||||
tls: []
|
||||
# - secretName: chart-example-tls
|
||||
# hosts:
|
||||
# - chart-example.local
|
||||
|
||||
resources: {}
|
||||
# limits:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
# requests:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
|
||||
autoscaling:
|
||||
enabled: false
|
||||
minReplicas: 1
|
||||
maxReplicas: 100
|
||||
targetCPUUtilizationPercentage: 80
|
||||
# targetMemoryUtilizationPercentage: 80
|
||||
|
||||
nodeSelector: {}
|
||||
|
||||
tolerations: []
|
||||
|
||||
affinity: {}
|
||||
|
||||
config:
|
||||
envSecrets:
|
||||
# Use this when using one k8s secret for multiple env secrets
|
||||
# secretRef: librechat
|
||||
|
||||
# Use this when using one k8s secret for each env secret
|
||||
secretKeyRef: []
|
||||
# - name: CREDS_IV
|
||||
# secretName: librechat
|
||||
# secretKey: CREDS_IV
|
||||
|
||||
env:
|
||||
# Full list of possible values
|
||||
# https://github.com/danny-avila/LibreChat/blob/main/.env.example
|
||||
ALLOW_EMAIL_LOGIN: "true"
|
||||
ALLOW_REGISTRATION: "true"
|
||||
ALLOW_SOCIAL_LOGIN: "false"
|
||||
ALLOW_SOCIAL_REGISTRATION: "false"
|
||||
APP_TITLE: "Librechat"
|
||||
CUSTOM_FOOTER: "Provided with ❤️"
|
||||
DEBUG_CONSOLE: "true"
|
||||
DEBUG_LOGGING: "true"
|
||||
DEBUG_OPENAI: "true"
|
||||
DEBUG_PLUGINS: "true"
|
||||
DOMAIN_CLIENT: ""
|
||||
DOMAIN_SERVER: ""
|
||||
ENDPOINTS: "openAI,azureOpenAI,chatGPTBrowser,google,gptPlugins,anthropic"
|
||||
SEARCH: false
|
||||
@@ -87,7 +87,7 @@
    "react-i18next": "^15.4.0",
    "react-lazy-load-image-component": "^1.6.0",
    "react-markdown": "^9.0.1",
    "react-resizable-panels": "^2.1.8",
    "react-resizable-panels": "^3.0.2",
    "react-router-dom": "^6.11.2",
    "react-speech-recognition": "^3.10.0",
    "react-textarea-autosize": "^8.4.0",
@@ -139,6 +139,7 @@
    "postcss": "^8.4.31",
    "postcss-loader": "^7.1.0",
    "postcss-preset-env": "^8.2.0",
    "rollup-plugin-visualizer": "^6.0.0",
    "tailwindcss": "^3.4.1",
    "ts-jest": "^29.2.5",
    "typescript": "^5.3.3",
client/src/Providers/SearchContext.tsx (new file, 9 lines)
@@ -0,0 +1,9 @@
import { createContext, useContext } from 'react';
import type { SearchResultData } from 'librechat-data-provider';

type SearchContext = {
  searchResults?: { [key: string]: SearchResultData };
};

export const SearchContext = createContext<SearchContext>({} as SearchContext);
export const useSearchContext = () => useContext(SearchContext);
@@ -20,3 +20,4 @@ export * from './ArtifactContext';
export * from './CodeBlockContext';
export * from './ToolCallsMapContext';
export * from './SetConvoContext';
export * from './SearchContext';
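A short usage sketch for the new provider; the component names here are illustrative only (the real consumers added in this diff are ContentParts, SearchContent, and Sources):

```tsx
import React from 'react';
import type { SearchResultData } from 'librechat-data-provider';
import { SearchContext, useSearchContext } from '~/Providers';

// Illustrative consumer: renders how many search turns are available.
function SearchTurnCount() {
  const { searchResults } = useSearchContext();
  const turns = Object.keys(searchResults ?? {}).length;
  return <span>{turns} search turn(s)</span>;
}

// Illustrative wrapper: any subtree under the provider can call useSearchContext().
export function MessageWithSearch({ results }: { results?: { [key: string]: SearchResultData } }) {
  return (
    <SearchContext.Provider value={{ searchResults: results }}>
      <SearchTurnCount />
    </SearchContext.Provider>
  );
}
```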
@@ -10,6 +10,7 @@ export type TAgentOption = OptionWithIcon &
|
||||
};
|
||||
|
||||
export type TAgentCapabilities = {
|
||||
[AgentCapabilities.web_search]: boolean;
|
||||
[AgentCapabilities.file_search]: boolean;
|
||||
[AgentCapabilities.execute_code]: boolean;
|
||||
[AgentCapabilities.end_after_tools]?: boolean;
|
||||
|
||||
@@ -142,6 +142,7 @@ export enum Panel {
|
||||
builder = 'builder',
|
||||
actions = 'actions',
|
||||
model = 'model',
|
||||
version = 'version',
|
||||
}
|
||||
|
||||
export type FileSetter =
|
||||
|
||||
@@ -15,6 +15,7 @@ import type { BadgeItem } from '~/common';
|
||||
import { useChatBadges } from '~/hooks';
|
||||
import { Badge } from '~/components/ui';
|
||||
import MCPSelect from './MCPSelect';
|
||||
import WebSearch from './WebSearch';
|
||||
import store from '~/store';
|
||||
|
||||
interface BadgeRowProps {
|
||||
@@ -354,6 +355,7 @@ function BadgeRow({
|
||||
)}
|
||||
{showEphemeralBadges === true && (
|
||||
<>
|
||||
<WebSearch conversationId={conversationId} />
|
||||
<CodeInterpreter conversationId={conversationId} />
|
||||
<MCPSelect conversationId={conversationId} />
|
||||
</>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import debounce from 'lodash/debounce';
|
||||
import React, { memo, useMemo, useCallback } from 'react';
|
||||
import React, { memo, useMemo, useCallback, useRef } from 'react';
|
||||
import { useRecoilState } from 'recoil';
|
||||
import { TerminalSquareIcon } from 'lucide-react';
|
||||
import {
|
||||
@@ -32,6 +32,7 @@ const storageCondition = (value: unknown, rawCurrentValue?: string | null) => {
|
||||
};
|
||||
|
||||
function CodeInterpreter({ conversationId }: { conversationId?: string | null }) {
|
||||
const triggerRef = useRef<HTMLInputElement>(null);
|
||||
const localize = useLocalize();
|
||||
const key = conversationId ?? Constants.NEW_CONVO;
|
||||
|
||||
@@ -73,9 +74,10 @@ function CodeInterpreter({ conversationId }: { conversationId?: string | null })
|
||||
);
|
||||
|
||||
const handleChange = useCallback(
|
||||
(isChecked: boolean) => {
|
||||
(e: React.ChangeEvent<HTMLInputElement>, isChecked: boolean) => {
|
||||
if (!isAuthenticated) {
|
||||
setIsDialogOpen(true);
|
||||
e.preventDefault();
|
||||
return;
|
||||
}
|
||||
setRunCode(isChecked);
|
||||
@@ -95,6 +97,7 @@ function CodeInterpreter({ conversationId }: { conversationId?: string | null })
|
||||
return (
|
||||
<>
|
||||
<CheckboxButton
|
||||
ref={triggerRef}
|
||||
className="max-w-fit"
|
||||
defaultChecked={runCode}
|
||||
setValue={debouncedChange}
|
||||
@@ -105,6 +108,7 @@ function CodeInterpreter({ conversationId }: { conversationId?: string | null })
|
||||
<ApiKeyDialog
|
||||
onSubmit={onSubmit}
|
||||
isOpen={isDialogOpen}
|
||||
triggerRef={triggerRef}
|
||||
register={methods.register}
|
||||
onRevoke={handleRevokeApiKey}
|
||||
onOpenChange={setIsDialogOpen}
|
||||
|
||||
@@ -26,7 +26,7 @@ export default function PopoverButtons({
|
||||
buttonClass?: string;
|
||||
iconClass?: string;
|
||||
endpoint?: EModelEndpoint | string;
|
||||
endpointType?: EModelEndpoint | string;
|
||||
endpointType?: EModelEndpoint | string | null;
|
||||
model?: string | null;
|
||||
}) {
|
||||
const {
|
||||
|
||||
client/src/components/Chat/Input/WebSearch.tsx (new file, 123 lines)
@@ -0,0 +1,123 @@
|
||||
import React, { memo, useRef, useMemo, useCallback } from 'react';
|
||||
import { Globe } from 'lucide-react';
|
||||
import debounce from 'lodash/debounce';
|
||||
import { useRecoilState } from 'recoil';
|
||||
import {
|
||||
Tools,
|
||||
AuthType,
|
||||
Constants,
|
||||
Permissions,
|
||||
PermissionTypes,
|
||||
LocalStorageKeys,
|
||||
} from 'librechat-data-provider';
|
||||
import ApiKeyDialog from '~/components/SidePanel/Agents/Search/ApiKeyDialog';
|
||||
import { useLocalize, useHasAccess, useSearchApiKeyForm } from '~/hooks';
|
||||
import CheckboxButton from '~/components/ui/CheckboxButton';
|
||||
import useLocalStorage from '~/hooks/useLocalStorageAlt';
|
||||
import { useVerifyAgentToolAuth } from '~/data-provider';
|
||||
import { ephemeralAgentByConvoId } from '~/store';
|
||||
|
||||
const storageCondition = (value: unknown, rawCurrentValue?: string | null) => {
|
||||
if (rawCurrentValue) {
|
||||
try {
|
||||
const currentValue = rawCurrentValue?.trim() ?? '';
|
||||
if (currentValue === 'true' && value === false) {
|
||||
return true;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
return value !== undefined && value !== null && value !== '' && value !== false;
|
||||
};
|
||||
|
||||
function WebSearch({ conversationId }: { conversationId?: string | null }) {
|
||||
const triggerRef = useRef<HTMLInputElement>(null);
|
||||
const localize = useLocalize();
|
||||
const key = conversationId ?? Constants.NEW_CONVO;
|
||||
|
||||
const canUseWebSearch = useHasAccess({
|
||||
permissionType: PermissionTypes.WEB_SEARCH,
|
||||
permission: Permissions.USE,
|
||||
});
|
||||
const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key));
|
||||
const isWebSearchToggleEnabled = useMemo(() => {
|
||||
return ephemeralAgent?.web_search ?? false;
|
||||
}, [ephemeralAgent?.web_search]);
|
||||
|
||||
const { data } = useVerifyAgentToolAuth(
|
||||
{ toolId: Tools.web_search },
|
||||
{
|
||||
retry: 1,
|
||||
},
|
||||
);
|
||||
const authTypes = useMemo(() => data?.authTypes ?? [], [data?.authTypes]);
|
||||
const isAuthenticated = useMemo(() => data?.authenticated ?? false, [data?.authenticated]);
|
||||
const { methods, onSubmit, isDialogOpen, setIsDialogOpen, handleRevokeApiKey } =
|
||||
useSearchApiKeyForm({});
|
||||
|
||||
const setValue = useCallback(
|
||||
(isChecked: boolean) => {
|
||||
setEphemeralAgent((prev) => ({
|
||||
...prev,
|
||||
web_search: isChecked,
|
||||
}));
|
||||
},
|
||||
[setEphemeralAgent],
|
||||
);
|
||||
|
||||
const [webSearch, setWebSearch] = useLocalStorage<boolean>(
|
||||
`${LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_}${key}`,
|
||||
isWebSearchToggleEnabled,
|
||||
setValue,
|
||||
storageCondition,
|
||||
);
|
||||
|
||||
const handleChange = useCallback(
|
||||
(e: React.ChangeEvent<HTMLInputElement>, isChecked: boolean) => {
|
||||
if (!isAuthenticated) {
|
||||
setIsDialogOpen(true);
|
||||
e.preventDefault();
|
||||
return;
|
||||
}
|
||||
setWebSearch(isChecked);
|
||||
},
|
||||
[setWebSearch, setIsDialogOpen, isAuthenticated],
|
||||
);
|
||||
|
||||
const debouncedChange = useMemo(
|
||||
() => debounce(handleChange, 50, { leading: true }),
|
||||
[handleChange],
|
||||
);
|
||||
|
||||
if (!canUseWebSearch) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<CheckboxButton
|
||||
ref={triggerRef}
|
||||
className="max-w-fit"
|
||||
defaultChecked={webSearch}
|
||||
setValue={debouncedChange}
|
||||
label={localize('com_ui_search')}
|
||||
isCheckedClassName="border-blue-600/40 bg-blue-500/10 hover:bg-blue-700/10"
|
||||
icon={<Globe className="icon-md" />}
|
||||
/>
|
||||
<ApiKeyDialog
|
||||
onSubmit={onSubmit}
|
||||
authTypes={authTypes}
|
||||
isOpen={isDialogOpen}
|
||||
triggerRef={triggerRef}
|
||||
register={methods.register}
|
||||
onRevoke={handleRevokeApiKey}
|
||||
onOpenChange={setIsDialogOpen}
|
||||
handleSubmit={methods.handleSubmit}
|
||||
isToolAuthenticated={isAuthenticated}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export default memo(WebSearch);
|
||||
@@ -10,10 +10,16 @@ import {
|
||||
mapEndpoints,
|
||||
getConvoSwitchLogic,
|
||||
} from '~/utils';
|
||||
import { Input, Label, SelectDropDown, Dialog, DialogClose, DialogButton } from '~/components';
|
||||
import {
|
||||
Input,
|
||||
Label,
|
||||
OGDialog,
|
||||
OGDialogTitle,
|
||||
SelectDropDown,
|
||||
OGDialogContent,
|
||||
} from '~/components';
|
||||
import { useSetIndexOptions, useLocalize, useDebouncedInput } from '~/hooks';
|
||||
import PopoverButtons from '~/components/Chat/Input/PopoverButtons';
|
||||
import DialogTemplate from '~/components/ui/DialogTemplate';
|
||||
import { EndpointSettings } from '~/components/Endpoints';
|
||||
import { useGetEndpointsQuery } from '~/data-provider';
|
||||
import { useChatContext } from '~/Providers';
|
||||
@@ -117,111 +123,107 @@ const EditPresetDialog = ({
|
||||
[queryClient, setOptions],
|
||||
);
|
||||
|
||||
const handleOpenChange = (open: boolean) => {
|
||||
setPresetModalVisible(open);
|
||||
if (!open) {
|
||||
setPreset(null);
|
||||
}
|
||||
};
|
||||
|
||||
const { endpoint: _endpoint, endpointType, model } = preset || {};
|
||||
const endpoint = _endpoint ?? '';
|
||||
|
||||
if (!endpoint) {
|
||||
return null;
|
||||
} else if (isAgentsEndpoint(endpoint)) {
|
||||
}
|
||||
|
||||
if (isAgentsEndpoint(endpoint)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<Dialog
|
||||
open={presetModalVisible}
|
||||
onOpenChange={(open) => {
|
||||
setPresetModalVisible(open);
|
||||
if (!open) {
|
||||
setPreset(null);
|
||||
}
|
||||
}}
|
||||
>
|
||||
<DialogTemplate
|
||||
title={`${localize('com_ui_edit') + ' ' + localize('com_endpoint_preset')} - ${
|
||||
preset?.title
|
||||
}`}
|
||||
className="h-full max-w-full overflow-y-auto pb-4 sm:w-[680px] sm:pb-0 md:h-[720px] md:w-[750px] md:overflow-y-hidden lg:w-[950px] xl:h-[720px]"
|
||||
main={
|
||||
<div className="flex w-full flex-col items-center gap-2 md:h-[550px] md:overflow-y-auto">
|
||||
<div className="grid w-full">
|
||||
<div className="col-span-4 flex flex-col items-start justify-start gap-6 pb-4 md:flex-row">
|
||||
<div className="flex w-full flex-col">
|
||||
<Label htmlFor="preset-name" className="mb-1 text-left text-sm font-medium">
|
||||
{localize('com_endpoint_preset_name')}
|
||||
</Label>
|
||||
<Input
|
||||
id="preset-name"
|
||||
value={(title as string | undefined) ?? ''}
|
||||
onChange={onTitleChange}
|
||||
placeholder={localize('com_endpoint_set_custom_name')}
|
||||
className={cn(
|
||||
defaultTextProps,
|
||||
'flex h-10 max-h-10 w-full resize-none px-3 py-2',
|
||||
removeFocusOutlines,
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex w-full flex-col">
|
||||
<Label htmlFor="endpoint" className="mb-1 text-left text-sm font-medium">
|
||||
{localize('com_endpoint')}
|
||||
</Label>
|
||||
<SelectDropDown
|
||||
value={endpoint || ''}
|
||||
setValue={switchEndpoint}
|
||||
showLabel={false}
|
||||
emptyTitle={true}
|
||||
searchPlaceholder={localize('com_endpoint_search')}
|
||||
availableValues={availableEndpoints}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="col-span-2 flex items-start justify-between gap-4 sm:col-span-4">
|
||||
<div className="flex w-full flex-col">
|
||||
<Label
|
||||
htmlFor="endpoint"
|
||||
className="mb-1 hidden text-left text-sm font-medium sm:block"
|
||||
>
|
||||
{'ㅤ'}
|
||||
</Label>
|
||||
<PopoverButtons
|
||||
buttonClass="ml-0 w-full border border-border-medium p-2 h-[40px] justify-center mt-0"
|
||||
iconClass="hidden lg:block w-4 "
|
||||
endpoint={endpoint}
|
||||
endpointType={endpointType}
|
||||
model={model}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<OGDialog open={presetModalVisible} onOpenChange={handleOpenChange}>
|
||||
<OGDialogContent className="h-[100dvh] max-h-[100dvh] w-full max-w-full overflow-y-auto bg-white dark:border-gray-700 dark:bg-gray-850 dark:text-gray-300 md:h-auto md:max-h-[90vh] md:max-w-[75vw] md:rounded-lg lg:max-w-[950px]">
|
||||
<OGDialogTitle>
|
||||
{`${localize('com_ui_edit')} ${localize('com_endpoint_preset')} - ${preset?.title}`}
|
||||
</OGDialogTitle>
|
||||
|
||||
<div className="flex w-full flex-col gap-2 px-1 pb-4 md:gap-4">
|
||||
{/* Header section with preset name and endpoint */}
|
||||
<div className="grid w-full gap-2 md:grid-cols-2 md:gap-4">
|
||||
<div className="flex w-full flex-col">
|
||||
<Label htmlFor="preset-name" className="mb-1 text-left text-sm font-medium">
|
||||
{localize('com_endpoint_preset_name')}
|
||||
</Label>
|
||||
<Input
|
||||
id="preset-name"
|
||||
value={(title as string | undefined) ?? ''}
|
||||
onChange={onTitleChange}
|
||||
placeholder={localize('com_endpoint_set_custom_name')}
|
||||
className={cn(
|
||||
defaultTextProps,
|
||||
'flex h-10 max-h-10 w-full resize-none px-3 py-2',
|
||||
removeFocusOutlines,
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
<div className="my-4 w-full border-t border-border-medium" />
|
||||
<div className="w-full p-0">
|
||||
<EndpointSettings
|
||||
conversation={preset}
|
||||
setOption={setOption}
|
||||
isPreset={true}
|
||||
className="h-full text-text-primary md:mb-4 md:h-[440px]"
|
||||
<div className="flex w-full flex-col">
|
||||
<Label htmlFor="endpoint" className="mb-1 text-left text-sm font-medium">
|
||||
{localize('com_endpoint')}
|
||||
</Label>
|
||||
<SelectDropDown
|
||||
value={endpoint || ''}
|
||||
setValue={switchEndpoint}
|
||||
showLabel={false}
|
||||
emptyTitle={true}
|
||||
searchPlaceholder={localize('com_endpoint_search')}
|
||||
availableValues={availableEndpoints}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
buttons={
|
||||
<div className="mb-6 md:mb-2">
|
||||
<DialogButton
|
||||
|
||||
{/* PopoverButtons section */}
|
||||
<div className="flex w-full">
|
||||
<PopoverButtons
|
||||
buttonClass="ml-0 w-full border border-border-medium p-2 h-[40px] justify-center mt-0"
|
||||
iconClass="hidden lg:block w-4"
|
||||
endpoint={endpoint}
|
||||
endpointType={endpointType}
|
||||
model={model}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Separator */}
|
||||
<div className="w-full border-t border-border-medium" />
|
||||
|
||||
{/* Settings section */}
|
||||
<div className="w-full flex-1">
|
||||
<EndpointSettings
|
||||
conversation={preset}
|
||||
setOption={setOption}
|
||||
isPreset={true}
|
||||
className="text-text-primary"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Action buttons */}
|
||||
<div className="flex justify-end gap-2 border-t border-border-medium pt-2 md:pt-4">
|
||||
<button
|
||||
onClick={exportPreset}
|
||||
className="border-gray-100 hover:bg-gray-100 dark:border-gray-600 dark:hover:bg-gray-600"
|
||||
className="rounded-md border border-gray-300 bg-white px-3 py-2 text-sm font-medium text-gray-700 hover:bg-gray-50 focus:outline-none focus:ring-2 focus:ring-blue-500 focus:ring-offset-2 dark:border-gray-600 dark:bg-gray-800 dark:text-gray-300 dark:hover:bg-gray-700 md:px-4"
|
||||
>
|
||||
{localize('com_endpoint_export')}
|
||||
</DialogButton>
|
||||
<DialogClose
|
||||
</button>
|
||||
<button
|
||||
onClick={submitPreset}
|
||||
className="ml-2 bg-green-500 text-white hover:bg-green-600 dark:hover:bg-green-600"
|
||||
className="rounded-md bg-green-500 px-3 py-2 text-sm font-medium text-white hover:bg-green-600 focus:outline-none focus:ring-2 focus:ring-green-500 focus:ring-offset-2 md:px-4"
|
||||
>
|
||||
{localize('com_ui_save')}
|
||||
</DialogClose>
|
||||
</button>
|
||||
</div>
|
||||
}
|
||||
footerClassName="bg-white dark:bg-gray-700"
|
||||
/>
|
||||
</Dialog>
|
||||
</div>
|
||||
</OGDialogContent>
|
||||
</OGDialog>
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
@@ -1,11 +1,17 @@
|
||||
import { memo, useMemo, useState } from 'react';
|
||||
import { useRecoilValue, useRecoilState } from 'recoil';
|
||||
import { useRecoilState } from 'recoil';
|
||||
import { ContentTypes } from 'librechat-data-provider';
|
||||
import type { TMessageContentParts, TAttachment, Agents } from 'librechat-data-provider';
|
||||
import type {
|
||||
TMessageContentParts,
|
||||
SearchResultData,
|
||||
TAttachment,
|
||||
Agents,
|
||||
} from 'librechat-data-provider';
|
||||
import { ThinkingButton } from '~/components/Artifacts/Thinking';
|
||||
import { MessageContext, SearchContext } from '~/Providers';
|
||||
import Sources from '~/components/Web/Sources';
|
||||
import useLocalize from '~/hooks/useLocalize';
|
||||
import { mapAttachments } from '~/utils/map';
|
||||
import { MessageContext } from '~/Providers';
|
||||
import { EditTextPart } from './Parts';
|
||||
import store from '~/store';
|
||||
import Part from './Part';
|
||||
@@ -15,6 +21,7 @@ type ContentPartsProps = {
|
||||
messageId: string;
|
||||
conversationId?: string | null;
|
||||
attachments?: TAttachment[];
|
||||
searchResults?: { [key: string]: SearchResultData };
|
||||
isCreatedByUser: boolean;
|
||||
isLast: boolean;
|
||||
isSubmitting: boolean;
|
||||
@@ -33,6 +40,7 @@ const ContentParts = memo(
|
||||
messageId,
|
||||
conversationId,
|
||||
attachments,
|
||||
searchResults,
|
||||
isCreatedByUser,
|
||||
isLast,
|
||||
isSubmitting,
|
||||
@@ -44,11 +52,7 @@ const ContentParts = memo(
|
||||
const localize = useLocalize();
|
||||
const [showThinking, setShowThinking] = useRecoilState<boolean>(store.showThinking);
|
||||
const [isExpanded, setIsExpanded] = useState(showThinking);
|
||||
const messageAttachmentsMap = useRecoilValue(store.messageAttachmentsMap);
|
||||
const attachmentMap = useMemo(
|
||||
() => mapAttachments(attachments ?? messageAttachmentsMap[messageId] ?? []),
|
||||
[attachments, messageAttachmentsMap, messageId],
|
||||
);
|
||||
const attachmentMap = useMemo(() => mapAttachments(attachments ?? []), [attachments]);
|
||||
|
||||
const hasReasoningParts = useMemo(() => {
|
||||
const hasThinkPart = content?.some((part) => part?.type === ContentTypes.THINK) ?? false;
|
||||
@@ -98,53 +102,56 @@ const ContentParts = memo(
|
||||
|
||||
return (
|
||||
<>
|
||||
{hasReasoningParts && (
|
||||
<div className="mb-5">
|
||||
<ThinkingButton
|
||||
isExpanded={isExpanded}
|
||||
onClick={() =>
|
||||
setIsExpanded((prev) => {
|
||||
const val = !prev;
|
||||
setShowThinking(val);
|
||||
return val;
|
||||
})
|
||||
}
|
||||
label={
|
||||
isSubmitting && isLast ? localize('com_ui_thinking') : localize('com_ui_thoughts')
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{content
|
||||
.filter((part) => part)
|
||||
.map((part, idx) => {
|
||||
const toolCallId =
|
||||
(part?.[ContentTypes.TOOL_CALL] as Agents.ToolCall | undefined)?.id ?? '';
|
||||
const attachments = attachmentMap[toolCallId];
|
||||
<SearchContext.Provider value={{ searchResults }}>
|
||||
<Sources />
|
||||
{hasReasoningParts && (
|
||||
<div className="mb-5">
|
||||
<ThinkingButton
|
||||
isExpanded={isExpanded}
|
||||
onClick={() =>
|
||||
setIsExpanded((prev) => {
|
||||
const val = !prev;
|
||||
setShowThinking(val);
|
||||
return val;
|
||||
})
|
||||
}
|
||||
label={
|
||||
isSubmitting && isLast ? localize('com_ui_thinking') : localize('com_ui_thoughts')
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{content
|
||||
.filter((part) => part)
|
||||
.map((part, idx) => {
|
||||
const toolCallId =
|
||||
(part?.[ContentTypes.TOOL_CALL] as Agents.ToolCall | undefined)?.id ?? '';
|
||||
const attachments = attachmentMap[toolCallId];
|
||||
|
||||
return (
|
||||
<MessageContext.Provider
|
||||
key={`provider-${messageId}-${idx}`}
|
||||
value={{
|
||||
messageId,
|
||||
conversationId,
|
||||
partIndex: idx,
|
||||
isExpanded,
|
||||
nextType: content[idx + 1]?.type,
|
||||
}}
|
||||
>
|
||||
<Part
|
||||
part={part}
|
||||
attachments={attachments}
|
||||
isSubmitting={isSubmitting}
|
||||
key={`part-${messageId}-${idx}`}
|
||||
isCreatedByUser={isCreatedByUser}
|
||||
isLast={idx === content.length - 1}
|
||||
showCursor={idx === content.length - 1 && isLast}
|
||||
/>
|
||||
</MessageContext.Provider>
|
||||
);
|
||||
})}
|
||||
return (
|
||||
<MessageContext.Provider
|
||||
key={`provider-${messageId}-${idx}`}
|
||||
value={{
|
||||
messageId,
|
||||
isExpanded,
|
||||
conversationId,
|
||||
partIndex: idx,
|
||||
nextType: content[idx + 1]?.type,
|
||||
}}
|
||||
>
|
||||
<Part
|
||||
part={part}
|
||||
attachments={attachments}
|
||||
isSubmitting={isSubmitting}
|
||||
key={`part-${messageId}-${idx}`}
|
||||
isCreatedByUser={isCreatedByUser}
|
||||
isLast={idx === content.length - 1}
|
||||
showCursor={idx === content.length - 1 && isLast}
|
||||
/>
|
||||
</MessageContext.Provider>
|
||||
);
|
||||
})}
|
||||
</SearchContext.Provider>
|
||||
</>
|
||||
);
|
||||
},
|
||||
|
||||
@@ -15,10 +15,12 @@ import {
|
||||
CodeBlockProvider,
|
||||
useCodeBlockContext,
|
||||
} from '~/Providers';
|
||||
import { Citation, CompositeCitation, HighlightedText } from '~/components/Web/Citation';
|
||||
import { Artifact, artifactPlugin } from '~/components/Artifacts/Artifact';
|
||||
import { langSubset, preprocessLaTeX, handleDoubleClick } from '~/utils';
|
||||
import CodeBlock from '~/components/Messages/Content/CodeBlock';
|
||||
import useHasAccess from '~/hooks/Roles/useHasAccess';
|
||||
import { unicodeCitation } from '~/components/Web';
|
||||
import { useFileDownload } from '~/data-provider';
|
||||
import useLocalize from '~/hooks/useLocalize';
|
||||
import store from '~/store';
|
||||
@@ -197,16 +199,14 @@ const Markdown = memo(({ content = '', isLatestMessage }: TContentProps) => {
|
||||
[],
|
||||
);
|
||||
|
||||
const remarkPlugins: Pluggable[] = useMemo(
|
||||
() => [
|
||||
supersub,
|
||||
remarkGfm,
|
||||
remarkDirective,
|
||||
artifactPlugin,
|
||||
[remarkMath, { singleDollarTextMath: true }],
|
||||
],
|
||||
[],
|
||||
);
|
||||
const remarkPlugins: Pluggable[] = [
|
||||
supersub,
|
||||
remarkGfm,
|
||||
remarkDirective,
|
||||
artifactPlugin,
|
||||
[remarkMath, { singleDollarTextMath: true }],
|
||||
unicodeCitation,
|
||||
];
|
||||
|
||||
if (isInitializing) {
|
||||
return (
|
||||
@@ -232,6 +232,9 @@ const Markdown = memo(({ content = '', isLatestMessage }: TContentProps) => {
|
||||
a,
|
||||
p,
|
||||
artifact: Artifact,
|
||||
citation: Citation,
|
||||
'highlighted-text': HighlightedText,
|
||||
'composite-citation': CompositeCitation,
|
||||
} as {
|
||||
[nodeType: string]: React.ElementType;
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ import { ErrorMessage } from './MessageContent';
|
||||
import RetrievalCall from './RetrievalCall';
|
||||
import CodeAnalyze from './CodeAnalyze';
|
||||
import Container from './Container';
|
||||
import WebSearch from './WebSearch';
|
||||
import ToolCall from './ToolCall';
|
||||
import ImageGen from './ImageGen';
|
||||
import Image from './Image';
|
||||
@@ -107,6 +108,16 @@ const Part = memo(
|
||||
attachments={attachments}
|
||||
/>
|
||||
);
|
||||
} else if (isToolCall && toolCall.name === Tools.web_search) {
|
||||
return (
|
||||
<WebSearch
|
||||
output={toolCall.output ?? ''}
|
||||
initialProgress={toolCall.progress ?? 0.1}
|
||||
isSubmitting={isSubmitting}
|
||||
attachments={attachments}
|
||||
isLast={isLast}
|
||||
/>
|
||||
);
|
||||
} else if (isToolCall) {
|
||||
return (
|
||||
<ToolCall
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { memo, useState, useEffect } from 'react';
|
||||
import { imageExtRegex } from 'librechat-data-provider';
|
||||
import { imageExtRegex, Tools } from 'librechat-data-provider';
|
||||
import type { TAttachment, TFile, TAttachmentMetadata } from 'librechat-data-provider';
|
||||
import FileContainer from '~/components/Chat/Input/Files/FileContainer';
|
||||
import Image from '~/components/Chat/Messages/Content/Image';
|
||||
@@ -7,12 +7,12 @@ import { useAttachmentLink } from './LogLink';
|
||||
import { cn } from '~/utils';
|
||||
|
||||
const FileAttachment = memo(({ attachment }: { attachment: Partial<TAttachment> }) => {
|
||||
const [isVisible, setIsVisible] = useState(false);
|
||||
const { handleDownload } = useAttachmentLink({
|
||||
href: attachment.filepath ?? '',
|
||||
filename: attachment.filename ?? '',
|
||||
});
|
||||
const extension = attachment.filename?.split('.').pop();
|
||||
const [isVisible, setIsVisible] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
const timer = setTimeout(() => setIsVisible(true), 50);
|
||||
@@ -84,6 +84,9 @@ export default function Attachment({ attachment }: { attachment?: TAttachment })
|
||||
if (!attachment) {
|
||||
return null;
|
||||
}
|
||||
if (attachment.type === Tools.web_search) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const { width, height, filepath = null } = attachment as TFile & TAttachmentMetadata;
|
||||
const isImage =
|
||||
@@ -115,7 +118,7 @@ export function AttachmentGroup({ attachments }: { attachments?: TAttachment[] }
|
||||
|
||||
if (isImage) {
|
||||
imageAttachments.push(attachment);
|
||||
} else {
|
||||
} else if (attachment.type !== Tools.web_search) {
|
||||
fileAttachments.push(attachment);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,26 +1,40 @@
|
||||
import { Suspense, useMemo } from 'react';
|
||||
import { useRecoilValue } from 'recoil';
|
||||
import { ContentTypes } from 'librechat-data-provider';
|
||||
import type { Agents, TMessage, TMessageContentParts } from 'librechat-data-provider';
|
||||
import type {
|
||||
Agents,
|
||||
TMessage,
|
||||
TAttachment,
|
||||
SearchResultData,
|
||||
TMessageContentParts,
|
||||
} from 'librechat-data-provider';
|
||||
import { UnfinishedMessage } from './MessageContent';
|
||||
import { DelayedRender } from '~/components/ui';
|
||||
import MarkdownLite from './MarkdownLite';
|
||||
import Sources from '~/components/Web/Sources';
|
||||
import { cn, mapAttachments } from '~/utils';
|
||||
import { SearchContext } from '~/Providers';
|
||||
import MarkdownLite from './MarkdownLite';
|
||||
import store from '~/store';
|
||||
import Part from './Part';
|
||||
|
||||
const SearchContent = ({ message }: { message: TMessage }) => {
|
||||
const SearchContent = ({
|
||||
message,
|
||||
attachments,
|
||||
searchResults,
|
||||
}: {
|
||||
message: TMessage;
|
||||
attachments?: TAttachment[];
|
||||
searchResults?: { [key: string]: SearchResultData };
|
||||
}) => {
|
||||
const enableUserMsgMarkdown = useRecoilValue(store.enableUserMsgMarkdown);
|
||||
const { messageId } = message;
|
||||
const messageAttachmentsMap = useRecoilValue(store.messageAttachmentsMap);
|
||||
const attachmentMap = useMemo(
|
||||
() => mapAttachments(message?.attachments ?? messageAttachmentsMap[messageId] ?? []),
|
||||
[message?.attachments, messageAttachmentsMap, messageId],
|
||||
);
|
||||
|
||||
const attachmentMap = useMemo(() => mapAttachments(attachments ?? []), [attachments]);
|
||||
|
||||
if (Array.isArray(message.content) && message.content.length > 0) {
|
||||
return (
|
||||
<>
|
||||
<SearchContext.Provider value={{ searchResults }}>
|
||||
<Sources />
|
||||
{message.content
|
||||
.filter((part: TMessageContentParts | undefined) => part)
|
||||
.map((part: TMessageContentParts | undefined, idx: number) => {
|
||||
@@ -49,7 +63,7 @@ const SearchContent = ({ message }: { message: TMessage }) => {
|
||||
</DelayedRender>
|
||||
</Suspense>
|
||||
)}
|
||||
</>
|
||||
</SearchContext.Provider>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -157,7 +157,11 @@ export default function ToolCall({
|
||||
<ProgressText
|
||||
progress={progress}
|
||||
onClick={() => setShowInfo((prev) => !prev)}
|
||||
inProgressText={localize('com_assistants_running_action')}
|
||||
inProgressText={
|
||||
function_name
|
||||
? localize('com_assistants_running_var', { 0: function_name })
|
||||
: localize('com_assistants_running_action')
|
||||
}
|
||||
authText={
|
||||
!cancelled && authDomain.length > 0 ? localize('com_ui_requires_auth') : undefined
|
||||
}
|
||||
|
||||
client/src/components/Chat/Messages/Content/WebSearch.tsx (new file, 91 lines)
@@ -0,0 +1,91 @@
|
||||
import { useMemo } from 'react';
|
||||
import type { TAttachment } from 'librechat-data-provider';
|
||||
import { StackedFavicons } from '~/components/Web/Sources';
|
||||
import { useSearchContext } from '~/Providers';
|
||||
import ProgressText from './ProgressText';
|
||||
import { useLocalize } from '~/hooks';
|
||||
|
||||
type ProgressKeys =
|
||||
| 'com_ui_web_searching'
|
||||
| 'com_ui_web_searching_again'
|
||||
| 'com_ui_web_search_processing'
|
||||
| 'com_ui_web_search_reading';
|
||||
|
||||
export default function WebSearch({
|
||||
initialProgress: progress = 0.1,
|
||||
isSubmitting,
|
||||
isLast,
|
||||
output,
|
||||
}: {
|
||||
isLast?: boolean;
|
||||
isSubmitting: boolean;
|
||||
output?: string | null;
|
||||
initialProgress: number;
|
||||
attachments?: TAttachment[];
|
||||
}) {
|
||||
const localize = useLocalize();
|
||||
const { searchResults } = useSearchContext();
|
||||
const error = typeof output === 'string' && output.toLowerCase().includes('error processing');
|
||||
const cancelled = (!isSubmitting && progress < 1) || error === true;
|
||||
|
||||
const complete = !isLast && progress === 1;
|
||||
const finalizing = isSubmitting && isLast && progress === 1;
|
||||
const processedSources = useMemo(() => {
|
||||
if (complete && !finalizing) {
|
||||
return [];
|
||||
}
|
||||
if (!searchResults) return [];
|
||||
const values = Object.values(searchResults);
|
||||
const result = values[values.length - 1];
|
||||
if (!result) return [];
|
||||
if (finalizing) {
|
||||
return [...(result.organic || []), ...(result.topStories || [])];
|
||||
}
|
||||
return [...(result.organic || []), ...(result.topStories || [])].filter(
|
||||
(source) => source.processed === true,
|
||||
);
|
||||
}, [searchResults, complete, finalizing]);
|
||||
const turns = useMemo(() => {
|
||||
if (!searchResults) return 0;
|
||||
return Object.values(searchResults).length;
|
||||
}, [searchResults]);
|
||||
|
||||
const clampedProgress = useMemo(() => {
|
||||
return Math.min(progress, 0.99);
|
||||
}, [progress]);
|
||||
|
||||
const showSources = processedSources.length > 0;
|
||||
const progressText = useMemo(() => {
|
||||
let text: ProgressKeys = turns > 1 ? 'com_ui_web_searching_again' : 'com_ui_web_searching';
|
||||
if (showSources) {
|
||||
text = 'com_ui_web_search_processing';
|
||||
}
|
||||
if (finalizing) {
|
||||
text = 'com_ui_web_search_reading';
|
||||
}
|
||||
return localize(text);
|
||||
}, [turns, localize, showSources, finalizing]);
|
||||
|
||||
if (complete || cancelled) {
|
||||
return null;
|
||||
}
|
||||
return (
|
||||
<>
|
||||
<div className="relative my-2.5 flex size-5 shrink-0 items-center gap-2.5">
|
||||
{showSources && (
|
||||
<div className="mr-2">
|
||||
<StackedFavicons sources={processedSources} start={-5} />
|
||||
</div>
|
||||
)}
|
||||
<ProgressText
|
||||
finishedText=""
|
||||
hasInput={false}
|
||||
error={cancelled}
|
||||
isExpanded={false}
|
||||
progress={clampedProgress}
|
||||
inProgressText={progressText}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -2,8 +2,8 @@ import React, { useMemo } from 'react';
|
||||
import { useRecoilValue } from 'recoil';
|
||||
import type { TMessageContentParts } from 'librechat-data-provider';
|
||||
import type { TMessageProps, TMessageIcon } from '~/common';
|
||||
import { useMessageHelpers, useLocalize, useAttachments } from '~/hooks';
|
||||
import MessageIcon from '~/components/Chat/Messages/MessageIcon';
|
||||
import { useMessageHelpers, useLocalize } from '~/hooks';
|
||||
import ContentParts from './Content/ContentParts';
|
||||
import SiblingSwitch from './SiblingSwitch';
|
||||
|
||||
@@ -17,7 +17,10 @@ export default function Message(props: TMessageProps) {
|
||||
const localize = useLocalize();
|
||||
const { message, siblingIdx, siblingCount, setSiblingIdx, currentEditId, setCurrentEditId } =
|
||||
props;
|
||||
|
||||
const { attachments, searchResults } = useAttachments({
|
||||
messageId: message?.messageId,
|
||||
attachments: message?.attachments,
|
||||
});
|
||||
const {
|
||||
edit,
|
||||
index,
|
||||
@@ -91,7 +94,7 @@ export default function Message(props: TMessageProps) {
|
||||
>
|
||||
<div className="m-auto justify-center p-4 py-2 md:gap-6">
|
||||
<div
|
||||
id={messageId}
|
||||
id={messageId ?? ''}
|
||||
aria-label={`message-${message.depth}-${messageId}`}
|
||||
className={cn(baseClasses.common, baseClasses.chat, 'message-render')}
|
||||
>
|
||||
@@ -116,10 +119,11 @@ export default function Message(props: TMessageProps) {
|
||||
isLast={isLast}
|
||||
enterEdit={enterEdit}
|
||||
siblingIdx={siblingIdx}
|
||||
messageId={message.messageId}
|
||||
attachments={attachments}
|
||||
isSubmitting={isSubmitting}
|
||||
searchResults={searchResults}
|
||||
messageId={message.messageId}
|
||||
setSiblingIdx={setSiblingIdx}
|
||||
attachments={message.attachments}
|
||||
isCreatedByUser={message.isCreatedByUser}
|
||||
conversationId={conversation?.conversationId}
|
||||
content={message.content as Array<TMessageContentParts | undefined>}
|
||||
|
||||
@@ -1,16 +1,21 @@
|
||||
import { useState } from 'react';
|
||||
import type { TMessage } from 'librechat-data-provider';
|
||||
import type { TMessage, TAttachment, SearchResultData } from 'librechat-data-provider';
|
||||
import { useLocalize, useCopyToClipboard } from '~/hooks';
|
||||
import { Clipboard, CheckMark } from '~/components/svg';
|
||||
|
||||
type THoverButtons = {
|
||||
message: TMessage;
|
||||
searchResults?: { [key: string]: SearchResultData };
|
||||
};
|
||||
|
||||
export default function MinimalHoverButtons({ message }: THoverButtons) {
|
||||
export default function MinimalHoverButtons({ message, searchResults }: THoverButtons) {
|
||||
const localize = useLocalize();
|
||||
const [isCopied, setIsCopied] = useState(false);
|
||||
const copyToClipboard = useCopyToClipboard({ text: message.text, content: message.content });
|
||||
const copyToClipboard = useCopyToClipboard({
|
||||
text: message.text,
|
||||
content: message.content,
|
||||
searchResults,
|
||||
});
|
||||
|
||||
return (
|
||||
<div className="visible mt-0 flex justify-center gap-1 self-end text-gray-400 lg:justify-start">
|
||||
|
||||
@@ -94,10 +94,10 @@ const MessageRender = memo(
|
||||
() =>
|
||||
showCardRender && !isLatestMessage
|
||||
? () => {
|
||||
logger.log(`Message Card click: Setting ${msg?.messageId} as latest message`);
|
||||
logger.dir(msg);
|
||||
setLatestMessage(msg!);
|
||||
}
|
||||
logger.log(`Message Card click: Setting ${msg?.messageId} as latest message`);
|
||||
logger.dir(msg);
|
||||
setLatestMessage(msg!);
|
||||
}
|
||||
: undefined,
|
||||
[showCardRender, isLatestMessage, msg, setLatestMessage],
|
||||
);
|
||||
|
||||
@@ -7,8 +7,8 @@ import PlaceholderRow from '~/components/Chat/Messages/ui/PlaceholderRow';
|
||||
import SiblingSwitch from '~/components/Chat/Messages/SiblingSwitch';
|
||||
import HoverButtons from '~/components/Chat/Messages/HoverButtons';
|
||||
import MessageIcon from '~/components/Chat/Messages/MessageIcon';
|
||||
import { useAttachments, useMessageActions } from '~/hooks';
|
||||
import SubRow from '~/components/Chat/Messages/SubRow';
|
||||
import { useMessageActions } from '~/hooks';
|
||||
import { cn, logger } from '~/utils';
|
||||
import store from '~/store';
|
||||
|
||||
@@ -34,6 +34,10 @@ const ContentRender = memo(
|
||||
setCurrentEditId,
|
||||
isSubmittingFamily = false,
|
||||
}: ContentRenderProps) => {
|
||||
const { attachments, searchResults } = useAttachments({
|
||||
messageId: msg?.messageId,
|
||||
attachments: msg?.attachments,
|
||||
});
|
||||
const {
|
||||
edit,
|
||||
index,
|
||||
@@ -50,6 +54,7 @@ const ContentRender = memo(
|
||||
regenerateMessage,
|
||||
} = useMessageActions({
|
||||
message: msg,
|
||||
searchResults,
|
||||
currentEditId,
|
||||
isMultiMessage,
|
||||
setCurrentEditId,
|
||||
@@ -91,10 +96,10 @@ const ContentRender = memo(
|
||||
() =>
|
||||
showCardRender && !isLatestMessage
|
||||
? () => {
|
||||
logger.log(`Message Card click: Setting ${msg?.messageId} as latest message`);
|
||||
logger.dir(msg);
|
||||
setLatestMessage(msg!);
|
||||
}
|
||||
logger.log(`Message Card click: Setting ${msg?.messageId} as latest message`);
|
||||
logger.dir(msg);
|
||||
setLatestMessage(msg!);
|
||||
}
|
||||
: undefined,
|
||||
[showCardRender, isLatestMessage, msg, setLatestMessage],
|
||||
);
|
||||
@@ -164,9 +169,10 @@ const ContentRender = memo(
|
||||
enterEdit={enterEdit}
|
||||
siblingIdx={siblingIdx}
|
||||
messageId={msg.messageId}
|
||||
attachments={attachments}
|
||||
isSubmitting={isSubmitting}
|
||||
searchResults={searchResults}
|
||||
setSiblingIdx={setSiblingIdx}
|
||||
attachments={msg.attachments}
|
||||
isCreatedByUser={msg.isCreatedByUser}
|
||||
conversationId={conversation?.conversationId}
|
||||
content={msg.content as Array<TMessageContentParts | undefined>}
@@ -7,7 +7,8 @@ import SiblingSwitch from '~/components/Chat/Messages/SiblingSwitch';
import { Plugin } from '~/components/Messages/Content';
import SubRow from '~/components/Chat/Messages/SubRow';
import { MessageContext } from '~/Providers';
// eslint-disable-next-line import/no-cycle
import { useAttachments } from '~/hooks';

import MultiMessage from './MultiMessage';
import { cn } from '~/utils';
import store from '~/store';

@@ -25,6 +26,11 @@ export default function Message(props: TMessageProps) {
    setCurrentEditId,
  } = props;

  const { attachments, searchResults } = useAttachments({
    messageId: message?.messageId,
    attachments: message?.attachments,
  });

  if (!message) {
    return null;
  }

@@ -48,8 +54,8 @@ export default function Message(props: TMessageProps) {
  return (
    <>
      <div className="text-token-text-primary w-full border-0 bg-transparent dark:border-0 dark:bg-transparent">
        <div className="m-auto justify-center p-4 py-2 md:gap-6 ">
          <div className="final-completion group mx-auto flex flex-1 gap-3 md:max-w-3xl md:px-5 lg:max-w-[40rem] lg:px-1 xl:max-w-[48rem] xl:px-5">
        <div className="m-auto justify-center p-4 py-2 md:gap-6">
          <div className="final-completion group mx-auto flex flex-1 gap-3 md:max-w-[47rem] md:px-5 lg:px-1 xl:max-w-[55rem] xl:px-5">
            <div className="relative flex flex-shrink-0 flex-col items-end">
              <div>
                <div className="pt-0.5">

@@ -68,13 +74,18 @@ export default function Message(props: TMessageProps) {
              <MessageContext.Provider
                value={{
                  messageId,
                  isExpanded: false,
                  conversationId: conversation?.conversationId,
                }}
              >
                {/* Legacy Plugins */}
                {message.plugin && <Plugin plugin={message.plugin} />}
                {message.content ? (
                  <SearchContent message={message} />
                  <SearchContent
                    message={message}
                    attachments={attachments}
                    searchResults={searchResults}
                  />
                ) : (
                  <MessageContent
                    edit={false}

@@ -100,7 +111,7 @@ export default function Message(props: TMessageProps) {
                  siblingCount={siblingCount}
                  setSiblingIdx={setSiblingIdx}
                />
                <MinimalHoverButtons message={message} />
                <MinimalHoverButtons message={message} searchResults={searchResults} />
              </SubRow>
            </div>
          </div>
@@ -15,7 +15,7 @@ function SharedView() {
  const { shareId } = useParams();
  const { data, isLoading } = useGetSharedMessages(shareId ?? '');
  const dataTree = data && buildTree({ messages: data.messages });
  const messagesTree = dataTree?.length === 0 ? null : dataTree ?? null;
  const messagesTree = dataTree?.length === 0 ? null : (dataTree ?? null);

  // configure document title
  let docTitle = '';

@@ -37,7 +37,7 @@ function SharedView() {
  } else if (data && messagesTree && messagesTree.length !== 0) {
    content = (
      <>
        <div className="final-completion group mx-auto flex min-w-[40rem] flex-col gap-3 pb-6 pt-4 md:max-w-3xl md:px-5 lg:max-w-[40rem] lg:px-1 xl:max-w-[48rem] xl:px-5">
        <div className="final-completion group mx-auto flex min-w-[40rem] flex-col gap-3 pb-6 pt-4 md:max-w-[47rem] md:px-5 lg:px-1 xl:max-w-[55rem] xl:px-5">
          <h1 className="text-4xl font-bold">{data.title}</h1>
          <div className="border-b border-border-medium pb-6 text-base text-text-secondary">
            {new Date(data.createdAt).toLocaleDateString('en-US', {

@@ -53,7 +53,7 @@ function SharedView() {
    );
  } else {
    content = (
      <div className="flex h-screen items-center justify-center ">
      <div className="flex h-screen items-center justify-center">
        {localize('com_ui_shared_link_not_found')}
      </div>
    );
@@ -13,6 +13,7 @@ import { processAgentOption } from '~/utils';
import Instructions from './Instructions';
import AgentAvatar from './AgentAvatar';
import FileContext from './FileContext';
import SearchForm from './Search/Form';
import { useLocalize } from '~/hooks';
import FileSearch from './FileSearch';
import Artifacts from './Artifacts';

@@ -73,6 +74,10 @@ export default function AgentConfig({
    () => agentsConfig?.capabilities?.includes(AgentCapabilities.file_search) ?? false,
    [agentsConfig],
  );
  const webSearchEnabled = useMemo(
    () => agentsConfig?.capabilities?.includes(AgentCapabilities.web_search) ?? false,
    [agentsConfig],
  );
  const codeEnabled = useMemo(
    () => agentsConfig?.capabilities?.includes(AgentCapabilities.execute_code) ?? false,
    [agentsConfig],

@@ -257,13 +262,19 @@ export default function AgentConfig({
            </div>
          </button>
        </div>
        {(codeEnabled || fileSearchEnabled || artifactsEnabled || ocrEnabled) && (
        {(codeEnabled ||
          fileSearchEnabled ||
          artifactsEnabled ||
          ocrEnabled ||
          webSearchEnabled) && (
          <div className="mb-4 flex w-full flex-col items-start gap-3">
            <label className="text-token-text-primary block font-medium">
              {localize('com_assistants_capabilities')}
            </label>
            {/* Code Execution */}
            {codeEnabled && <CodeForm agent_id={agent_id} files={code_files} />}
            {/* Web Search */}
            {webSearchEnabled && <SearchForm />}
            {/* File Context (OCR) */}
            {ocrEnabled && <FileContext agent_id={agent_id} files={context_files} />}
            {/* Artifacts */}
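In AgentConfig, each capability flag (web search included) is derived the same way: a memoized check of agentsConfig.capabilities, which then gates whether the corresponding form renders. A minimal sketch of that gating follows; the Capability enum and hook name are illustrative stand-ins for the real AgentCapabilities enum and inline useMemo calls shown above.

import { useMemo } from 'react';

// Illustrative stand-in for the AgentCapabilities enum from librechat-data-provider.
enum Capability {
  execute_code = 'execute_code',
  file_search = 'file_search',
  web_search = 'web_search',
  ocr = 'ocr',
  artifacts = 'artifacts',
}

interface AgentsConfigLike {
  capabilities?: Capability[];
}

// Returns true only when the endpoint config explicitly lists the capability;
// a missing config or capability list disables the feature by default.
function useCapabilityEnabled(agentsConfig: AgentsConfigLike | null, capability: Capability) {
  return useMemo(
    () => agentsConfig?.capabilities?.includes(capability) ?? false,
    [agentsConfig, capability],
  );
}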
@@ -1,4 +1,3 @@
import React from 'react';
import { useWatch, useFormContext } from 'react-hook-form';
import { SystemRoles, Permissions, PermissionTypes } from 'librechat-data-provider';
import type { AgentForm, AgentPanelProps } from '~/common';

@@ -11,6 +10,7 @@ import DeleteButton from './DeleteButton';
import { Spinner } from '~/components';
import ShareAgent from './ShareAgent';
import { Panel } from '~/common';
import VersionButton from './Version/VersionButton';

export default function AgentFooter({
  activePanel,

@@ -55,6 +55,7 @@ export default function AgentFooter({
  return (
    <div className="mb-1 flex w-full flex-col gap-2">
      {showButtons && <AdvancedButton setActivePanel={setActivePanel} />}
      {showButtons && agent_id && <VersionButton setActivePanel={setActivePanel} />}
      {user?.role === SystemRoles.ADMIN && showButtons && <AdminSettings />}
      {/* Context Button */}
      <div className="flex items-center justify-end gap-2">
@@ -87,7 +87,42 @@ export default function AgentPanel({
      });
    },
    onError: (err) => {
      const error = err as Error;
      const error = err as Error & {
        statusCode?: number;
        details?: { duplicateVersion?: any; versionIndex?: number };
        response?: { status?: number; data?: any };
      };

      const isDuplicateVersionError =
        (error.statusCode === 409 && error.details?.duplicateVersion) ||
        (error.response?.status === 409 && error.response?.data?.details?.duplicateVersion);

      if (isDuplicateVersionError) {
        let versionIndex: number | undefined = undefined;

        if (error.details?.versionIndex !== undefined) {
          versionIndex = error.details.versionIndex;
        } else if (error.response?.data?.details?.versionIndex !== undefined) {
          versionIndex = error.response.data.details.versionIndex;
        }

        if (versionIndex === undefined || versionIndex < 0) {
          showToast({
            message: localize('com_agents_update_error'),
            status: 'error',
            duration: 5000,
          });
        } else {
          showToast({
            message: localize('com_ui_agent_version_duplicate', { versionIndex: versionIndex + 1 }),
            status: 'error',
            duration: 10000,
          });
        }

        return;
      }

      showToast({
        message: `${localize('com_agents_update_error')}${
          error.message ? ` ${localize('com_ui_error')}: ${error.message}` : ''
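The onError handler above distinguishes a 409 duplicate-version conflict from other update failures, reading the duplicated version's index from either the normalized error object or the raw response payload. A standalone sketch of that extraction is below; the UpdateAgentError type and helper name are illustrative, and only the fields read in the hunk are assumed to exist.

// Illustrative error shape; in the diff the same fields are read from either a
// normalized error object or the underlying response payload.
type UpdateAgentError = Error & {
  statusCode?: number;
  details?: { duplicateVersion?: unknown; versionIndex?: number };
  response?: {
    status?: number;
    data?: { details?: { duplicateVersion?: unknown; versionIndex?: number } };
  };
};

interface DuplicateVersionInfo {
  isDuplicate: boolean;
  versionIndex?: number;
}

// Returns whether the failure was a 409 duplicate-version conflict and, if so,
// which existing version the submitted payload duplicated.
function readDuplicateVersion(error: UpdateAgentError): DuplicateVersionInfo {
  const nestedDetails = error.response?.data?.details;
  const isDuplicate =
    (error.statusCode === 409 && Boolean(error.details?.duplicateVersion)) ||
    (error.response?.status === 409 && Boolean(nestedDetails?.duplicateVersion));

  if (!isDuplicate) {
    return { isDuplicate: false };
  }

  // Preserve a versionIndex of 0: only undefined falls through to the fallback.
  const versionIndex = error.details?.versionIndex ?? nestedDetails?.versionIndex;
  return { isDuplicate: true, versionIndex };
}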
@@ -127,6 +162,9 @@ export default function AgentPanel({
    if (data.file_search === true) {
      tools.push(Tools.file_search);
    }
    if (data.web_search === true) {
      tools.push(Tools.web_search);
    }

    const {
      name,
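This hunk maps boolean capability fields on the submitted form to entries in the agent's tools array, handling web_search the same way file_search already was. A small hedged sketch of that mapping; the form interface and ToolNames constant are illustrative, with string values mirroring the Tools members referenced in the hunk.

// Illustrative form shape; the real AgentForm type lives in ~/common.
interface CapabilityFormFields {
  file_search?: boolean;
  web_search?: boolean;
}

// String values mirroring the Tools constants referenced in the hunk.
const ToolNames = {
  file_search: 'file_search',
  web_search: 'web_search',
} as const;

// Collects the enabled capabilities into the tools array sent with the agent payload.
function collectTools(data: CapabilityFormFields): string[] {
  const tools: string[] = [];
  if (data.file_search === true) {
    tools.push(ToolNames.file_search);
  }
  if (data.web_search === true) {
    tools.push(ToolNames.web_search);
  }
  return tools;
}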
@@ -1,11 +1,12 @@
import { useState, useEffect, useMemo } from 'react';
import { EModelEndpoint } from 'librechat-data-provider';
import { EModelEndpoint, AgentCapabilities } from 'librechat-data-provider';
import type { ActionsEndpoint } from '~/common';
import type { Action, TConfig, TEndpointsConfig } from 'librechat-data-provider';
import { useGetActionsQuery, useGetEndpointsQuery } from '~/data-provider';
import type { Action, TConfig, TEndpointsConfig, TAgentsEndpoint } from 'librechat-data-provider';
import { useGetActionsQuery, useGetEndpointsQuery, useCreateAgentMutation } from '~/data-provider';
import { useChatContext } from '~/Providers';
import ActionsPanel from './ActionsPanel';
import AgentPanel from './AgentPanel';
import VersionPanel from './Version/VersionPanel';
import { Panel } from '~/common';

export default function AgentPanelSwitch() {

@@ -15,11 +16,19 @@ export default function AgentPanelSwitch() {
  const [currentAgentId, setCurrentAgentId] = useState<string | undefined>(conversation?.agent_id);
  const { data: actions = [] } = useGetActionsQuery(conversation?.endpoint as ActionsEndpoint);
  const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
  const createMutation = useCreateAgentMutation();

  const agentsConfig = useMemo(
    () => endpointsConfig?.[EModelEndpoint.agents] ?? ({} as TConfig | null),
    [endpointsConfig],
  );
  const agentsConfig = useMemo<TAgentsEndpoint | null>(() => {
    const config = endpointsConfig?.[EModelEndpoint.agents] ?? null;
    if (!config) return null;

    return {
      ...(config as TConfig),
      capabilities: Array.isArray(config.capabilities)
        ? config.capabilities.map((cap) => cap as unknown as AgentCapabilities)
        : ([] as AgentCapabilities[]),
    } as TAgentsEndpoint;
  }, [endpointsConfig]);

  useEffect(() => {
    const agent_id = conversation?.agent_id ?? '';
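The replacement memo above normalizes the raw agents endpoint config into a TAgentsEndpoint with a guaranteed capabilities array, and returns null when the agents endpoint is not configured at all. A compact sketch of that normalization, using illustrative local types in place of TConfig, TAgentsEndpoint, and AgentCapabilities:

// Illustrative local stand-ins for TConfig / TAgentsEndpoint / AgentCapabilities.
type RawEndpointConfig = { capabilities?: unknown } & Record<string, unknown>;
type Capability = 'execute_code' | 'file_search' | 'web_search' | 'ocr' | 'artifacts';
type NormalizedAgentsConfig = RawEndpointConfig & { capabilities: Capability[] };

// Returns null when the agents endpoint is absent; otherwise guarantees that
// capabilities is an array, so consumers can call .includes() without guards.
function normalizeAgentsConfig(
  config: RawEndpointConfig | null | undefined,
): NormalizedAgentsConfig | null {
  if (!config) {
    return null;
  }
  return {
    ...config,
    capabilities: Array.isArray(config.capabilities) ? (config.capabilities as Capability[]) : [],
  };
}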
@@ -41,12 +50,23 @@ export default function AgentPanelSwitch() {
    setActivePanel,
    setCurrentAgentId,
    agent_id: currentAgentId,
    createMutation,
  };

  if (activePanel === Panel.actions) {
    return <ActionsPanel {...commonProps} />;
  }

  if (activePanel === Panel.version) {
    return (
      <VersionPanel
        setActivePanel={setActivePanel}
        agentsConfig={agentsConfig}
        selectedAgentId={currentAgentId}
      />
    );
  }

  return (
    <AgentPanel {...commonProps} agentsConfig={agentsConfig} endpointsConfig={endpointsConfig} />
  );

@@ -52,6 +52,7 @@ export default function AgentSelect({
  };

  const capabilities: TAgentCapabilities = {
    [AgentCapabilities.web_search]: false,
    [AgentCapabilities.file_search]: false,
    [AgentCapabilities.execute_code]: false,
    [AgentCapabilities.end_after_tools]: false,
Some files were not shown because too many files have changed in this diff.