Compare commits
1 commit
refactor/p... ... feat/searc...

| Author | SHA1 | Date |
|---|---|---|
|  | 4ed22aaa59 |  |

89 .env.example
@@ -20,8 +20,8 @@ DOMAIN_CLIENT=http://localhost:3080
DOMAIN_SERVER=http://localhost:3080

NO_INDEX=true

# Use the address that is at most n number of hops away from the Express application.
# req.socket.remoteAddress is the first hop, and the rest are looked for in the X-Forwarded-For header from right to left.
# A value of 0 means that the first untrusted address would be req.socket.remoteAddress, i.e. there is no reverse proxy.
# Defaulted to 1.
TRUST_PROXY=1
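The `TRUST_PROXY` comments above describe Express's standard `trust proxy` setting. A minimal sketch of how such a value is typically consumed (an assumption for illustration; LibreChat's actual wiring may differ):

```js
const express = require('express');

const app = express();

// With trust proxy = 1, req.ip resolves one hop past req.socket.remoteAddress,
// i.e. the right-most X-Forwarded-For entry; 0 means no reverse proxy is trusted.
app.set('trust proxy', Number(process.env.TRUST_PROXY ?? 1));

app.get('/ip', (req, res) => res.send(req.ip));
```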
@@ -88,7 +88,7 @@ PROXY=
#============#

ANTHROPIC_API_KEY=user_provided
-# ANTHROPIC_MODELS=claude-opus-4-20250514,claude-sonnet-4-20250514,claude-3-7-sonnet-20250219,claude-3-5-sonnet-20241022,claude-3-5-haiku-20241022,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307
+# ANTHROPIC_MODELS=claude-3-7-sonnet-latest,claude-3-7-sonnet-20250219,claude-3-5-haiku-20241022,claude-3-5-sonnet-20241022,claude-3-5-sonnet-latest,claude-3-5-sonnet-20240620,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307,claude-2.1,claude-2,claude-1.2,claude-1,claude-1-100k,claude-instant-1,claude-instant-1-100k
# ANTHROPIC_REVERSE_PROXY=

#============#

@@ -142,12 +142,12 @@ GOOGLE_KEY=user_provided
# GOOGLE_AUTH_HEADER=true

# Gemini API (AI Studio)
-# GOOGLE_MODELS=gemini-2.5-pro-preview-05-06,gemini-2.5-flash-preview-04-17,gemini-2.0-flash-001,gemini-2.0-flash-exp,gemini-2.0-flash-lite-001,gemini-1.5-pro-002,gemini-1.5-flash-002
+# GOOGLE_MODELS=gemini-2.5-pro-exp-03-25,gemini-2.0-flash-exp,gemini-2.0-flash-thinking-exp-1219,gemini-exp-1121,gemini-exp-1114,gemini-1.5-flash-latest,gemini-1.0-pro,gemini-1.0-pro-001,gemini-1.0-pro-latest,gemini-1.0-pro-vision-latest,gemini-1.5-pro-latest,gemini-pro,gemini-pro-vision

# Vertex AI
-# GOOGLE_MODELS=gemini-2.5-pro-preview-05-06,gemini-2.5-flash-preview-04-17,gemini-2.0-flash-001,gemini-2.0-flash-exp,gemini-2.0-flash-lite-001,gemini-1.5-pro-002,gemini-1.5-flash-002
+# GOOGLE_MODELS=gemini-1.5-flash-preview-0514,gemini-1.5-pro-preview-0514,gemini-1.0-pro-vision-001,gemini-1.0-pro-002,gemini-1.0-pro-001,gemini-pro-vision,gemini-1.0-pro

-# GOOGLE_TITLE_MODEL=gemini-2.0-flash-lite-001
+# GOOGLE_TITLE_MODEL=gemini-pro

# GOOGLE_LOC=us-central1
@@ -443,47 +443,6 @@ OPENID_IMAGE_URL=
# Set to true to automatically redirect to the OpenID provider when a user visits the login page
# This will bypass the login form completely for users, only use this if OpenID is your only authentication method
OPENID_AUTO_REDIRECT=false
# Set to true to use PKCE (Proof Key for Code Exchange) for OpenID authentication
OPENID_USE_PKCE=false
# Set to true to reuse OpenID tokens for authentication management instead of using the MongoDB session and the custom refresh token.
OPENID_REUSE_TOKENS=
# By default, signing key verification results are cached in order to prevent excessive HTTP requests to the JWKS endpoint.
# If a signing key matching the kid is found, this will be cached and the next time this kid is requested the signing key will be served from the cache.
# Default is true.
OPENID_JWKS_URL_CACHE_ENABLED=
OPENID_JWKS_URL_CACHE_TIME= # 600000 ms, i.e. 10 minutes; leave empty to disable caching
# Set to true to trigger the token exchange flow to acquire an access token for the userinfo endpoint.
OPENID_ON_BEHALF_FLOW_FOR_USERINFRO_REQUIRED=
OPENID_ON_BEHALF_FLOW_USERINFRO_SCOPE="user.read" # example scope needed for the Microsoft Graph API
# Set to true to use the OpenID Connect end session endpoint for logout
OPENID_USE_END_SESSION_ENDPOINT=


# SAML
# Note: If OpenID is enabled, SAML authentication will be automatically disabled.
SAML_ENTRY_POINT=
SAML_ISSUER=
SAML_CERT=
SAML_CALLBACK_URL=/oauth/saml/callback
SAML_SESSION_SECRET=

# Attribute mappings (optional)
SAML_EMAIL_CLAIM=
SAML_USERNAME_CLAIM=
SAML_GIVEN_NAME_CLAIM=
SAML_FAMILY_NAME_CLAIM=
SAML_PICTURE_CLAIM=
SAML_NAME_CLAIM=

# Login button settings (optional)
SAML_BUTTON_LABEL=
SAML_IMAGE_URL=

# Whether the SAML Response should be signed.
# - If "true", the entire `SAML Response` will be signed.
# - If "false" or unset, only the `SAML Assertion` will be signed (default behavior).
# SAML_USE_AUTHN_RESPONSE_SIGNED=


# LDAP
LDAP_URL=
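The `OPENID_JWKS_URL_CACHE_*` variables in the block above describe standard JWKS signing-key caching. A sketch of that behavior using the `jwks-rsa` package (an illustrative assumption, not necessarily the client LibreChat uses; the issuer URL and kid are placeholders):

```js
const jwksRsa = require('jwks-rsa');

// Signing keys fetched from the JWKS endpoint are cached, so repeated tokens
// carrying the same `kid` do not trigger extra HTTP requests.
const client = jwksRsa({
  jwksUri: 'https://idp.example.com/.well-known/jwks.json', // hypothetical issuer
  cache: process.env.OPENID_JWKS_URL_CACHE_ENABLED !== 'false', // cached by default
  cacheMaxAge: Number(process.env.OPENID_JWKS_URL_CACHE_TIME || 600000), // 10 minutes
});

// First lookup hits the endpoint; later lookups for the same kid come from cache.
client.getSigningKey('example-kid').then((key) => console.log(key.getPublicKey()));
```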
@@ -604,9 +563,9 @@ HELP_AND_FAQ_URL=https://librechat.ai
# users always get the latest version. Customize #
# only if you understand caching implications.  #

-# INDEX_CACHE_CONTROL=no-cache, no-store, must-revalidate
-# INDEX_PRAGMA=no-cache
-# INDEX_EXPIRES=0
+# INDEX_HTML_CACHE_CONTROL=no-cache, no-store, must-revalidate
+# INDEX_HTML_PRAGMA=no-cache
+# INDEX_HTML_EXPIRES=0

# no-cache: Forces validation with server before using cached version
# no-store: Prevents storing the response entirely
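The `no-cache` / `no-store` notes above map directly onto HTTP response headers. A sketch of how an index route might apply these values (an illustrative Express handler; the file path is a placeholder, not LibreChat's actual route):

```js
const app = require('express')();

app.get('/', (req, res) => {
  // no-cache forces revalidation with the server; no-store forbids caching entirely.
  res.set('Cache-Control', process.env.INDEX_CACHE_CONTROL || 'no-cache, no-store, must-revalidate');
  res.set('Pragma', process.env.INDEX_PRAGMA || 'no-cache');
  res.set('Expires', process.env.INDEX_EXPIRES || '0');
  res.sendFile('index.html', { root: 'client/dist' }); // placeholder root
});
```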
@@ -616,33 +575,3 @@ HELP_AND_FAQ_URL=https://librechat.ai
#                OpenWeather                          #
#=====================================================#
OPENWEATHER_API_KEY=

#====================================#
#  LibreChat Code Interpreter API    #
#====================================#

# https://code.librechat.ai
# LIBRECHAT_CODE_API_KEY=your-key

#======================#
#      Web Search      #
#======================#

# Note: All of the following variable names can be customized.
# Omit values to allow user to provide them.

# For more information on configuration values, see:
# https://librechat.ai/docs/features/web_search

# Search Provider (Required)
# SERPER_API_KEY=your_serper_api_key

# Scraper (Required)
# FIRECRAWL_API_KEY=your_firecrawl_api_key
# Optional: Custom Firecrawl API URL
# FIRECRAWL_API_URL=your_firecrawl_api_url

# Reranker (Required)
# JINA_API_KEY=your_jina_api_key
# or
# COHERE_API_KEY=your_cohere_api_key
@@ -4,7 +4,6 @@ on:
  push:
    tags:
      - 'v*.*.*'
  workflow_dispatch:

jobs:
  generate-release-changelog-pr:

@@ -89,7 +88,7 @@ jobs:
          base: main
          branch: "changelog/${{ github.ref_name }}"
          reviewers: danny-avila
-         title: "📜 docs: Changelog for release ${{ github.ref_name }}"
+         title: "chore: update CHANGELOG for release ${{ github.ref_name }}"
          body: |
            **Description**:
            - This PR updates the CHANGELOG.md by removing the "Unreleased" section and adding new release notes for release ${{ github.ref_name }} above previous releases.
@@ -3,7 +3,6 @@ name: Generate Unreleased Changelog PR
on:
  schedule:
    - cron: "0 0 * * 1" # Runs every Monday at 00:00 UTC
  workflow_dispatch:

jobs:
  generate-unreleased-changelog-pr:

@@ -99,9 +98,9 @@ jobs:
          branch: "changelog/unreleased-update"
          sign-commits: true
          commit-message: "action: update Unreleased changelog"
-         title: "📜 docs: Unreleased Changelog"
+         title: "action: update Unreleased changelog"
          body: |
            **Description**:
            - This PR updates the Unreleased section in CHANGELOG.md.
            - It compares the current main branch with the latest version tag (determined as ${{ steps.get_latest_tag.outputs.tag }}),
              regenerates the Unreleased changelog, removes any old Unreleased block, and inserts the new content.
7 .github/workflows/helmcharts.yml

@@ -26,15 +26,8 @@ jobs:
        uses: azure/setup-helm@v4
        env:
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"

      - name: Build Subchart Deps
        run: |
          cd helm/librechat-rag-api
          helm dependency build

      - name: Run chart-releaser
        uses: helm/chart-releaser-action@v1.6.0
        with:
          charts_dir: helm
          skip_existing: true
        env:
          CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
37 .github/workflows/i18n-unused-keys.yml

@@ -22,7 +22,7 @@ jobs:

          # Define paths
          I18N_FILE="client/src/locales/en/translation.json"
-         SOURCE_DIRS=("client/src" "api" "packages/data-provider/src")
+         SOURCE_DIRS=("client/src" "api")

          # Check if translation file exists
          if [[ ! -f "$I18N_FILE" ]]; then

@@ -39,35 +39,12 @@ jobs:
          # Check if each key is used in the source code
          for KEY in $KEYS; do
            FOUND=false

-           # Special case for dynamically constructed special variable keys
-           if [[ "$KEY" == com_ui_special_var_* ]]; then
-             # Check if TSpecialVarLabel is used in the codebase
-             for DIR in "${SOURCE_DIRS[@]}"; do
-               if grep -r --include=\*.{js,jsx,ts,tsx} -q "TSpecialVarLabel" "$DIR"; then
-                 FOUND=true
-                 break
-               fi
-             done
-
-             # Also check if the key is directly used somewhere
-             if [[ "$FOUND" == false ]]; then
-               for DIR in "${SOURCE_DIRS[@]}"; do
-                 if grep -r --include=\*.{js,jsx,ts,tsx} -q "$KEY" "$DIR"; then
-                   FOUND=true
-                   break
-                 fi
-               done
+           for DIR in "${SOURCE_DIRS[@]}"; do
+             if grep -r --include=\*.{js,jsx,ts,tsx} -q "$KEY" "$DIR"; then
+               FOUND=true
+               break
+             fi
-           else
-             # Regular check for other keys
-             for DIR in "${SOURCE_DIRS[@]}"; do
-               if grep -r --include=\*.{js,jsx,ts,tsx} -q "$KEY" "$DIR"; then
-                 FOUND=true
-                 break
-               fi
-             done
-           fi
            done

            if [[ "$FOUND" == false ]]; then
              UNUSED_KEYS+=("$KEY")

@@ -113,4 +90,4 @@ jobs:

      - name: Fail workflow if unused keys found
        if: env.unused_keys != '[]'
        run: exit 1
12 .gitignore

@@ -52,9 +52,8 @@ bower_components/
*.d.ts
!vite-env.d.ts

# AI
# Cline
.clineignore
.cursor

# Floobits
.floo

@@ -114,13 +113,4 @@ uploads/

# owner
release/

# Helm
helm/librechat/Chart.lock
helm/**/charts/
helm/**/.values.yaml

!/client/src/@types/i18next.d.ts

# SAML Idp cert
*.cert
226 CHANGELOG.md

@@ -2,235 +2,15 @@

All notable changes to this project will be documented in this file.

## [Unreleased]

### ✨ New Features

- ✨ feat: implement search parameter updates by **@mawburn** in [#7151](https://github.com/danny-avila/LibreChat/pull/7151)
- 🎏 feat: Add MCP support for Streamable HTTP Transport by **@benverhees** in [#7353](https://github.com/danny-avila/LibreChat/pull/7353)
- 🔒 feat: Add Content Security Policy using Helmet middleware by **@rubentalstra** in [#7377](https://github.com/danny-avila/LibreChat/pull/7377)
- ✨ feat: Add Normalization for MCP Server Names by **@danny-avila** in [#7421](https://github.com/danny-avila/LibreChat/pull/7421)
- 📊 feat: Improve Helm Chart by **@hofq** in [#3638](https://github.com/danny-avila/LibreChat/pull/3638)
- 🦾 feat: Claude-4 Support by **@danny-avila** in [#7509](https://github.com/danny-avila/LibreChat/pull/7509)
- 🪨 feat: Bedrock Support for Claude-4 Reasoning by **@danny-avila** in [#7517](https://github.com/danny-avila/LibreChat/pull/7517)

### 🌍 Internationalization

- 🌍 i18n: Add `Danish` and `Czech` and `Catalan` localization support by **@rubentalstra** in [#7373](https://github.com/danny-avila/LibreChat/pull/7373)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7375](https://github.com/danny-avila/LibreChat/pull/7375)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7468](https://github.com/danny-avila/LibreChat/pull/7468)

### 🔧 Fixes

- 💬 fix: update aria-label for accessibility in ConvoLink component by **@berry-13** in [#7320](https://github.com/danny-avila/LibreChat/pull/7320)
- 🔑 fix: use `apiKey` instead of `openAIApiKey` in OpenAI-like Config by **@danny-avila** in [#7337](https://github.com/danny-avila/LibreChat/pull/7337)
- 🔄 fix: update navigation logic in `useFocusChatEffect` to ensure correct search parameters are used by **@mawburn** in [#7340](https://github.com/danny-avila/LibreChat/pull/7340)
- 🔄 fix: Improve MCP Connection Cleanup by **@danny-avila** in [#7400](https://github.com/danny-avila/LibreChat/pull/7400)
- 🛡️ fix: Preset and Validation Logic for URL Query Params by **@danny-avila** in [#7407](https://github.com/danny-avila/LibreChat/pull/7407)
- 🌘 fix: artifact of preview text is illegible in dark mode by **@nhtruong** in [#7405](https://github.com/danny-avila/LibreChat/pull/7405)
- 🛡️ fix: Temporarily Remove CSP until Configurable by **@danny-avila** in [#7419](https://github.com/danny-avila/LibreChat/pull/7419)
- 💽 fix: Exclude index page `/` from static cache settings by **@sbruel** in [#7382](https://github.com/danny-avila/LibreChat/pull/7382)
- 🪄 feat: Agent Artifacts by **@danny-avila** in [#5804](https://github.com/danny-avila/LibreChat/pull/5804)

### ⚙️ Other Changes

- 📜 docs: CHANGELOG for release v0.7.8 by **@github-actions[bot]** in [#7290](https://github.com/danny-avila/LibreChat/pull/7290)
- 📦 chore: Update API Package Dependencies by **@danny-avila** in [#7359](https://github.com/danny-avila/LibreChat/pull/7359)
- 📜 docs: Unreleased Changelog by **@github-actions[bot]** in [#7321](https://github.com/danny-avila/LibreChat/pull/7321)
- 📜 docs: Unreleased Changelog by **@github-actions[bot]** in [#7434](https://github.com/danny-avila/LibreChat/pull/7434)
- 🛡️ chore: `multer` v2.0.0 for CVE-2025-47935 and CVE-2025-47944 by **@danny-avila** in [#7454](https://github.com/danny-avila/LibreChat/pull/7454)
- 📂 refactor: Improve `FileAttachment` & File Form Deletion by **@danny-avila** in [#7471](https://github.com/danny-avila/LibreChat/pull/7471)
- 📊 chore: Remove Old Helm Chart by **@hofq** in [#7512](https://github.com/danny-avila/LibreChat/pull/7512)
- 🪖 chore: bump helm app version to v0.7.8 by **@austin-barrington** in [#7524](https://github.com/danny-avila/LibreChat/pull/7524)

---

## [v0.7.8] -

Changes from v0.7.8-rc1 to v0.7.8.

### ✨ New Features

- ✨ feat: Enhance form submission for touch screens by **@berry-13** in [#7198](https://github.com/danny-avila/LibreChat/pull/7198)
- 🔍 feat: Additional Tavily API Tool Parameters by **@glowforge-opensource** in [#7232](https://github.com/danny-avila/LibreChat/pull/7232)
- 🐋 feat: Add python to Dockerfile for increased MCP compatibility by **@technicalpickles** in [#7270](https://github.com/danny-avila/LibreChat/pull/7270)

### 🔧 Fixes

- 🔧 fix: Google Gemma Support & OpenAI Reasoning Instructions by **@danny-avila** in [#7196](https://github.com/danny-avila/LibreChat/pull/7196)
- 🛠️ fix: Conversation Navigation State by **@danny-avila** in [#7210](https://github.com/danny-avila/LibreChat/pull/7210)
- 🔄 fix: o-Series Model Regex for System Messages by **@danny-avila** in [#7245](https://github.com/danny-avila/LibreChat/pull/7245)
- 🔖 fix: Custom Headers for Initial MCP SSE Connection by **@danny-avila** in [#7246](https://github.com/danny-avila/LibreChat/pull/7246)
- 🛡️ fix: Deep Clone `MCPOptions` for User MCP Connections by **@danny-avila** in [#7247](https://github.com/danny-avila/LibreChat/pull/7247)
- 🔄 fix: URL Param Race Condition and File Draft Persistence by **@danny-avila** in [#7257](https://github.com/danny-avila/LibreChat/pull/7257)
- 🔄 fix: Assistants Endpoint & Minor Issues by **@danny-avila** in [#7274](https://github.com/danny-avila/LibreChat/pull/7274)
- 🔄 fix: Ollama Think Tag Edge Case with Tools by **@danny-avila** in [#7275](https://github.com/danny-avila/LibreChat/pull/7275)

### ⚙️ Other Changes

- 📜 docs: CHANGELOG for release v0.7.8-rc1 by **@github-actions[bot]** in [#7153](https://github.com/danny-avila/LibreChat/pull/7153)
- 🔄 refactor: Artifact Visibility Management by **@danny-avila** in [#7181](https://github.com/danny-avila/LibreChat/pull/7181)
- 📦 chore: Bump Package Security by **@danny-avila** in [#7183](https://github.com/danny-avila/LibreChat/pull/7183)
- 🌿 refactor: Unmount Fork Popover on Hide for Better Performance by **@danny-avila** in [#7189](https://github.com/danny-avila/LibreChat/pull/7189)
- 🧰 chore: ESLint configuration to enforce Prettier formatting rules by **@mawburn** in [#7186](https://github.com/danny-avila/LibreChat/pull/7186)
- 🎨 style: Improve KaTeX Rendering for LaTeX Equations by **@andresgit** in [#7223](https://github.com/danny-avila/LibreChat/pull/7223)
- 📝 docs: Update `.env.example` Google models by **@marlonka** in [#7254](https://github.com/danny-avila/LibreChat/pull/7254)
- 💬 refactor: MCP Chat Visibility Option, Google Rates, Remove OpenAPI Plugins by **@danny-avila** in [#7286](https://github.com/danny-avila/LibreChat/pull/7286)
- 📜 docs: Unreleased Changelog by **@github-actions[bot]** in [#7214](https://github.com/danny-avila/LibreChat/pull/7214)

[See full release details][release-v0.7.8]

[release-v0.7.8]: https://github.com/danny-avila/LibreChat/releases/tag/v0.7.8

---

## [v0.7.8-rc1] -

Changes from v0.7.7 to v0.7.8-rc1.

### ✨ New Features

- 🔍 feat: Mistral OCR API / Upload Files as Text by **@danny-avila** in [#6274](https://github.com/danny-avila/LibreChat/pull/6274)
- 🤖 feat: Support OpenAI Web Search models by **@danny-avila** in [#6313](https://github.com/danny-avila/LibreChat/pull/6313)
- 🔗 feat: Agent Chain (Mixture-of-Agents) by **@danny-avila** in [#6374](https://github.com/danny-avila/LibreChat/pull/6374)
- ⌛ feat: `initTimeout` for Slow Starting MCP Servers by **@perweij** in [#6383](https://github.com/danny-avila/LibreChat/pull/6383)
- 🚀 feat: `S3` Integration for File handling and Image uploads by **@rubentalstra** in [#6142](https://github.com/danny-avila/LibreChat/pull/6142)
- 🔒 feat: Enable OpenID Auto-Redirect by **@leondape** in [#6066](https://github.com/danny-avila/LibreChat/pull/6066)
- 🚀 feat: Integrate `Azure Blob Storage` for file handling and image uploads by **@rubentalstra** in [#6153](https://github.com/danny-avila/LibreChat/pull/6153)
- 🚀 feat: Add support for custom `AWS` endpoint in `S3` by **@rubentalstra** in [#6431](https://github.com/danny-avila/LibreChat/pull/6431)
- 🚀 feat: Add support for LDAP STARTTLS in LDAP authentication by **@rubentalstra** in [#6438](https://github.com/danny-avila/LibreChat/pull/6438)
- 🚀 feat: Refactor schema exports and update package version to 0.0.4 by **@rubentalstra** in [#6455](https://github.com/danny-avila/LibreChat/pull/6455)
- 🔼 feat: Add Auto Submit For URL Query Params by **@mjaverto** in [#6440](https://github.com/danny-avila/LibreChat/pull/6440)
- 🛠 feat: Enhance Redis Integration, Rate Limiters & Log Headers by **@danny-avila** in [#6462](https://github.com/danny-avila/LibreChat/pull/6462)
- 💵 feat: Add Automatic Balance Refill by **@rubentalstra** in [#6452](https://github.com/danny-avila/LibreChat/pull/6452)
- 🗣️ feat: add support for gpt-4o-transcribe models by **@berry-13** in [#6483](https://github.com/danny-avila/LibreChat/pull/6483)
- 🎨 feat: UI Refresh for Enhanced UX by **@berry-13** in [#6346](https://github.com/danny-avila/LibreChat/pull/6346)
- 🌍 feat: Add support for Hungarian language localization by **@rubentalstra** in [#6508](https://github.com/danny-avila/LibreChat/pull/6508)
- 🚀 feat: Add Gemini 2.5 Token/Context Values, Increase Max Possible Output to 64k by **@danny-avila** in [#6563](https://github.com/danny-avila/LibreChat/pull/6563)
- 🚀 feat: Enhance MCP Connections For Multi-User Support by **@danny-avila** in [#6610](https://github.com/danny-avila/LibreChat/pull/6610)
- 🚀 feat: Enhance S3 URL Expiry with Refresh; fix: S3 File Deletion by **@danny-avila** in [#6647](https://github.com/danny-avila/LibreChat/pull/6647)
- 🚀 feat: enhance UI components and refactor settings by **@berry-13** in [#6625](https://github.com/danny-avila/LibreChat/pull/6625)
- 💬 feat: move TemporaryChat to the Header by **@berry-13** in [#6646](https://github.com/danny-avila/LibreChat/pull/6646)
- 🚀 feat: Use Model Specs + Specific Endpoints, Limit Providers for Agents by **@danny-avila** in [#6650](https://github.com/danny-avila/LibreChat/pull/6650)
- 🪙 feat: Sync Balance Config on Login by **@danny-avila** in [#6671](https://github.com/danny-avila/LibreChat/pull/6671)
- 🔦 feat: MCP Support for Non-Agent Endpoints by **@danny-avila** in [#6775](https://github.com/danny-avila/LibreChat/pull/6775)
- 🗃️ feat: Code Interpreter File Persistence between Sessions by **@danny-avila** in [#6790](https://github.com/danny-avila/LibreChat/pull/6790)
- 🖥️ feat: Code Interpreter API for Non-Agent Endpoints by **@danny-avila** in [#6803](https://github.com/danny-avila/LibreChat/pull/6803)
- ⚡ feat: Self-hosted Artifacts Static Bundler URL by **@danny-avila** in [#6827](https://github.com/danny-avila/LibreChat/pull/6827)
- 🐳 feat: Add Jemalloc and UV to Docker Builds by **@danny-avila** in [#6836](https://github.com/danny-avila/LibreChat/pull/6836)
- 🤖 feat: GPT-4.1 by **@danny-avila** in [#6880](https://github.com/danny-avila/LibreChat/pull/6880)
- 👋 feat: remove Edge TTS by **@berry-13** in [#6885](https://github.com/danny-avila/LibreChat/pull/6885)
- feat: nav optimization by **@berry-13** in [#5785](https://github.com/danny-avila/LibreChat/pull/5785)
- 🗺️ feat: Add Parameter Location Mapping for OpenAPI actions by **@peeeteeer** in [#6858](https://github.com/danny-avila/LibreChat/pull/6858)
- 🤖 feat: Support `o4-mini` and `o3` Models by **@danny-avila** in [#6928](https://github.com/danny-avila/LibreChat/pull/6928)
- 🎨 feat: OpenAI Image Tools (GPT-Image-1) by **@danny-avila** in [#7079](https://github.com/danny-avila/LibreChat/pull/7079)
- 🗓️ feat: Add Special Variables for Prompts & Agents, Prompt UI Improvements by **@danny-avila** in [#7123](https://github.com/danny-avila/LibreChat/pull/7123)

### 🌍 Internationalization

- 🌍 i18n: Add Thai Language Support and Update Translations by **@rubentalstra** in [#6219](https://github.com/danny-avila/LibreChat/pull/6219)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6220](https://github.com/danny-avila/LibreChat/pull/6220)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6240](https://github.com/danny-avila/LibreChat/pull/6240)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6241](https://github.com/danny-avila/LibreChat/pull/6241)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6277](https://github.com/danny-avila/LibreChat/pull/6277)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6414](https://github.com/danny-avila/LibreChat/pull/6414)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6505](https://github.com/danny-avila/LibreChat/pull/6505)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6530](https://github.com/danny-avila/LibreChat/pull/6530)
- 🌍 i18n: Add Persian Localization Support by **@rubentalstra** in [#6669](https://github.com/danny-avila/LibreChat/pull/6669)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6667](https://github.com/danny-avila/LibreChat/pull/6667)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7126](https://github.com/danny-avila/LibreChat/pull/7126)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7148](https://github.com/danny-avila/LibreChat/pull/7148)

### 👐 Accessibility

- 🎨 a11y: Update Model Spec Description Text by **@berry-13** in [#6294](https://github.com/danny-avila/LibreChat/pull/6294)
- 🗑️ a11y: Add Accessible Name to Button for File Attachment Removal by **@kangabell** in [#6709](https://github.com/danny-avila/LibreChat/pull/6709)
- ⌨️ a11y: enhance accessibility & visual consistency by **@berry-13** in [#6866](https://github.com/danny-avila/LibreChat/pull/6866)
- 🙌 a11y: Searchbar/Conversations List Focus by **@danny-avila** in [#7096](https://github.com/danny-avila/LibreChat/pull/7096)
- 👐 a11y: Improve Fork and SplitText Accessibility by **@danny-avila** in [#7147](https://github.com/danny-avila/LibreChat/pull/7147)

### 🔧 Fixes

- 🐛 fix: Avatar Type Definitions in Agent/Assistant Schemas by **@danny-avila** in [#6235](https://github.com/danny-avila/LibreChat/pull/6235)
- 🔧 fix: MeiliSearch Field Error and Patch Incorrect Import by #6210 by **@rubentalstra** in [#6245](https://github.com/danny-avila/LibreChat/pull/6245)
- 🔏 fix: Enhance Two-Factor Authentication by **@rubentalstra** in [#6247](https://github.com/danny-avila/LibreChat/pull/6247)
- 🐛 fix: Await saveMessage in abortMiddleware to ensure proper execution by **@sh4shii** in [#6248](https://github.com/danny-avila/LibreChat/pull/6248)
- 🔧 fix: Axios Proxy Usage And Bump `mongoose` by **@danny-avila** in [#6298](https://github.com/danny-avila/LibreChat/pull/6298)
- 🔧 fix: comment out MCP servers to resolve service run issues by **@KunalScriptz** in [#6316](https://github.com/danny-avila/LibreChat/pull/6316)
- 🔧 fix: Update Token Calculations and Mapping, MCP `env` Initialization by **@danny-avila** in [#6406](https://github.com/danny-avila/LibreChat/pull/6406)
- 🐞 fix: Agent "Resend" Message Attachments + Source Icon Styling by **@danny-avila** in [#6408](https://github.com/danny-avila/LibreChat/pull/6408)
- 🐛 fix: Prevent Crash on Duplicate Message ID by **@Odrec** in [#6392](https://github.com/danny-avila/LibreChat/pull/6392)
- 🔐 fix: Invalid Key Length in 2FA Encryption by **@rubentalstra** in [#6432](https://github.com/danny-avila/LibreChat/pull/6432)
- 🏗️ fix: Fix Agents Token Spend Race Conditions, Expand Test Coverage by **@danny-avila** in [#6480](https://github.com/danny-avila/LibreChat/pull/6480)
- 🔃 fix: Draft Clearing, Claude Titles, Remove Default Vision Max Tokens by **@danny-avila** in [#6501](https://github.com/danny-avila/LibreChat/pull/6501)
- 🔧 fix: Update username reference to use user.name in greeting display by **@rubentalstra** in [#6534](https://github.com/danny-avila/LibreChat/pull/6534)
- 🔧 fix: S3 Download Stream with Key Extraction and Blob Storage Encoding for Vision by **@danny-avila** in [#6557](https://github.com/danny-avila/LibreChat/pull/6557)
- 🔧 fix: Mistral type strictness for `usage` & update token values/windows by **@danny-avila** in [#6562](https://github.com/danny-avila/LibreChat/pull/6562)
- 🔧 fix: Consolidate Text Parsing and TTS Edge Initialization by **@danny-avila** in [#6582](https://github.com/danny-avila/LibreChat/pull/6582)
- 🔧 fix: Ensure continuation in image processing on base64 encoding from Blob Storage by **@danny-avila** in [#6619](https://github.com/danny-avila/LibreChat/pull/6619)
- ✉️ fix: Fallback For User Name In Email Templates by **@danny-avila** in [#6620](https://github.com/danny-avila/LibreChat/pull/6620)
- 🔧 fix: Azure Blob Integration and File Source References by **@rubentalstra** in [#6575](https://github.com/danny-avila/LibreChat/pull/6575)
- 🐛 fix: Safeguard against undefined addedEndpoints by **@wipash** in [#6654](https://github.com/danny-avila/LibreChat/pull/6654)
- 🤖 fix: Gemini 2.5 Vision Support by **@danny-avila** in [#6663](https://github.com/danny-avila/LibreChat/pull/6663)
- 🔄 fix: Avatar & Error Handling Enhancements by **@danny-avila** in [#6687](https://github.com/danny-avila/LibreChat/pull/6687)
- 🔧 fix: Chat Middleware, Zod Conversion, Auto-Save and S3 URL Refresh by **@danny-avila** in [#6720](https://github.com/danny-avila/LibreChat/pull/6720)
- 🔧 fix: Agent Capability Checks & DocumentDB Compatibility for Agent Resource Removal by **@danny-avila** in [#6726](https://github.com/danny-avila/LibreChat/pull/6726)
- 🔄 fix: Improve audio MIME type detection and handling by **@berry-13** in [#6707](https://github.com/danny-avila/LibreChat/pull/6707)
- 🪺 fix: Update Role Handling due to New Schema Shape by **@danny-avila** in [#6774](https://github.com/danny-avila/LibreChat/pull/6774)
- 🗨️ fix: Show ModelSpec Greeting by **@berry-13** in [#6770](https://github.com/danny-avila/LibreChat/pull/6770)
- 🔧 fix: Keyv and Proxy Issues, and More Memory Optimizations by **@danny-avila** in [#6867](https://github.com/danny-avila/LibreChat/pull/6867)
- ✨ fix: Implement dynamic text sizing for greeting and name display by **@berry-13** in [#6833](https://github.com/danny-avila/LibreChat/pull/6833)
- 📝 fix: Mistral OCR Image Support and Azure Agent Titles by **@danny-avila** in [#6901](https://github.com/danny-avila/LibreChat/pull/6901)
- 📢 fix: Invalid `engineTTS` and Conversation State on Navigation by **@berry-13** in [#6904](https://github.com/danny-avila/LibreChat/pull/6904)
- 🛠️ fix: Improve Accessibility and Display of Conversation Menu by **@danny-avila** in [#6913](https://github.com/danny-avila/LibreChat/pull/6913)
- 🔧 fix: Agent Resource Form, Convo Menu Style, Ensure Draft Clears on Submission by **@danny-avila** in [#6925](https://github.com/danny-avila/LibreChat/pull/6925)
- 🔀 fix: MCP Improvements, Auto-Save Drafts, Artifact Markup by **@danny-avila** in [#7040](https://github.com/danny-avila/LibreChat/pull/7040)
- 🐋 fix: Improve Deepseek Compatbility by **@danny-avila** in [#7132](https://github.com/danny-avila/LibreChat/pull/7132)
- 🐙 fix: Add Redis Ping Interval to Prevent Connection Drops by **@peeeteeer** in [#7127](https://github.com/danny-avila/LibreChat/pull/7127)

### ⚙️ Other Changes

- 📦 refactor: Move DB Models to `@librechat/data-schemas` by **@rubentalstra** in [#6210](https://github.com/danny-avila/LibreChat/pull/6210)
- 📦 chore: Patch `axios` to address CVE-2025-27152 by **@danny-avila** in [#6222](https://github.com/danny-avila/LibreChat/pull/6222)
- ⚠️ refactor: Use Error Content Part Instead Of Throwing Error for Agents by **@danny-avila** in [#6262](https://github.com/danny-avila/LibreChat/pull/6262)
- 🏃‍♂️ refactor: Improve Agent Run Context & Misc. Changes by **@danny-avila** in [#6448](https://github.com/danny-avila/LibreChat/pull/6448)
- 📝 docs: librechat.example.yaml by **@ineiti** in [#6442](https://github.com/danny-avila/LibreChat/pull/6442)
- 🏃‍♂️ refactor: More Agent Context Improvements during Run by **@danny-avila** in [#6477](https://github.com/danny-avila/LibreChat/pull/6477)
- 🔃 refactor: Allow streaming for `o1` models by **@danny-avila** in [#6509](https://github.com/danny-avila/LibreChat/pull/6509)
- 🔧 chore: `Vite` Plugin Upgrades & Config Optimizations by **@rubentalstra** in [#6547](https://github.com/danny-avila/LibreChat/pull/6547)
- 🔧 refactor: Consolidate Logging, Model Selection & Actions Optimizations, Minor Fixes by **@danny-avila** in [#6553](https://github.com/danny-avila/LibreChat/pull/6553)
- 🎨 style: Address Minor UI Refresh Issues by **@berry-13** in [#6552](https://github.com/danny-avila/LibreChat/pull/6552)
- 🔧 refactor: Enhance Model & Endpoint Configurations with Global Indicators 🌍 by **@berry-13** in [#6578](https://github.com/danny-avila/LibreChat/pull/6578)
- 💬 style: Chat UI, Greeting, and Message adjustments by **@berry-13** in [#6612](https://github.com/danny-avila/LibreChat/pull/6612)
- ⚡ refactor: DocumentDB Compatibility for Balance Updates by **@danny-avila** in [#6673](https://github.com/danny-avila/LibreChat/pull/6673)
- 🧹 chore: Update ESLint rules for React hooks by **@rubentalstra** in [#6685](https://github.com/danny-avila/LibreChat/pull/6685)
- 🪙 chore: Update Gemini Pricing by **@RedwindA** in [#6731](https://github.com/danny-avila/LibreChat/pull/6731)
- 🪺 refactor: Nest Permission fields for Roles by **@rubentalstra** in [#6487](https://github.com/danny-avila/LibreChat/pull/6487)
- 📦 chore: Update `caniuse-lite` dependency to version 1.0.30001706 by **@rubentalstra** in [#6482](https://github.com/danny-avila/LibreChat/pull/6482)
- ⚙️ refactor: OAuth Flow Signal, Type Safety, Tool Progress & Updated Packages by **@danny-avila** in [#6752](https://github.com/danny-avila/LibreChat/pull/6752)
- 📦 chore: bump vite from 6.2.3 to 6.2.5 by **@dependabot[bot]** in [#6745](https://github.com/danny-avila/LibreChat/pull/6745)
- 💾 chore: Enhance Local Storage Handling and Update MCP SDK by **@danny-avila** in [#6809](https://github.com/danny-avila/LibreChat/pull/6809)
- 🤖 refactor: Improve Agents Memory Usage, Bump Keyv, Grok 3 by **@danny-avila** in [#6850](https://github.com/danny-avila/LibreChat/pull/6850)
- 💾 refactor: Enhance Memory In Image Encodings & Client Disposal by **@danny-avila** in [#6852](https://github.com/danny-avila/LibreChat/pull/6852)
- 🔁 refactor: Token Event Handler and Standardize `maxTokens` Key by **@danny-avila** in [#6886](https://github.com/danny-avila/LibreChat/pull/6886)
- 🔍 refactor: Search & Message Retrieval by **@berry-13** in [#6903](https://github.com/danny-avila/LibreChat/pull/6903)
- 🎨 style: standardize dropdown styling & fix z-Index layering by **@berry-13** in [#6939](https://github.com/danny-avila/LibreChat/pull/6939)
- 📙 docs: CONTRIBUTING.md by **@dblock** in [#6831](https://github.com/danny-avila/LibreChat/pull/6831)
- 🧭 refactor: Modernize Nav/Header by **@danny-avila** in [#7094](https://github.com/danny-avila/LibreChat/pull/7094)
- 🪶 refactor: Chat Input Focus for Conversation Navigations & ChatForm Optimizations by **@danny-avila** in [#7100](https://github.com/danny-avila/LibreChat/pull/7100)
- 🔃 refactor: Streamline Navigation, Message Loading UX by **@danny-avila** in [#7118](https://github.com/danny-avila/LibreChat/pull/7118)
- 📜 docs: Unreleased changelog by **@github-actions[bot]** in [#6265](https://github.com/danny-avila/LibreChat/pull/6265)

[See full release details][release-v0.7.8-rc1]

[release-v0.7.8-rc1]: https://github.com/danny-avila/LibreChat/releases/tag/v0.7.8-rc1

- 🔄 chore: Enforce 18next Language Keys by **@rubentalstra** in [#5803](https://github.com/danny-avila/LibreChat/pull/5803)
- 🔃 refactor: Parent Message ID Handling on Error, Update Translations, Bump Agents by **@danny-avila** in [#5833](https://github.com/danny-avila/LibreChat/pull/5833)

---
@@ -1,11 +1,10 @@
-# v0.7.8
+# v0.7.7

# Base node image
FROM node:20-alpine AS node

# Install jemalloc
RUN apk add --no-cache jemalloc
RUN apk add --no-cache python3 py3-pip uv

# Set environment variable to use jemalloc
ENV LD_PRELOAD=/usr/lib/libjemalloc.so.2
@@ -1,5 +1,5 @@
# Dockerfile.multi
-# v0.7.8
+# v0.7.7

# Base for all builds
FROM node:20-alpine AS base-min
@@ -71,11 +71,6 @@
- [Model Context Protocol (MCP) Support](https://modelcontextprotocol.io/clients#librechat) for Tools
- Use LibreChat Agents and OpenAI Assistants with Files, Code Interpreter, Tools, and API Actions

- 🔍 **Web Search**:
  - Search the internet and retrieve relevant information to enhance your AI context
  - Combines search providers, content scrapers, and result rerankers for optimal results
  - **[Learn More →](https://www.librechat.ai/docs/features/web_search)**

- 🪄 **Generative UI with Code Artifacts**:
  - [Code Artifacts](https://youtu.be/GfTj7O4gmd0?si=WJbdnemZpJzBrJo3) allow creation of React, HTML, and Mermaid diagrams directly in chat
@@ -70,10 +70,13 @@ class AnthropicClient extends BaseClient {
    this.message_delta;
    /** Whether the model is part of the Claude 3 Family
     * @type {boolean} */
-   this.isClaudeLatest;
+   this.isClaude3;
    /** Whether to use Messages API or Completions API
     * @type {boolean} */
    this.useMessages;
    /** Whether or not the model is limited to the legacy amount of output tokens
     * @type {boolean} */
    this.isLegacyOutput;
    /** Whether or not the model supports Prompt Caching
     * @type {boolean} */
    this.supportsCacheControl;

@@ -113,25 +116,21 @@ class AnthropicClient extends BaseClient {
    );

    const modelMatch = matchModelName(this.modelOptions.model, EModelEndpoint.anthropic);
-   this.isClaudeLatest =
-     /claude-[3-9]/.test(modelMatch) || /claude-(?:sonnet|opus|haiku)-[4-9]/.test(modelMatch);
-   const isLegacyOutput = !(
-     /claude-3[-.]5-sonnet/.test(modelMatch) ||
-     /claude-3[-.]7/.test(modelMatch) ||
-     /claude-(?:sonnet|opus|haiku)-[4-9]/.test(modelMatch) ||
-     /claude-[4-9]/.test(modelMatch)
+   this.isClaude3 = modelMatch.includes('claude-3');
+   this.isLegacyOutput = !(
+     /claude-3[-.]5-sonnet/.test(modelMatch) || /claude-3[-.]7/.test(modelMatch)
    );
    this.supportsCacheControl = this.options.promptCache && checkPromptCacheSupport(modelMatch);

    if (
-     isLegacyOutput &&
+     this.isLegacyOutput &&
      this.modelOptions.maxOutputTokens &&
      this.modelOptions.maxOutputTokens > legacy.maxOutputTokens.default
    ) {
      this.modelOptions.maxOutputTokens = legacy.maxOutputTokens.default;
    }

-   this.useMessages = this.isClaudeLatest || !!this.options.attachments;
+   this.useMessages = this.isClaude3 || !!this.options.attachments;

    this.defaultVisionModel = this.options.visionModel ?? 'claude-3-sonnet-20240229';
    this.options.attachments?.then((attachments) => this.checkVisionRequest(attachments));
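The two regexes on the removed side above decide which models take the newer code path. A standalone check of what they match, using model names that appear elsewhere in this diff:

```js
const isClaudeLatest = (model) =>
  /claude-[3-9]/.test(model) || /claude-(?:sonnet|opus|haiku)-[4-9]/.test(model);

console.log(isClaudeLatest('claude-3-sonnet-20240229')); // true  (claude-3 family)
console.log(isClaudeLatest('claude-sonnet-4-20250514')); // true  (Claude 4 naming order)
console.log(isClaudeLatest('claude-2'));                 // false (legacy completions path)
console.log(isClaudeLatest('claude-instant-1'));         // false
```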
@@ -397,13 +396,13 @@ class AnthropicClient extends BaseClient {
    const formattedMessages = orderedMessages.map((message, i) => {
      const formattedMessage = this.useMessages
        ? formatMessage({
            message,
            endpoint: EModelEndpoint.anthropic,
          })
        : {
            author: message.isCreatedByUser ? this.userLabel : this.assistantLabel,
            content: message?.content ?? message.text,
          };

      const needsTokenCount = this.contextStrategy && !orderedMessages[i].tokenCount;
      /* If tokens were never counted, or, is a Vision request and the message has files, count again */

@@ -655,10 +654,7 @@ class AnthropicClient extends BaseClient {
      );
    };

-   if (
-     /claude-[3-9]/.test(this.modelOptions.model) ||
-     /claude-(?:sonnet|opus|haiku)-[4-9]/.test(this.modelOptions.model)
-   ) {
+   if (this.modelOptions.model.includes('claude-3')) {
      await buildMessagesPayload();
      processTokens();
      return {

@@ -684,7 +680,7 @@ class AnthropicClient extends BaseClient {
    }

    getCompletion() {
-     logger.debug("AnthropicClient doesn't use getCompletion (all handled in sendCompletion)");
+     logger.debug('AnthropicClient doesn\'t use getCompletion (all handled in sendCompletion)');
    }

@@ -892,7 +888,7 @@ class AnthropicClient extends BaseClient {
    }

    getBuildMessagesOptions() {
-     logger.debug("AnthropicClient doesn't use getBuildMessagesOptions");
+     logger.debug('AnthropicClient doesn\'t use getBuildMessagesOptions');
    }

    getEncoding() {
@@ -63,15 +63,15 @@ class BaseClient {
  }

  setOptions() {
-   throw new Error("Method 'setOptions' must be implemented.");
+   throw new Error('Method \'setOptions\' must be implemented.');
  }

  async getCompletion() {
-   throw new Error("Method 'getCompletion' must be implemented.");
+   throw new Error('Method \'getCompletion\' must be implemented.');
  }

  async sendCompletion() {
-   throw new Error("Method 'sendCompletion' must be implemented.");
+   throw new Error('Method \'sendCompletion\' must be implemented.');
  }

  getSaveOptions() {

@@ -237,11 +237,11 @@ class BaseClient {
    const userMessage = opts.isEdited
      ? this.currentMessages[this.currentMessages.length - 2]
      : this.createUserMessage({
          messageId: userMessageId,
          parentMessageId,
          conversationId,
          text: message,
        });

    if (typeof opts?.getReqData === 'function') {
      opts.getReqData({
@@ -140,7 +140,8 @@ class GoogleClient extends BaseClient {
    this.options.attachments?.then((attachments) => this.checkVisionRequest(attachments));

    /** @type {boolean} Whether using a "GenerativeAI" Model */
-   this.isGenerativeModel = /gemini|learnlm|gemma/.test(this.modelOptions.model);
+   this.isGenerativeModel =
+     this.modelOptions.model.includes('gemini') || this.modelOptions.model.includes('learnlm');

    this.maxContextTokens =
      this.options.maxContextTokens ??
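For comparison, the single regex on the removed side also covers Gemma models, which the two `includes` checks on the added side do not (model names other than `gemini-2.0-flash-001` are illustrative):

```js
const isGenerativeModel = (model) => /gemini|learnlm|gemma/.test(model);

console.log(isGenerativeModel('gemini-2.0-flash-001')); // true
console.log(isGenerativeModel('gemma-2-9b'));           // true, missed by the includes() version
console.log(isGenerativeModel('learnlm-1.5-pro'));      // true
console.log(isGenerativeModel('chat-bison'));           // false
```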
@@ -475,9 +475,7 @@ class OpenAIClient extends BaseClient {
      promptPrefix = this.augmentedPrompt + promptPrefix;
    }

-   const noSystemModelRegex = /\b(o1-preview|o1-mini)\b/i.test(this.modelOptions.model);
-
-   if (promptPrefix && !noSystemModelRegex) {
+   if (promptPrefix && this.isOmni !== true) {
      promptPrefix = `Instructions:\n${promptPrefix.trim()}`;
      instructions = {
        role: 'system',

@@ -505,7 +503,7 @@ class OpenAIClient extends BaseClient {
    };

    /** EXPERIMENTAL */
-   if (promptPrefix && noSystemModelRegex) {
+   if (promptPrefix && this.isOmni === true) {
      const lastUserMessageIndex = payload.findLastIndex((message) => message.role === 'user');
      if (lastUserMessageIndex !== -1) {
        if (Array.isArray(payload[lastUserMessageIndex].content)) {

@@ -1229,9 +1227,9 @@ ${convo}
    opts.baseURL = this.langchainProxy
      ? constructAzureURL({
          baseURL: this.langchainProxy,
          azureOptions: this.azure,
        })
      : this.azureEndpoint.split(/(?<!\/)\/(chat|completion)\//)[0];

    opts.defaultQuery = { 'api-version': this.azure.azureOpenAIApiVersion };

@@ -1285,14 +1283,6 @@ ${convo}
      modelOptions.messages[0].role = 'user';
    }

-   if (
-     (this.options.endpoint === EModelEndpoint.openAI ||
-       this.options.endpoint === EModelEndpoint.azureOpenAI) &&
-     modelOptions.stream === true
-   ) {
-     modelOptions.stream_options = { include_usage: true };
-   }
-
    if (this.options.addParams && typeof this.options.addParams === 'object') {
      const addParams = { ...this.options.addParams };
      modelOptions = {

@@ -1395,6 +1385,12 @@ ${convo}
        ...modelOptions,
        stream: true,
      };
+     if (
+       this.options.endpoint === EModelEndpoint.openAI ||
+       this.options.endpoint === EModelEndpoint.azureOpenAI
+     ) {
+       params.stream_options = { include_usage: true };
+     }
      const stream = await openai.beta.chat.completions
        .stream(params)
        .on('abort', () => {
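The relocated `stream_options` block follows the OpenAI streaming API: with `include_usage: true`, the final streamed chunk carries token usage. A minimal sketch against the standard `openai` Node SDK (an illustration, not LibreChat's own wrapper; the model name is a placeholder):

```js
const OpenAI = require('openai');

async function main() {
  const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
  const stream = await openai.chat.completions.create({
    model: 'gpt-4.1', // illustrative model name
    messages: [{ role: 'user', content: 'Say hi' }],
    stream: true,
    stream_options: { include_usage: true },
  });
  for await (const chunk of stream) {
    // usage is only populated on the final chunk when include_usage is set
    if (chunk.usage) console.log(chunk.usage);
  }
}

main();
```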
@@ -15,7 +15,7 @@ describe('AnthropicClient', () => {
    {
      role: 'user',
      isCreatedByUser: true,
-     text: "What's up",
+     text: 'What\'s up',
      messageId: '3',
      parentMessageId: '2',
    },

@@ -170,7 +170,7 @@ describe('AnthropicClient', () => {
    client.options.modelLabel = 'Claude-2';
    const result = await client.buildMessages(messages, parentMessageId);
    const { prompt } = result;
-   expect(prompt).toContain("Human's name: John");
+   expect(prompt).toContain('Human\'s name: John');
    expect(prompt).toContain('You are Claude-2');
  });
});

@@ -244,64 +244,6 @@ describe('AnthropicClient', () => {
      );
    });

    describe('Claude 4 model headers', () => {
      it('should add "prompt-caching" beta header for claude-sonnet-4 model', () => {
        const client = new AnthropicClient('test-api-key');
        const modelOptions = {
          model: 'claude-sonnet-4-20250514',
        };
        client.setOptions({ modelOptions, promptCache: true });
        const anthropicClient = client.getClient(modelOptions);
        expect(anthropicClient._options.defaultHeaders).toBeDefined();
        expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
        expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
          'prompt-caching-2024-07-31',
        );
      });

      it('should add "prompt-caching" beta header for claude-opus-4 model', () => {
        const client = new AnthropicClient('test-api-key');
        const modelOptions = {
          model: 'claude-opus-4-20250514',
        };
        client.setOptions({ modelOptions, promptCache: true });
        const anthropicClient = client.getClient(modelOptions);
        expect(anthropicClient._options.defaultHeaders).toBeDefined();
        expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
        expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
          'prompt-caching-2024-07-31',
        );
      });

      it('should add "prompt-caching" beta header for claude-4-sonnet model', () => {
        const client = new AnthropicClient('test-api-key');
        const modelOptions = {
          model: 'claude-4-sonnet-20250514',
        };
        client.setOptions({ modelOptions, promptCache: true });
        const anthropicClient = client.getClient(modelOptions);
        expect(anthropicClient._options.defaultHeaders).toBeDefined();
        expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
        expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
          'prompt-caching-2024-07-31',
        );
      });

      it('should add "prompt-caching" beta header for claude-4-opus model', () => {
        const client = new AnthropicClient('test-api-key');
        const modelOptions = {
          model: 'claude-4-opus-20250514',
        };
        client.setOptions({ modelOptions, promptCache: true });
        const anthropicClient = client.getClient(modelOptions);
        expect(anthropicClient._options.defaultHeaders).toBeDefined();
        expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
        expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
          'prompt-caching-2024-07-31',
        );
      });
    });

    it('should not add beta header for claude-3-5-sonnet-latest model', () => {
      const client = new AnthropicClient('test-api-key');
      const modelOptions = {
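The deleted tests above assert that Claude 4 models keep receiving the `anthropic-beta: prompt-caching-2024-07-31` header. For reference, a sketch of setting that header on a raw Anthropic SDK client (assuming the SDK's `defaultHeaders` option, which the tests' `_options.defaultHeaders` checks mirror):

```js
const Anthropic = require('@anthropic-ai/sdk');

// Prompt caching was gated behind a beta header; setting it here applies it
// to every request made by this client instance.
const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
  defaultHeaders: { 'anthropic-beta': 'prompt-caching-2024-07-31' },
});
```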
@@ -514,34 +456,6 @@ describe('AnthropicClient', () => {
    expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
  });

  it('should not cap maxOutputTokens for Claude 4 Sonnet models', () => {
    const client = new AnthropicClient('test-api-key');
    const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 10; // 40,960 tokens

    client.setOptions({
      modelOptions: {
        model: 'claude-sonnet-4-20250514',
        maxOutputTokens: highTokenValue,
      },
    });

    expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
  });

  it('should not cap maxOutputTokens for Claude 4 Opus models', () => {
    const client = new AnthropicClient('test-api-key');
    const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 6; // 24,576 tokens (under 32K limit)

    client.setOptions({
      modelOptions: {
        model: 'claude-opus-4-20250514',
        maxOutputTokens: highTokenValue,
      },
    });

    expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
  });

  it('should cap maxOutputTokens for Claude 3.5 Haiku models', () => {
    const client = new AnthropicClient('test-api-key');
    const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 2;

@@ -815,223 +729,4 @@ describe('AnthropicClient', () => {
    expect(capturedOptions).toHaveProperty('topK', 10);
    expect(capturedOptions).toHaveProperty('topP', 0.9);
  });

  describe('isClaudeLatest', () => {
    it('should set isClaudeLatest to true for claude-3 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-3-sonnet-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to true for claude-3.5 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-3.5-sonnet-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to true for claude-sonnet-4 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-sonnet-4-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to true for claude-opus-4 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-opus-4-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to true for claude-3.5-haiku models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-3.5-haiku-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to false for claude-2 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-2',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });

    it('should set isClaudeLatest to false for claude-instant models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-instant',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });

    it('should set isClaudeLatest to false for claude-sonnet-3 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-sonnet-3-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });

    it('should set isClaudeLatest to false for claude-opus-3 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-opus-3-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });

    it('should set isClaudeLatest to false for claude-haiku-3 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-haiku-3-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });
  });

  describe('configureReasoning', () => {
    it('should enable thinking for claude-opus-4 and claude-sonnet-4 models', async () => {
      const client = new AnthropicClient('test-api-key');
      // Create a mock async generator function
      async function* mockAsyncGenerator() {
        yield { type: 'message_start', message: { usage: {} } };
        yield { delta: { text: 'Test response' } };
        yield { type: 'message_delta', usage: {} };
      }

      // Mock createResponse to return the async generator
      jest.spyOn(client, 'createResponse').mockImplementation(() => {
        return mockAsyncGenerator();
      });

      // Test claude-opus-4
      client.setOptions({
        modelOptions: {
          model: 'claude-opus-4-20250514',
        },
        thinking: true,
        thinkingBudget: 2000,
      });

      let capturedOptions = null;
      jest.spyOn(client, 'getClient').mockImplementation((options) => {
        capturedOptions = options;
        return {};
      });

      const payload = [{ role: 'user', content: 'Test message' }];
      await client.sendCompletion(payload, {});

      expect(capturedOptions).toHaveProperty('thinking');
      expect(capturedOptions.thinking).toEqual({
        type: 'enabled',
        budget_tokens: 2000,
      });

      // Test claude-sonnet-4
      client.setOptions({
        modelOptions: {
          model: 'claude-sonnet-4-20250514',
        },
        thinking: true,
        thinkingBudget: 2000,
      });

      await client.sendCompletion(payload, {});

      expect(capturedOptions).toHaveProperty('thinking');
      expect(capturedOptions.thinking).toEqual({
        type: 'enabled',
        budget_tokens: 2000,
      });
    });
  });
});
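The deleted `configureReasoning` test exercises Anthropic's extended-thinking parameter; the shape it asserts, `{ type: 'enabled', budget_tokens: 2000 }`, is what the Messages API accepts. A hedged sketch of the equivalent direct SDK call:

```js
const Anthropic = require('@anthropic-ai/sdk');

async function main() {
  const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });
  const message = await anthropic.messages.create({
    model: 'claude-opus-4-20250514',
    max_tokens: 4096, // must exceed the thinking budget
    thinking: { type: 'enabled', budget_tokens: 2000 },
    messages: [{ role: 'user', content: 'Test message' }],
  });
  console.log(message.content);
}

main();
```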
describe('Claude Model Tests', () => {
|
||||
it('should handle Claude 3 and 4 series models correctly', () => {
|
||||
const client = new AnthropicClient('test-key');
|
||||
// Claude 3 series models
|
||||
const claude3Models = [
|
||||
'claude-3-opus-20240229',
|
||||
'claude-3-sonnet-20240229',
|
||||
'claude-3-haiku-20240307',
|
||||
'claude-3-5-sonnet-20240620',
|
||||
'claude-3-5-haiku-20240620',
|
||||
'claude-3.5-sonnet-20240620',
|
||||
'claude-3.5-haiku-20240620',
|
||||
'claude-3.7-sonnet-20240620',
|
||||
'claude-3.7-haiku-20240620',
|
||||
'anthropic/claude-3-opus-20240229',
|
||||
'claude-3-opus-20240229/anthropic',
|
||||
];
|
||||
|
||||
// Claude 4 series models
|
||||
const claude4Models = [
|
||||
'claude-sonnet-4-20250514',
|
||||
'claude-opus-4-20250514',
|
||||
'claude-4-sonnet-20250514',
|
||||
'claude-4-opus-20250514',
|
||||
'anthropic/claude-sonnet-4-20250514',
|
||||
'claude-sonnet-4-20250514/anthropic',
|
||||
];
|
||||
|
||||
// Test Claude 3 series
|
||||
claude3Models.forEach((model) => {
|
||||
client.setOptions({ modelOptions: { model } });
|
||||
expect(
|
||||
/claude-[3-9]/.test(client.modelOptions.model) ||
|
||||
/claude-(?:sonnet|opus|haiku)-[4-9]/.test(client.modelOptions.model),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
// Test Claude 4 series
|
||||
claude4Models.forEach((model) => {
|
||||
client.setOptions({ modelOptions: { model } });
|
||||
expect(
|
||||
/claude-[3-9]/.test(client.modelOptions.model) ||
|
||||
/claude-(?:sonnet|opus|haiku)-[4-9]/.test(client.modelOptions.model),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
// Test non-Claude 3/4 models
|
||||
const nonClaudeModels = ['claude-2', 'claude-instant', 'gpt-4', 'gpt-3.5-turbo'];
|
||||
|
||||
nonClaudeModels.forEach((model) => {
|
||||
client.setOptions({ modelOptions: { model } });
|
||||
expect(
|
||||
/claude-[3-9]/.test(client.modelOptions.model) ||
|
||||
/claude-(?:sonnet|opus|haiku)-[4-9]/.test(client.modelOptions.model),
|
||||
).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
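The Claude-family checks above boil down to one predicate. A minimal sketch, assuming the two regexes in the spec mirror the client's internal model check (the helper name is illustrative, not part of the AnthropicClient API):

// Matches Claude 3+ names ('claude-3-opus-...') and the reordered
// Claude 4 names ('claude-sonnet-4-...'), but not 'claude-2',
// 'claude-instant', or non-Claude models.
function isClaude3OrNewer(model) {
  return (
    /claude-[3-9]/.test(model) ||
    /claude-(?:sonnet|opus|haiku)-[4-9]/.test(model)
  );
}

isClaude3OrNewer('claude-sonnet-4-20250514'); // true
isClaude3OrNewer('claude-2'); // false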
@@ -1,7 +1,7 @@
const { Constants } = require('librechat-data-provider');
const { initializeFakeClient } = require('./FakeClient');

jest.mock('~/db/connect');
jest.mock('~/lib/db/connectDb');
jest.mock('~/models', () => ({
  User: jest.fn(),
  Key: jest.fn(),
@@ -52,7 +52,7 @@ const messageHistory = [
  {
    role: 'user',
    isCreatedByUser: true,
    text: "What's up",
    text: 'What\'s up',
    messageId: '3',
    parentMessageId: '2',
  },
@@ -456,7 +456,7 @@ describe('BaseClient', () => {

    const chatMessages2 = await TestClient.loadHistory(conversationId, '3');
    expect(TestClient.currentMessages).toHaveLength(3);
    expect(chatMessages2[chatMessages2.length - 1].text).toEqual("What's up");
    expect(chatMessages2[chatMessages2.length - 1].text).toEqual('What\'s up');
  });

  /* Most of the new sendMessage logic revolving around edited/continued AI messages

@@ -5,7 +5,7 @@ const getLogStores = require('~/cache/getLogStores');
const OpenAIClient = require('../OpenAIClient');
jest.mock('meilisearch');

jest.mock('~/db/connect');
jest.mock('~/lib/db/connectDb');
jest.mock('~/models', () => ({
  User: jest.fn(),
  Key: jest.fn(),
@@ -462,17 +462,17 @@ describe('OpenAIClient', () => {
      role: 'system',
      name: 'example_user',
      content:
        "Let's circle back when we have more bandwidth to touch base on opportunities for increased leverage.",
        'Let\'s circle back when we have more bandwidth to touch base on opportunities for increased leverage.',
    },
    {
      role: 'system',
      name: 'example_assistant',
      content: "Let's talk later when we're less busy about how to do better.",
      content: 'Let\'s talk later when we\'re less busy about how to do better.',
    },
    {
      role: 'user',
      content:
        "This late pivot means we don't have time to boil the ocean for the client deliverable.",
        'This late pivot means we don\'t have time to boil the ocean for the client deliverable.',
    },
  ];

@@ -3,7 +3,7 @@ const { Constants } = require('librechat-data-provider');
const { HumanMessage, AIMessage } = require('@langchain/core/messages');
const PluginsClient = require('../PluginsClient');

jest.mock('~/db/connect');
jest.mock('~/lib/db/connectDb');
jest.mock('~/models/Conversation', () => {
  return function () {
    return {
@@ -30,7 +30,7 @@ const DEFAULT_IMAGE_EDIT_DESCRIPTION =

When to use \`image_edit_oai\`:
- The user wants to modify, extend, or remix one **or more** uploaded images, either:
  - Previously generated, or in the current request (both to be included in the \`image_ids\` array).
  - Previously generated, or in the current request (both to be included in the \`image_ids\` array).
- Always when the user refers to uploaded images for editing, enhancement, remixing, style transfer, or combining elements.
- Any current or existing images are to be used as visual guides.
- If there are any files in the current request, they are more likely than not expected as references for image edit requests.
@@ -43,39 +43,9 @@ class TavilySearchResults extends Tool {
        .boolean()
        .optional()
        .describe('Whether to include answers in the search results. Default is False.'),
      include_raw_content: z
        .boolean()
        .optional()
        .describe('Whether to include raw content in the search results. Default is False.'),
      include_domains: z
        .array(z.string())
        .optional()
        .describe('A list of domains to specifically include in the search results.'),
      exclude_domains: z
        .array(z.string())
        .optional()
        .describe('A list of domains to specifically exclude from the search results.'),
      topic: z
        .enum(['general', 'news', 'finance'])
        .optional()
        .describe(
          'The category of the search. Use news ONLY if query SPECIFICALLY mentions the word "news".',
        ),
      time_range: z
        .enum(['day', 'week', 'month', 'year', 'd', 'w', 'm', 'y'])
        .optional()
        .describe('The time range back from the current date to filter results.'),
      days: z
        .number()
        .min(1)
        .optional()
        .describe('Number of days back from the current date to include. Only if topic is news.'),
      include_image_descriptions: z
        .boolean()
        .optional()
        .describe(
          'When include_images is true, also add a descriptive text for each image. Default is false.',
        ),
      // include_raw_content: z.boolean().optional().describe('Whether to include raw content in the search results. Default is False.'),
      // include_domains: z.array(z.string()).optional().describe('A list of domains to specifically include in the search results.'),
      // exclude_domains: z.array(z.string()).optional().describe('A list of domains to specifically exclude from the search results.'),
    });
  }
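For reference, an input that satisfies the schema fields shown in this hunk might look like the sketch below (all values are illustrative; every field shown here is optional):

// Hypothetical tool input matching the zod fields above; note the schema's
// own guidance: topic 'news' only when the query says "news", and days
// only applies when topic is 'news'.
const tavilyInput = {
  include_answer: true,
  include_raw_content: false,
  include_domains: ['nature.com'],
  exclude_domains: ['pinterest.com'],
  topic: 'news',
  time_range: 'week',
  days: 3,
  include_image_descriptions: false,
};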
30 api/app/clients/tools/util/addOpenAPISpecs.js Normal file
@@ -0,0 +1,30 @@
const { loadSpecs } = require('./loadSpecs');

function transformSpec(input) {
  return {
    name: input.name_for_human,
    pluginKey: input.name_for_model,
    description: input.description_for_human,
    icon: input?.logo_url ?? 'https://placehold.co/70x70.png',
    // TODO: add support for authentication
    isAuthRequired: 'false',
    authConfig: [],
  };
}

async function addOpenAPISpecs(availableTools) {
  try {
    const specs = (await loadSpecs({})).map(transformSpec);
    if (specs.length > 0) {
      return [...specs, ...availableTools];
    }
    return availableTools;
  } catch (error) {
    return availableTools;
  }
}

module.exports = {
  transformSpec,
  addOpenAPISpecs,
};
76 api/app/clients/tools/util/addOpenAPISpecs.spec.js Normal file
@@ -0,0 +1,76 @@
const { addOpenAPISpecs, transformSpec } = require('./addOpenAPISpecs');
const { loadSpecs } = require('./loadSpecs');
const { createOpenAPIPlugin } = require('../dynamic/OpenAPIPlugin');

jest.mock('./loadSpecs');
jest.mock('../dynamic/OpenAPIPlugin');

describe('transformSpec', () => {
  it('should transform input spec to a desired format', () => {
    const input = {
      name_for_human: 'Human Name',
      name_for_model: 'Model Name',
      description_for_human: 'Human Description',
      logo_url: 'https://example.com/logo.png',
    };

    const expectedOutput = {
      name: 'Human Name',
      pluginKey: 'Model Name',
      description: 'Human Description',
      icon: 'https://example.com/logo.png',
      isAuthRequired: 'false',
      authConfig: [],
    };

    expect(transformSpec(input)).toEqual(expectedOutput);
  });

  it('should use default icon if logo_url is not provided', () => {
    const input = {
      name_for_human: 'Human Name',
      name_for_model: 'Model Name',
      description_for_human: 'Human Description',
    };

    const expectedOutput = {
      name: 'Human Name',
      pluginKey: 'Model Name',
      description: 'Human Description',
      icon: 'https://placehold.co/70x70.png',
      isAuthRequired: 'false',
      authConfig: [],
    };

    expect(transformSpec(input)).toEqual(expectedOutput);
  });
});

describe('addOpenAPISpecs', () => {
  it('should add specs to available tools', async () => {
    const availableTools = ['Tool1', 'Tool2'];
    const specs = [
      {
        name_for_human: 'Human Name',
        name_for_model: 'Model Name',
        description_for_human: 'Human Description',
        logo_url: 'https://example.com/logo.png',
      },
    ];

    loadSpecs.mockResolvedValue(specs);
    createOpenAPIPlugin.mockReturnValue('Plugin');

    const result = await addOpenAPISpecs(availableTools);
    expect(result).toEqual([...specs.map(transformSpec), ...availableTools]);
  });

  it('should return available tools if specs loading fails', async () => {
    const availableTools = ['Tool1', 'Tool2'];

    loadSpecs.mockRejectedValue(new Error('Failed to load specs'));

    const result = await addOpenAPISpecs(availableTools);
    expect(result).toEqual(availableTools);
  });
});
@@ -1,13 +1,7 @@
const { SerpAPI } = require('@langchain/community/tools/serpapi');
const { Calculator } = require('@langchain/community/tools/calculator');
const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents');
const {
  Tools,
  Constants,
  EToolResources,
  loadWebSearchAuth,
  replaceSpecialVars,
} = require('librechat-data-provider');
const { createCodeExecutionTool, EnvVar } = require('@librechat/agents');
const { Tools, Constants, EToolResources } = require('librechat-data-provider');
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
const {
  availableTools,
@@ -30,6 +24,7 @@ const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/pro
const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { createMCPTool } = require('~/server/services/MCP');
const { loadSpecs } = require('./loadSpecs');
const { logger } = require('~/config');

const mcpToolPattern = new RegExp(`^.+${Constants.mcp_delimiter}.+$`);
@@ -144,6 +139,7 @@ const loadTools = async ({
  agent,
  model,
  endpoint,
  useSpecs,
  tools = [],
  options = {},
  functions = true,
@@ -236,6 +232,7 @@ const loadTools = async ({

  /** @type {Record<string, string>} */
  const toolContextMap = {};
  const remainingTools = [];
  const appTools = options.req?.app?.locals?.availableTools ?? {};

  for (const tool of tools) {
@@ -268,33 +265,6 @@ const loadTools = async ({
        return createFileSearchTool({ req: options.req, files, entity_id: agent?.id });
      };
      continue;
    } else if (tool === Tools.web_search) {
      const webSearchConfig = options?.req?.app?.locals?.webSearch;
      const result = await loadWebSearchAuth({
        userId: user,
        loadAuthValues,
        webSearchConfig,
      });
      const { onSearchResults, onGetHighlights } = options?.[Tools.web_search] ?? {};
      requestedTools[tool] = async () => {
        toolContextMap[tool] = `# \`${tool}\`:
Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
1. **Execute immediately without preface** when using \`${tool}\`.
2. **After the search, begin with a brief summary** that directly addresses the query without headers or explaining your process.
3. **Structure your response clearly** using Markdown formatting (Level 2 headers for sections, lists for multiple points, tables for comparisons).
4. **Cite sources properly** according to the citation anchor format, utilizing group anchors when appropriate.
5. **Tailor your approach to the query type** (academic, news, coding, etc.) while maintaining an expert, journalistic, unbiased tone.
6. **Provide comprehensive information** with specific details, examples, and as much relevant context as possible from search results.
7. **Avoid moralizing language.**
`.trim();
        return createSearchTool({
          ...result.authResult,
          onSearchResults,
          onGetHighlights,
          logger,
        });
      };
      continue;
    } else if (tool && appTools[tool] && mcpToolPattern.test(tool)) {
      requestedTools[tool] = async () =>
        createMCPTool({
@@ -322,6 +292,30 @@ Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
      requestedTools[tool] = toolInstance;
      continue;
    }

    if (functions === true) {
      remainingTools.push(tool);
    }
  }

  let specs = null;
  if (useSpecs === true && functions === true && remainingTools.length > 0) {
    specs = await loadSpecs({
      llm: model,
      user,
      message: options.message,
      memory: options.memory,
      signal: options.signal,
      tools: remainingTools,
      map: true,
      verbose: false,
    });
  }

  for (const tool of remainingTools) {
    if (specs && specs[tool]) {
      requestedTools[tool] = specs[tool];
    }
  }

  if (returnMap) {
@@ -10,24 +10,18 @@ const mockPluginService = {
  getUserPluginAuthValue: jest.fn(),
};

const mockModels = {
  User: mockUser,
};
jest.mock('~/db/connect', () => {
  return {
    connectDb: jest.fn(),
    User: mockModels.mockUser,
jest.mock('~/models/User', () => {
  return function () {
    return mockUser;
  };
});
jest.mock('~/models/File', () => ({
  File: jest.fn(),
}));

jest.mock('~/server/services/PluginService', () => mockPluginService);

const { BaseLLM } = require('@langchain/openai');
const { Calculator } = require('@langchain/community/tools/calculator');

const User = require('~/models/User');
const PluginService = require('~/server/services/PluginService');
const { validateTools, loadTools, loadToolWithAuth } = require('./handleTools');
const { StructuredSD, availableTools, DALLE3 } = require('../');
@@ -58,7 +52,7 @@ describe('Tool Handlers', () => {
    },
  );

  fakeUser = await mockModels.User.createUser({
  fakeUser = new User({
    name: 'Fake User',
    username: 'fakeuser',
    email: 'fakeuser@example.com',
@@ -224,6 +218,7 @@ describe('Tool Handlers', () => {
    try {
      await loadTool2();
    } catch (error) {
      // eslint-disable-next-line jest/no-conditional-expect
      expect(error).toBeDefined();
    }
  });
117 api/app/clients/tools/util/loadSpecs.js Normal file
@@ -0,0 +1,117 @@
const fs = require('fs');
const path = require('path');
const { z } = require('zod');
const { logger } = require('~/config');
const { createOpenAPIPlugin } = require('~/app/clients/tools/dynamic/OpenAPIPlugin');

// The minimum Manifest definition
const ManifestDefinition = z.object({
  schema_version: z.string().optional(),
  name_for_human: z.string(),
  name_for_model: z.string(),
  description_for_human: z.string(),
  description_for_model: z.string(),
  auth: z.object({}).optional(),
  api: z.object({
    // Spec URL, or the filename of the OpenAPI spec yaml file
    // located in api\app\clients\tools\.well-known\openapi
    url: z.string(),
    type: z.string().optional(),
    is_user_authenticated: z.boolean().nullable().optional(),
    has_user_authentication: z.boolean().nullable().optional(),
  }),
  // Used to override any params that the LLM will consistently get wrong
  params: z.object({}).optional(),
  logo_url: z.string().optional(),
  contact_email: z.string().optional(),
  legal_info_url: z.string().optional(),
});

function validateJson(json) {
  try {
    return ManifestDefinition.parse(json);
  } catch (error) {
    logger.debug('[validateJson] manifest parsing error', error);
    return false;
  }
}

// Omit the llm argument to get the well-known JSONs back as plain objects
async function loadSpecs({ llm, user, message, tools = [], map = false, memory, signal }) {
  const directoryPath = path.join(__dirname, '..', '.well-known');
  let files = [];

  for (let i = 0; i < tools.length; i++) {
    const filePath = path.join(directoryPath, tools[i] + '.json');

    try {
      // If the access Promise resolves, the file exists
      // and can be added to the files array
      await fs.promises.access(filePath, fs.constants.F_OK);
      files.push(tools[i] + '.json');
    } catch (err) {
      logger.error(`[loadSpecs] File ${tools[i] + '.json'} does not exist`, err);
    }
  }

  if (files.length === 0) {
    files = (await fs.promises.readdir(directoryPath)).filter(
      (file) => path.extname(file) === '.json',
    );
  }

  const validJsons = [];
  const constructorMap = {};

  logger.debug('[validateJson] files', files);

  for (const file of files) {
    if (path.extname(file) === '.json') {
      const filePath = path.join(directoryPath, file);
      const fileContent = await fs.promises.readFile(filePath, 'utf8');
      const json = JSON.parse(fileContent);

      if (!validateJson(json)) {
        logger.debug('[validateJson] Invalid json', json);
        continue;
      }

      if (llm && map) {
        constructorMap[json.name_for_model] = async () =>
          await createOpenAPIPlugin({
            data: json,
            llm,
            message,
            memory,
            signal,
            user,
          });
        continue;
      }

      if (llm) {
        validJsons.push(createOpenAPIPlugin({ data: json, llm }));
        continue;
      }

      validJsons.push(json);
    }
  }

  if (map) {
    return constructorMap;
  }

  const plugins = (await Promise.all(validJsons)).filter((plugin) => plugin);

  // logger.debug('[validateJson] plugins', plugins);
  // logger.debug(plugins[0].name);

  return plugins;
}

module.exports = {
  loadSpecs,
  validateJson,
  ManifestDefinition,
};
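A quick usage sketch of the two modes above ('scriptplugin' stands in for a manifest at .well-known/scriptplugin.json, and 'ScriptPlugin' for its name_for_model; both are illustrative):

// llm is assumed to be a LangChain-compatible model instance.
// Without an llm, loadSpecs resolves the validated manifests as plain objects.
const manifests = await loadSpecs({ tools: ['scriptplugin'] });

// With an llm and map: true, it returns lazy plugin constructors keyed by
// name_for_model; callers invoke an entry to build the plugin on demand.
const constructors = await loadSpecs({ llm, tools: ['scriptplugin'], map: true });
const plugin = await constructors['ScriptPlugin']();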
101 api/app/clients/tools/util/loadSpecs.spec.js Normal file
@@ -0,0 +1,101 @@
const fs = require('fs');
const { validateJson, loadSpecs, ManifestDefinition } = require('./loadSpecs');
const { createOpenAPIPlugin } = require('../dynamic/OpenAPIPlugin');

jest.mock('../dynamic/OpenAPIPlugin');

describe('ManifestDefinition', () => {
  it('should validate correct json', () => {
    const json = {
      name_for_human: 'Test',
      name_for_model: 'Test',
      description_for_human: 'Test',
      description_for_model: 'Test',
      api: {
        url: 'http://test.com',
      },
    };

    expect(() => ManifestDefinition.parse(json)).not.toThrow();
  });

  it('should not validate incorrect json', () => {
    const json = {
      name_for_human: 'Test',
      name_for_model: 'Test',
      description_for_human: 'Test',
      description_for_model: 'Test',
      api: {
        url: 123, // incorrect type
      },
    };

    expect(() => ManifestDefinition.parse(json)).toThrow();
  });
});

describe('validateJson', () => {
  it('should return parsed json if valid', () => {
    const json = {
      name_for_human: 'Test',
      name_for_model: 'Test',
      description_for_human: 'Test',
      description_for_model: 'Test',
      api: {
        url: 'http://test.com',
      },
    };

    expect(validateJson(json)).toEqual(json);
  });

  it('should return false if json is not valid', () => {
    const json = {
      name_for_human: 'Test',
      name_for_model: 'Test',
      description_for_human: 'Test',
      description_for_model: 'Test',
      api: {
        url: 123, // incorrect type
      },
    };

    expect(validateJson(json)).toEqual(false);
  });
});

describe('loadSpecs', () => {
  beforeEach(() => {
    jest.spyOn(fs.promises, 'readdir').mockResolvedValue(['test.json']);
    jest.spyOn(fs.promises, 'readFile').mockResolvedValue(
      JSON.stringify({
        name_for_human: 'Test',
        name_for_model: 'Test',
        description_for_human: 'Test',
        description_for_model: 'Test',
        api: {
          url: 'http://test.com',
        },
      }),
    );
    createOpenAPIPlugin.mockResolvedValue({});
  });

  afterEach(() => {
    jest.restoreAllMocks();
  });

  it('should return plugins', async () => {
    const plugins = await loadSpecs({ llm: true, verbose: false });

    expect(plugins).toHaveLength(1);
    expect(createOpenAPIPlugin).toHaveBeenCalledTimes(1);
  });

  it('should return constructorMap if map is true', async () => {
    const plugins = await loadSpecs({ llm: {}, map: true, verbose: false });

    expect(plugins).toHaveProperty('Test');
    expect(createOpenAPIPlugin).not.toHaveBeenCalled();
  });
});
4 api/cache/banViolation.js vendored
@@ -1,8 +1,8 @@
const { logger } = require('@librechat/data-schemas');
const { ViolationTypes } = require('librechat-data-provider');
const { isEnabled, math, removePorts } = require('~/server/utils');
const { deleteAllUserSessions } = require('~/models');
const getLogStores = require('./getLogStores');
const { logger } = require('~/config');

const { BAN_VIOLATIONS, BAN_INTERVAL } = process.env ?? {};
const interval = math(BAN_INTERVAL, 20);
@@ -32,6 +32,7 @@ const banViolation = async (req, res, errorMessage) => {
  if (!isEnabled(BAN_VIOLATIONS)) {
    return;
  }

  if (!errorMessage) {
    return;
  }
@@ -50,6 +51,7 @@ const banViolation = async (req, res, errorMessage) => {

  const banLogs = getLogStores(ViolationTypes.BAN);
  const duration = errorMessage.duration || banLogs.opts.ttl;

  if (duration <= 0) {
    return;
  }
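A usage sketch of the guards above, assuming an errorMessage shaped like the violation records this module consumes (only duration is read directly by the code shown; the type field is illustrative):

// Returns early when BAN_VIOLATIONS is disabled, when errorMessage is
// missing, or when the computed duration (errorMessage.duration, falling
// back to banLogs.opts.ttl) is <= 0.
await banViolation(req, res, {
  type: 'concurrent', // illustrative violation type
  duration: 1000 * 60 * 60, // overrides banLogs.opts.ttl when present
});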
23 api/cache/banViolation.spec.js vendored
@@ -1,28 +1,7 @@
const banViolation = require('./banViolation');

const mockModels = {
  Session: {
    deleteAllUserSessions: jest.fn(),
  },
};

jest.mock('~/db/connect', () => {
  return {
    connectDb: jest.fn(),
    get models() {
      return mockModels;
    },
  };
});

jest.mock('~/server/utils', () => ({
  isEnabled: jest.fn(() => true), // defaults to true, override per test if needed
  math: jest.fn(() => 20), // defaults to 20, override per test if needed
  removePorts: jest.fn(),
}));

jest.mock('keyv');
// jest.mock('../models/Session');
jest.mock('../models/Session');
// Mocking the getLogStores function
jest.mock('./getLogStores', () => {
  return jest.fn().mockImplementation(() => {
5 api/cache/getLogStores.js vendored
@@ -61,10 +61,6 @@ const abortKeys = isRedisEnabled
  ? new Keyv({ store: keyvRedis })
  : new Keyv({ namespace: CacheKeys.ABORT_KEYS, ttl: Time.TEN_MINUTES });

const openIdExchangedTokensCache = isRedisEnabled
  ? new Keyv({ store: keyvRedis, ttl: Time.TEN_MINUTES })
  : new Keyv({ namespace: CacheKeys.OPENID_EXCHANGED_TOKENS, ttl: Time.TEN_MINUTES });

const namespaces = {
  [CacheKeys.ROLES]: roles,
  [CacheKeys.CONFIG_STORE]: config,
@@ -102,7 +98,6 @@ const namespaces = {
  [CacheKeys.AUDIO_RUNS]: audioRuns,
  [CacheKeys.MESSAGES]: messages,
  [CacheKeys.FLOWS]: flows,
  [CacheKeys.OPENID_EXCHANGED_TOKENS]: openIdExchangedTokensCache,
};

/**
10 api/cache/keyvRedis.js vendored
@@ -75,15 +75,6 @@ if (REDIS_URI && isEnabled(USE_REDIS)) {
  } else {
    keyvRedis = new KeyvRedis(REDIS_URI, keyvOpts);
  }

  const pingInterval = setInterval(
    () => {
      logger.debug('KeyvRedis ping');
      keyvRedis.client.ping().catch((err) => logger.error('Redis keep-alive ping failed:', err));
    },
    5 * 60 * 1000,
  );

  keyvRedis.on('ready', () => {
    logger.info('KeyvRedis connection ready');
  });
@@ -94,7 +85,6 @@ if (REDIS_URI && isEnabled(USE_REDIS)) {
    logger.info('KeyvRedis connection ended');
  });
  keyvRedis.on('close', () => {
    clearInterval(pingInterval);
    logger.info('KeyvRedis connection closed');
  });
  keyvRedis.on('error', (err) => logger.error('KeyvRedis connection error:', err));
@@ -1,8 +0,0 @@
|
||||
const mongoose = require('mongoose');
|
||||
const { createModels } = require('@librechat/data-schemas');
|
||||
const { connectDb } = require('./connect');
|
||||
const indexSync = require('./indexSync');
|
||||
|
||||
createModels(mongoose);
|
||||
|
||||
module.exports = { connectDb, indexSync };
|
||||
@@ -1,5 +0,0 @@
|
||||
const mongoose = require('mongoose');
|
||||
const { createModels } = require('@librechat/data-schemas');
|
||||
const models = createModels(mongoose);
|
||||
|
||||
module.exports = { ...models };
|
||||
@@ -11,8 +11,5 @@ module.exports = {
|
||||
moduleNameMapper: {
|
||||
'~/(.*)': '<rootDir>/$1',
|
||||
'~/data/auth.json': '<rootDir>/__mocks__/auth.mock.json',
|
||||
'^openid-client/passport$': '<rootDir>/test/__mocks__/openid-client-passport.js', // Mock for the passport strategy part
|
||||
'^openid-client$': '<rootDir>/test/__mocks__/openid-client.js',
|
||||
},
|
||||
transformIgnorePatterns: ['/node_modules/(?!(openid-client|oauth4webapi|jose)/).*/'],
|
||||
};
|
||||
|
||||
@@ -39,10 +39,7 @@ async function connectDb() {
|
||||
});
|
||||
}
|
||||
cached.conn = await cached.promise;
|
||||
|
||||
return cached.conn;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
connectDb,
|
||||
};
|
||||
module.exports = connectDb;
|
||||
4
api/lib/db/index.js
Normal file
4
api/lib/db/index.js
Normal file
@@ -0,0 +1,4 @@
|
||||
const connectDb = require('./connectDb');
|
||||
const indexSync = require('./indexSync');
|
||||
|
||||
module.exports = { connectDb, indexSync };
|
||||
@@ -1,11 +1,8 @@
const mongoose = require('mongoose');
const { MeiliSearch } = require('meilisearch');
const { logger } = require('@librechat/data-schemas');

const { Conversation } = require('~/models/Conversation');
const { Message } = require('~/models/Message');
const { isEnabled } = require('~/server/utils');

const Conversation = mongoose.models.Conversation;
const Message = mongoose.models.Message;
const { logger } = require('~/config');

const searchEnabled = isEnabled(process.env.SEARCH);
const indexingDisabled = isEnabled(process.env.MEILI_NO_SYNC);
@@ -32,6 +29,7 @@ async function indexSync() {
  if (!searchEnabled) {
    return;
  }

  try {
    const client = MeiliSearchClient.getInstance();

@@ -1,5 +1,7 @@
const mongoose = require('mongoose');
const Action = require('~/db/models').Action;
const { actionSchema } = require('@librechat/data-schemas');

const Action = mongoose.model('action', actionSchema);

/**
 * Update an action with new data without overwriting existing properties,
@@ -1,7 +1,6 @@
const mongoose = require('mongoose');
const crypto = require('node:crypto');
const { logger } = require('@librechat/data-schemas');
const { SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider');
const { agentSchema } = require('@librechat/data-schemas');
const { SystemRoles, Tools } = require('librechat-data-provider');
const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_delimiter } =
  require('librechat-data-provider').Constants;
const { CONFIG_STORE, STARTUP_CONFIG } = require('librechat-data-provider').CacheKeys;
@@ -12,9 +11,8 @@ const {
  removeAgentFromAllProjects,
} = require('./Project');
const getLogStores = require('~/cache/getLogStores');
const { getActions } = require('./Action');

const Agent = require('~/db/models').Agent;
const Agent = mongoose.model('agent', agentSchema);

/**
 * Create an agent with the provided data.
@@ -23,19 +21,7 @@ const Agent = require('~/db/models').Agent;
 * @throws {Error} If the agent creation fails.
 */
const createAgent = async (agentData) => {
  const { author, ...versionData } = agentData;
  const timestamp = new Date();
  const initialAgentData = {
    ...agentData,
    versions: [
      {
        ...versionData,
        createdAt: timestamp,
        updatedAt: timestamp,
      },
    ],
  };
  return (await Agent.create(initialAgentData)).toObject();
  return (await Agent.create(agentData)).toObject();
};

/**
@@ -62,17 +48,12 @@ const loadEphemeralAgent = ({ req, agent_id, endpoint, model_parameters: _m }) =
  const { model, ...model_parameters } = _m;
  /** @type {Record<string, FunctionTool>} */
  const availableTools = req.app.locals.availableTools;
  /** @type {TEphemeralAgent | null} */
  const ephemeralAgent = req.body.ephemeralAgent;
  const mcpServers = new Set(ephemeralAgent?.mcp);
  const mcpServers = new Set(req.body.ephemeralAgent?.mcp);
  /** @type {string[]} */
  const tools = [];
  if (ephemeralAgent?.execute_code === true) {
  if (req.body.ephemeralAgent?.execute_code === true) {
    tools.push(Tools.execute_code);
  }
  if (ephemeralAgent?.web_search === true) {
    tools.push(Tools.web_search);
  }

  if (mcpServers.size > 0) {
    for (const toolName of Object.keys(availableTools)) {
@@ -122,8 +103,6 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
    return null;
  }

  agent.version = agent.versions ? agent.versions.length : 0;

  if (agent.author.toString() === req.user.id) {
    return agent;
  }
@@ -148,207 +127,19 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
  }
};
/**
 * Check if a version already exists in the versions array, excluding timestamp and author fields
 * @param {Object} updateData - The update data to compare
 * @param {Object} currentData - The current agent data
 * @param {Array} versions - The existing versions array
 * @param {string} [actionsHash] - Hash of current action metadata
 * @returns {Object|null} - The matching version if found, null otherwise
 */
const isDuplicateVersion = (updateData, currentData, versions, actionsHash = null) => {
  if (!versions || versions.length === 0) {
    return null;
  }

  const excludeFields = [
    '_id',
    'id',
    'createdAt',
    'updatedAt',
    'author',
    'updatedBy',
    'created_at',
    'updated_at',
    '__v',
    'agent_ids',
    'versions',
    'actionsHash', // Exclude actionsHash from direct comparison
  ];

  const { $push, $pull, $addToSet, ...directUpdates } = updateData;

  if (Object.keys(directUpdates).length === 0 && !actionsHash) {
    return null;
  }

  const wouldBeVersion = { ...currentData, ...directUpdates };
  const lastVersion = versions[versions.length - 1];

  if (actionsHash && lastVersion.actionsHash !== actionsHash) {
    return null;
  }

  const allFields = new Set([...Object.keys(wouldBeVersion), ...Object.keys(lastVersion)]);

  const importantFields = Array.from(allFields).filter((field) => !excludeFields.includes(field));

  let isMatch = true;
  for (const field of importantFields) {
    if (!wouldBeVersion[field] && !lastVersion[field]) {
      continue;
    }

    if (Array.isArray(wouldBeVersion[field]) && Array.isArray(lastVersion[field])) {
      if (wouldBeVersion[field].length !== lastVersion[field].length) {
        isMatch = false;
        break;
      }

      // Special handling for projectIds (MongoDB ObjectIds)
      if (field === 'projectIds') {
        const wouldBeIds = wouldBeVersion[field].map((id) => id.toString()).sort();
        const versionIds = lastVersion[field].map((id) => id.toString()).sort();

        if (!wouldBeIds.every((id, i) => id === versionIds[i])) {
          isMatch = false;
          break;
        }
      }
      // Handle arrays of objects like tool_kwargs
      else if (typeof wouldBeVersion[field][0] === 'object' && wouldBeVersion[field][0] !== null) {
        const sortedWouldBe = [...wouldBeVersion[field]].map((item) => JSON.stringify(item)).sort();
        const sortedVersion = [...lastVersion[field]].map((item) => JSON.stringify(item)).sort();

        if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
          isMatch = false;
          break;
        }
      } else {
        const sortedWouldBe = [...wouldBeVersion[field]].sort();
        const sortedVersion = [...lastVersion[field]].sort();

        if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
          isMatch = false;
          break;
        }
      }
    } else if (field === 'model_parameters') {
      const wouldBeParams = wouldBeVersion[field] || {};
      const lastVersionParams = lastVersion[field] || {};
      if (JSON.stringify(wouldBeParams) !== JSON.stringify(lastVersionParams)) {
        isMatch = false;
        break;
      }
    } else if (wouldBeVersion[field] !== lastVersion[field]) {
      isMatch = false;
      break;
    }
  }

  return isMatch ? lastVersion : null;
};

/**
 * Update an agent with new data without overwriting existing
 * properties, or create a new agent if it doesn't exist.
 * When an agent is updated, a copy of the current state will be saved to the versions array.
 *
 * @param {Object} searchParameter - The search parameters to find the agent to update.
 * @param {string} searchParameter.id - The ID of the agent to update.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {Object} updateData - An object containing the properties to update.
 * @param {Object} [options] - Optional configuration object.
 * @param {string} [options.updatingUserId] - The ID of the user performing the update (used for tracking non-author updates).
 * @param {boolean} [options.forceVersion] - Force creation of a new version even if no fields changed.
 * @returns {Promise<Agent>} The updated or newly created agent document as a plain object.
 * @throws {Error} If the update would create a duplicate version
 */
const updateAgent = async (searchParameter, updateData, options = {}) => {
  const { updatingUserId = null, forceVersion = false } = options;
  const mongoOptions = { new: true, upsert: false };

  const currentAgent = await Agent.findOne(searchParameter);
  if (currentAgent) {
    const { __v, _id, id, versions, author, ...versionData } = currentAgent.toObject();
    const { $push, $pull, $addToSet, ...directUpdates } = updateData;

    let actionsHash = null;

    // Generate actions hash if agent has actions
    if (currentAgent.actions && currentAgent.actions.length > 0) {
      // Extract action IDs from the format "domain_action_id"
      const actionIds = currentAgent.actions
        .map((action) => {
          const parts = action.split(actionDelimiter);
          return parts[1]; // Get just the action ID part
        })
        .filter(Boolean);

      if (actionIds.length > 0) {
        try {
          const actions = await getActions(
            {
              action_id: { $in: actionIds },
            },
            true,
          ); // Include sensitive data for hash

          actionsHash = await generateActionMetadataHash(currentAgent.actions, actions);
        } catch (error) {
          logger.error('Error fetching actions for hash generation:', error);
        }
      }
    }

    const shouldCreateVersion =
      forceVersion ||
      (versions &&
        versions.length > 0 &&
        (Object.keys(directUpdates).length > 0 || $push || $pull || $addToSet));

    if (shouldCreateVersion) {
      const duplicateVersion = isDuplicateVersion(updateData, versionData, versions, actionsHash);
      if (duplicateVersion && !forceVersion) {
        const error = new Error(
          'Duplicate version: This would create a version identical to an existing one',
        );
        error.statusCode = 409;
        error.details = {
          duplicateVersion,
          versionIndex: versions.findIndex(
            (v) => JSON.stringify(duplicateVersion) === JSON.stringify(v),
          ),
        };
        throw error;
      }
    }

    const versionEntry = {
      ...versionData,
      ...directUpdates,
      updatedAt: new Date(),
    };

    // Include actions hash in version if available
    if (actionsHash) {
      versionEntry.actionsHash = actionsHash;
    }

    // Always store updatedBy field to track who made the change
    if (updatingUserId) {
      versionEntry.updatedBy = new mongoose.Types.ObjectId(updatingUserId);
    }

    if (shouldCreateVersion || forceVersion) {
      updateData.$push = {
        ...($push || {}),
        versions: versionEntry,
      };
    }
  }

  return Agent.findOneAndUpdate(searchParameter, updateData, mongoOptions).lean();
const updateAgent = async (searchParameter, updateData) => {
  const options = { new: true, upsert: false };
  return Agent.findOneAndUpdate(searchParameter, updateData, options).lean();
};
/**
@@ -360,7 +151,7 @@ const updateAgent = async (searchParameter, updateData, options = {}) => {
 * @param {string} params.file_id
 * @returns {Promise<Agent>} The updated agent.
 */
const addAgentResourceFile = async ({ req, agent_id, tool_resource, file_id }) => {
const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
  const searchParameter = { id: agent_id };
  let agent = await getAgent(searchParameter);
  if (!agent) {
@@ -386,9 +177,7 @@ const addAgentResourceFile = async ({ req, agent_id, tool_resource, file_id }) =
    },
  };

  const updatedAgent = await updateAgent(searchParameter, updateData, {
    updatingUserId: req?.user?.id,
  });
  const updatedAgent = await updateAgent(searchParameter, updateData);
  if (updatedAgent) {
    return updatedAgent;
  } else {
@@ -480,6 +269,7 @@ const getListAgents = async (searchParameter) => {
    delete globalQuery.author;
    query = { $or: [globalQuery, query] };
  }

  const agents = (
    await Agent.find(query, {
      id: 1,
@@ -518,7 +308,7 @@ const getListAgents = async (searchParameter) => {
 * This function also updates the corresponding projects to include or exclude the agent ID.
 *
 * @param {Object} params - Parameters for updating the agent's projects.
 * @param {MongoUser} params.user - Parameters for updating the agent's projects.
 * @param {import('librechat-data-provider').TUser} params.user - Parameters for updating the agent's projects.
 * @param {string} params.agentId - The ID of the agent to update.
 * @param {string[]} [params.projectIds] - Array of project IDs to add to the agent.
 * @param {string[]} [params.removeProjectIds] - Array of project IDs to remove from the agent.
@@ -551,7 +341,7 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
    delete updateQuery.author;
  }

  const updatedAgent = await updateAgent(updateQuery, updateOps, { updatingUserId: user.id });
  const updatedAgent = await updateAgent(updateQuery, updateOps);
  if (updatedAgent) {
    return updatedAgent;
  }
@@ -568,107 +358,15 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
  return await getAgent({ id: agentId });
};

/**
 * Reverts an agent to a specific version in its version history.
 * @param {Object} searchParameter - The search parameters to find the agent to revert.
 * @param {string} searchParameter.id - The ID of the agent to revert.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {number} versionIndex - The index of the version to revert to in the versions array.
 * @returns {Promise<MongoAgent>} The updated agent document after reverting.
 * @throws {Error} If the agent is not found or the specified version does not exist.
 */
const revertAgentVersion = async (searchParameter, versionIndex) => {
  const agent = await Agent.findOne(searchParameter);
  if (!agent) {
    throw new Error('Agent not found');
  }

  if (!agent.versions || !agent.versions[versionIndex]) {
    throw new Error(`Version ${versionIndex} not found`);
  }

  const revertToVersion = agent.versions[versionIndex];

  const updateData = {
    ...revertToVersion,
  };

  delete updateData._id;
  delete updateData.id;
  delete updateData.versions;
  delete updateData.author;
  delete updateData.updatedBy;

  return Agent.findOneAndUpdate(searchParameter, updateData, { new: true }).lean();
};

/**
 * Generates a hash of action metadata for version comparison
 * @param {string[]} actionIds - Array of action IDs in format "domain_action_id"
 * @param {Action[]} actions - Array of action documents
 * @returns {Promise<string>} - SHA256 hash of the action metadata
 */
const generateActionMetadataHash = async (actionIds, actions) => {
  if (!actionIds || actionIds.length === 0) {
    return '';
  }

  // Create a map of action_id to metadata for quick lookup
  const actionMap = new Map();
  actions.forEach((action) => {
    actionMap.set(action.action_id, action.metadata);
  });

  // Sort action IDs for consistent hashing
  const sortedActionIds = [...actionIds].sort();

  // Build a deterministic string representation of all action metadata
  const metadataString = sortedActionIds
    .map((actionFullId) => {
      // Extract just the action_id part (after the delimiter)
      const parts = actionFullId.split(actionDelimiter);
      const actionId = parts[1];

      const metadata = actionMap.get(actionId);
      if (!metadata) {
        return `${actionId}:null`;
      }

      // Sort metadata keys for deterministic output
      const sortedKeys = Object.keys(metadata).sort();
      const metadataStr = sortedKeys
        .map((key) => `${key}:${JSON.stringify(metadata[key])}`)
        .join(',');
      return `${actionId}:{${metadataStr}}`;
    })
    .join(';');

  // Use Web Crypto API to generate hash
  const encoder = new TextEncoder();
  const data = encoder.encode(metadataString);
  const hashBuffer = await crypto.webcrypto.subtle.digest('SHA-256', data);
  const hashArray = Array.from(new Uint8Array(hashBuffer));
  const hashHex = hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');

  return hashHex;
};

/**
 * Load a default agent based on the endpoint
 * @param {string} endpoint
 * @returns {Agent | null}
 */

module.exports = {
  Agent,
  getAgent,
  loadAgent,
  createAgent,
  updateAgent,
  deleteAgent,
  getListAgents,
  revertAgentVersion,
  updateAgentProjects,
  addAgentResourceFile,
  removeAgentResourceFiles,
  generateActionMetadataHash,
};
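A usage sketch of the versioned updateAgent above (the agent id is illustrative; the 409 behavior follows the duplicate-version check in the code):

// Each update snapshots the current state into agent.versions; when
// updatingUserId is provided it is stored as updatedBy on the new entry.
await updateAgent(
  { id: 'agent_abc123' },
  { name: 'Renamed Agent' },
  { updatingUserId: req.user.id },
);

// Re-submitting identical data throws a 409 'Duplicate version' error,
// unless forceVersion: true bypasses the duplicate check.
await updateAgent(
  { id: 'agent_abc123' },
  { name: 'Renamed Agent' },
  { updatingUserId: req.user.id, forceVersion: true },
);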
@@ -1,27 +1,7 @@
const originalEnv = {
  CREDS_KEY: process.env.CREDS_KEY,
  CREDS_IV: process.env.CREDS_IV,
};

process.env.CREDS_KEY = '0123456789abcdef0123456789abcdef';
process.env.CREDS_IV = '0123456789abcdef';

const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');

const { MongoMemoryServer } = require('mongodb-memory-server');
const {
  getAgent,
  updateAgent,
  deleteAgent,
  createAgent,
  getListAgents,
  updateAgentProjects,
  addAgentResourceFile,
  removeAgentResourceFiles,
} = require('./Agent');

const Agent = require('~/db/models').Agent;
const { Agent, addAgentResourceFile, removeAgentResourceFiles } = require('./Agent');

describe('Agent Resource File Operations', () => {
  let mongoServer;
@@ -35,8 +15,6 @@ describe('Agent Resource File Operations', () => {
  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
    process.env.CREDS_KEY = originalEnv.CREDS_KEY;
    process.env.CREDS_IV = originalEnv.CREDS_IV;
  });

  beforeEach(async () => {
@@ -57,7 +35,6 @@ describe('Agent Resource File Operations', () => {

  test('should add tool_resource to tools if missing', async () => {
    const agent = await createBasicAgent();

    const fileId = uuidv4();
    const toolResource = 'file_search';

@@ -355,725 +332,3 @@ describe('Agent Resource File Operations', () => {
    expect(finalFileIds).toHaveLength(0);
  });
});

describe('Agent CRUD Operations', () => {
  let mongoServer;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    await Agent.deleteMany({});
  });

  test('should create and get an agent', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    const newAgent = await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      description: 'Test description',
    });

    expect(newAgent).toBeDefined();
    expect(newAgent.id).toBe(agentId);
    expect(newAgent.name).toBe('Test Agent');

    const retrievedAgent = await getAgent({ id: agentId });
    expect(retrievedAgent).toBeDefined();
    expect(retrievedAgent.id).toBe(agentId);
    expect(retrievedAgent.name).toBe('Test Agent');
    expect(retrievedAgent.description).toBe('Test description');
  });

  test('should delete an agent', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Agent To Delete',
      provider: 'test',
      model: 'test-model',
      author: authorId,
    });

    const agentBeforeDelete = await getAgent({ id: agentId });
    expect(agentBeforeDelete).toBeDefined();

    await deleteAgent({ id: agentId });

    const agentAfterDelete = await getAgent({ id: agentId });
    expect(agentAfterDelete).toBeNull();
  });
  test('should list agents by author', async () => {
    const authorId = new mongoose.Types.ObjectId();
    const otherAuthorId = new mongoose.Types.ObjectId();

    const agentIds = [];
    for (let i = 0; i < 5; i++) {
      const id = `agent_${uuidv4()}`;
      agentIds.push(id);
      await createAgent({
        id,
        name: `Agent ${i}`,
        provider: 'test',
        model: 'test-model',
        author: authorId,
      });
    }

    for (let i = 0; i < 3; i++) {
      await createAgent({
        id: `other_agent_${uuidv4()}`,
        name: `Other Agent ${i}`,
        provider: 'test',
        model: 'test-model',
        author: otherAuthorId,
      });
    }

    const result = await getListAgents({ author: authorId.toString() });

    expect(result).toBeDefined();
    expect(result.data).toBeDefined();
    expect(result.data).toHaveLength(5);
    expect(result.has_more).toBe(true);

    for (const agent of result.data) {
      expect(agent.author).toBe(authorId.toString());
    }
  });

  test('should update agent projects', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();
    const projectId1 = new mongoose.Types.ObjectId();
    const projectId2 = new mongoose.Types.ObjectId();
    const projectId3 = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Project Test Agent',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      projectIds: [projectId1],
    });

    await updateAgent(
      { id: agentId },
      { $addToSet: { projectIds: { $each: [projectId2, projectId3] } } },
    );

    await updateAgent({ id: agentId }, { $pull: { projectIds: projectId1 } });

    await updateAgent({ id: agentId }, { projectIds: [projectId2, projectId3] });

    const updatedAgent = await getAgent({ id: agentId });
    expect(updatedAgent.projectIds).toHaveLength(2);
    expect(updatedAgent.projectIds.map((id) => id.toString())).toContain(projectId2.toString());
    expect(updatedAgent.projectIds.map((id) => id.toString())).toContain(projectId3.toString());
    expect(updatedAgent.projectIds.map((id) => id.toString())).not.toContain(projectId1.toString());

    await updateAgent({ id: agentId }, { projectIds: [] });

    const emptyProjectsAgent = await getAgent({ id: agentId });
    expect(emptyProjectsAgent.projectIds).toHaveLength(0);

    const nonExistentId = `agent_${uuidv4()}`;
    await expect(
      updateAgentProjects({
        id: nonExistentId,
        projectIds: [projectId1],
      }),
    ).rejects.toThrow();
  });

  test('should handle ephemeral agent loading', async () => {
    const agentId = 'ephemeral_test';
    const endpoint = 'openai';

    const originalModule = jest.requireActual('librechat-data-provider');

    const mockDataProvider = {
      ...originalModule,
      Constants: {
        ...originalModule.Constants,
        EPHEMERAL_AGENT_ID: 'ephemeral_test',
      },
    };

    jest.doMock('librechat-data-provider', () => mockDataProvider);

    const mockReq = {
      user: { id: 'user123' },
      body: {
        promptPrefix: 'This is a test instruction',
        ephemeralAgent: {
          execute_code: true,
          mcp: ['server1', 'server2'],
        },
      },
      app: {
        locals: {
          availableTools: {
            tool__server1: {},
            tool__server2: {},
            another_tool: {},
          },
        },
      },
    };

    const params = {
      req: mockReq,
      agent_id: agentId,
      endpoint,
      model_parameters: {
        model: 'gpt-4',
        temperature: 0.7,
      },
    };

    expect(agentId).toBeDefined();
    expect(endpoint).toBeDefined();

    jest.dontMock('librechat-data-provider');
  });
  test('should handle loadAgent functionality and errors', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Test Load Agent',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      tools: ['tool1', 'tool2'],
    });

    const agent = await getAgent({ id: agentId });

    expect(agent).toBeDefined();
    expect(agent.id).toBe(agentId);
    expect(agent.name).toBe('Test Load Agent');
    expect(agent.tools).toEqual(expect.arrayContaining(['tool1', 'tool2']));

    const mockLoadAgent = jest.fn().mockResolvedValue(agent);
    const loadedAgent = await mockLoadAgent();
    expect(loadedAgent).toBeDefined();
    expect(loadedAgent.id).toBe(agentId);

    const nonExistentId = `agent_${uuidv4()}`;
    const nonExistentAgent = await getAgent({ id: nonExistentId });
    expect(nonExistentAgent).toBeNull();

    const mockLoadAgentError = jest.fn().mockRejectedValue(new Error('No agent found with ID'));
    await expect(mockLoadAgentError()).rejects.toThrow('No agent found with ID');
  });
});

describe('Agent Version History', () => {
  let mongoServer;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    await Agent.deleteMany({});
  });

  test('should create an agent with a single entry in versions array', async () => {
    const agentId = `agent_${uuidv4()}`;
    const agent = await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: new mongoose.Types.ObjectId(),
    });

    expect(agent.versions).toBeDefined();
    expect(Array.isArray(agent.versions)).toBe(true);
    expect(agent.versions).toHaveLength(1);
    expect(agent.versions[0].name).toBe('Test Agent');
    expect(agent.versions[0].provider).toBe('test');
    expect(agent.versions[0].model).toBe('test-model');
  });

  test('should accumulate version history across multiple updates', async () => {
    const agentId = `agent_${uuidv4()}`;
    const author = new mongoose.Types.ObjectId();
    await createAgent({
      id: agentId,
      name: 'First Name',
      provider: 'test',
      model: 'test-model',
      author,
      description: 'First description',
    });

    await updateAgent({ id: agentId }, { name: 'Second Name', description: 'Second description' });
    await updateAgent({ id: agentId }, { name: 'Third Name', model: 'new-model' });
    const finalAgent = await updateAgent({ id: agentId }, { description: 'Final description' });

    expect(finalAgent.versions).toBeDefined();
    expect(Array.isArray(finalAgent.versions)).toBe(true);
    expect(finalAgent.versions).toHaveLength(4);

    expect(finalAgent.versions[0].name).toBe('First Name');
    expect(finalAgent.versions[0].description).toBe('First description');
    expect(finalAgent.versions[0].model).toBe('test-model');

    expect(finalAgent.versions[1].name).toBe('Second Name');
    expect(finalAgent.versions[1].description).toBe('Second description');
    expect(finalAgent.versions[1].model).toBe('test-model');

    expect(finalAgent.versions[2].name).toBe('Third Name');
    expect(finalAgent.versions[2].description).toBe('Second description');
    expect(finalAgent.versions[2].model).toBe('new-model');

    expect(finalAgent.versions[3].name).toBe('Third Name');
    expect(finalAgent.versions[3].description).toBe('Final description');
    expect(finalAgent.versions[3].model).toBe('new-model');

    expect(finalAgent.name).toBe('Third Name');
    expect(finalAgent.description).toBe('Final description');
    expect(finalAgent.model).toBe('new-model');
  });

  test('should not include metadata fields in version history', async () => {
    const agentId = `agent_${uuidv4()}`;
    await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: new mongoose.Types.ObjectId(),
    });

    const updatedAgent = await updateAgent({ id: agentId }, { description: 'New description' });

    expect(updatedAgent.versions).toHaveLength(2);
    expect(updatedAgent.versions[0]._id).toBeUndefined();
    expect(updatedAgent.versions[0].__v).toBeUndefined();
    expect(updatedAgent.versions[0].name).toBe('Test Agent');
    expect(updatedAgent.versions[0].author).toBeUndefined();

    expect(updatedAgent.versions[1]._id).toBeUndefined();
    expect(updatedAgent.versions[1].__v).toBeUndefined();
  });

  test('should not recursively include previous versions', async () => {
    const agentId = `agent_${uuidv4()}`;
    await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: new mongoose.Types.ObjectId(),
    });

    await updateAgent({ id: agentId }, { name: 'Updated Name 1' });
    await updateAgent({ id: agentId }, { name: 'Updated Name 2' });
    const finalAgent = await updateAgent({ id: agentId }, { name: 'Updated Name 3' });

    expect(finalAgent.versions).toHaveLength(4);

    finalAgent.versions.forEach((version) => {
expect(version.versions).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
test('should handle MongoDB operators and field updates correctly', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const authorId = new mongoose.Types.ObjectId();
|
||||
const projectId = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'MongoDB Operator Test',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: authorId,
|
||||
tools: ['tool1'],
|
||||
});
|
||||
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{
|
||||
description: 'Updated description',
|
||||
$push: { tools: 'tool2' },
|
||||
$addToSet: { projectIds: projectId },
|
||||
},
|
||||
);
|
||||
|
||||
const firstUpdate = await getAgent({ id: agentId });
|
||||
expect(firstUpdate.description).toBe('Updated description');
|
||||
expect(firstUpdate.tools).toContain('tool1');
|
||||
expect(firstUpdate.tools).toContain('tool2');
|
||||
expect(firstUpdate.projectIds.map((id) => id.toString())).toContain(projectId.toString());
|
||||
expect(firstUpdate.versions).toHaveLength(2);
|
||||
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{
|
||||
tools: ['tool2', 'tool3'],
|
||||
},
|
||||
);
|
||||
|
||||
const secondUpdate = await getAgent({ id: agentId });
|
||||
expect(secondUpdate.tools).toHaveLength(2);
|
||||
expect(secondUpdate.tools).toContain('tool2');
|
||||
expect(secondUpdate.tools).toContain('tool3');
|
||||
expect(secondUpdate.tools).not.toContain('tool1');
|
||||
expect(secondUpdate.versions).toHaveLength(3);
|
||||
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{
|
||||
$push: { tools: 'tool3' },
|
||||
},
|
||||
);
|
||||
|
||||
const thirdUpdate = await getAgent({ id: agentId });
|
||||
const toolCount = thirdUpdate.tools.filter((t) => t === 'tool3').length;
|
||||
expect(toolCount).toBe(2);
|
||||
expect(thirdUpdate.versions).toHaveLength(4);
|
||||
});
|
||||
|
||||
test('should handle parameter objects correctly', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const authorId = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Parameters Test',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: authorId,
|
||||
model_parameters: { temperature: 0.7 },
|
||||
});
|
||||
|
||||
const updatedAgent = await updateAgent(
|
||||
{ id: agentId },
|
||||
{ model_parameters: { temperature: 0.8 } },
|
||||
);
|
||||
|
||||
expect(updatedAgent.versions).toHaveLength(2);
|
||||
expect(updatedAgent.model_parameters.temperature).toBe(0.8);
|
||||
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{
|
||||
model_parameters: {
|
||||
temperature: 0.8,
|
||||
max_tokens: 1000,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const complexAgent = await getAgent({ id: agentId });
|
||||
expect(complexAgent.versions).toHaveLength(3);
|
||||
expect(complexAgent.model_parameters.temperature).toBe(0.8);
|
||||
expect(complexAgent.model_parameters.max_tokens).toBe(1000);
|
||||
|
||||
await updateAgent({ id: agentId }, { model_parameters: {} });
|
||||
|
||||
const emptyParamsAgent = await getAgent({ id: agentId });
|
||||
expect(emptyParamsAgent.versions).toHaveLength(4);
|
||||
expect(emptyParamsAgent.model_parameters).toEqual({});
|
||||
});
|
||||
|
||||
test('should detect duplicate versions and reject updates', async () => {
|
||||
const originalConsoleError = console.error;
|
||||
console.error = jest.fn();
|
||||
|
||||
try {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const authorId = new mongoose.Types.ObjectId();
|
||||
const projectId1 = new mongoose.Types.ObjectId();
|
||||
const projectId2 = new mongoose.Types.ObjectId();
|
||||
|
||||
const testCases = [
|
||||
{
|
||||
name: 'simple field update',
|
||||
initial: {
|
||||
name: 'Test Agent',
|
||||
description: 'Initial description',
|
||||
},
|
||||
update: { name: 'Updated Name' },
|
||||
duplicate: { name: 'Updated Name' },
|
||||
},
|
||||
{
|
||||
name: 'object field update',
|
||||
initial: {
|
||||
model_parameters: { temperature: 0.7 },
|
||||
},
|
||||
update: { model_parameters: { temperature: 0.8 } },
|
||||
duplicate: { model_parameters: { temperature: 0.8 } },
|
||||
},
|
||||
{
|
||||
name: 'array field update',
|
||||
initial: {
|
||||
tools: ['tool1', 'tool2'],
|
||||
},
|
||||
update: { tools: ['tool2', 'tool3'] },
|
||||
duplicate: { tools: ['tool2', 'tool3'] },
|
||||
},
|
||||
{
|
||||
name: 'projectIds update',
|
||||
initial: {
|
||||
projectIds: [projectId1],
|
||||
},
|
||||
update: { projectIds: [projectId1, projectId2] },
|
||||
duplicate: { projectIds: [projectId2, projectId1] },
|
||||
},
|
||||
];
|
||||
|
||||
for (const testCase of testCases) {
|
||||
const testAgentId = `agent_${uuidv4()}`;
|
||||
|
||||
await createAgent({
|
||||
id: testAgentId,
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: authorId,
|
||||
...testCase.initial,
|
||||
});
|
||||
|
||||
await updateAgent({ id: testAgentId }, testCase.update);
|
||||
|
||||
let error;
|
||||
try {
|
||||
await updateAgent({ id: testAgentId }, testCase.duplicate);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
|
||||
expect(error).toBeDefined();
|
||||
expect(error.message).toContain('Duplicate version');
|
||||
expect(error.statusCode).toBe(409);
|
||||
expect(error.details).toBeDefined();
|
||||
expect(error.details.duplicateVersion).toBeDefined();
|
||||
|
||||
const agent = await getAgent({ id: testAgentId });
|
||||
expect(agent.versions).toHaveLength(2);
|
||||
}
|
||||
} finally {
|
||||
console.error = originalConsoleError;
|
||||
}
|
||||
});
|
||||
|
||||
test('should track updatedBy when a different user updates an agent', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const originalAuthor = new mongoose.Types.ObjectId();
|
||||
const updatingUser = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Original Agent',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: originalAuthor,
|
||||
description: 'Original description',
|
||||
});
|
||||
|
||||
const updatedAgent = await updateAgent(
|
||||
{ id: agentId },
|
||||
{ name: 'Updated Agent', description: 'Updated description' },
|
||||
{ updatingUserId: updatingUser.toString() },
|
||||
);
|
||||
|
||||
expect(updatedAgent.versions).toHaveLength(2);
|
||||
expect(updatedAgent.versions[1].updatedBy.toString()).toBe(updatingUser.toString());
|
||||
expect(updatedAgent.author.toString()).toBe(originalAuthor.toString());
|
||||
});
|
||||
|
||||
test('should include updatedBy even when the original author updates the agent', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const originalAuthor = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Original Agent',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: originalAuthor,
|
||||
description: 'Original description',
|
||||
});
|
||||
|
||||
const updatedAgent = await updateAgent(
|
||||
{ id: agentId },
|
||||
{ name: 'Updated Agent', description: 'Updated description' },
|
||||
{ updatingUserId: originalAuthor.toString() },
|
||||
);
|
||||
|
||||
expect(updatedAgent.versions).toHaveLength(2);
|
||||
expect(updatedAgent.versions[1].updatedBy.toString()).toBe(originalAuthor.toString());
|
||||
expect(updatedAgent.author.toString()).toBe(originalAuthor.toString());
|
||||
});
|
||||
|
||||
test('should track multiple different users updating the same agent', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const originalAuthor = new mongoose.Types.ObjectId();
|
||||
const user1 = new mongoose.Types.ObjectId();
|
||||
const user2 = new mongoose.Types.ObjectId();
|
||||
const user3 = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Original Agent',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: originalAuthor,
|
||||
description: 'Original description',
|
||||
});
|
||||
|
||||
// User 1 makes an update
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{ name: 'Updated by User 1', description: 'First update' },
|
||||
{ updatingUserId: user1.toString() },
|
||||
);
|
||||
|
||||
// Original author makes an update
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{ description: 'Updated by original author' },
|
||||
{ updatingUserId: originalAuthor.toString() },
|
||||
);
|
||||
|
||||
// User 2 makes an update
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{ name: 'Updated by User 2', model: 'new-model' },
|
||||
{ updatingUserId: user2.toString() },
|
||||
);
|
||||
|
||||
// User 3 makes an update
|
||||
const finalAgent = await updateAgent(
|
||||
{ id: agentId },
|
||||
{ description: 'Final update by User 3' },
|
||||
{ updatingUserId: user3.toString() },
|
||||
);
|
||||
|
||||
expect(finalAgent.versions).toHaveLength(5);
|
||||
expect(finalAgent.author.toString()).toBe(originalAuthor.toString());
|
||||
|
||||
// Check that each version has the correct updatedBy
|
||||
expect(finalAgent.versions[0].updatedBy).toBeUndefined(); // Initial creation has no updatedBy
|
||||
expect(finalAgent.versions[1].updatedBy.toString()).toBe(user1.toString());
|
||||
expect(finalAgent.versions[2].updatedBy.toString()).toBe(originalAuthor.toString());
|
||||
expect(finalAgent.versions[3].updatedBy.toString()).toBe(user2.toString());
|
||||
expect(finalAgent.versions[4].updatedBy.toString()).toBe(user3.toString());
|
||||
|
||||
// Verify the final state
|
||||
expect(finalAgent.name).toBe('Updated by User 2');
|
||||
expect(finalAgent.description).toBe('Final update by User 3');
|
||||
expect(finalAgent.model).toBe('new-model');
|
||||
});
|
||||
|
||||
test('should preserve original author during agent restoration', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const originalAuthor = new mongoose.Types.ObjectId();
|
||||
const updatingUser = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Original Agent',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: originalAuthor,
|
||||
description: 'Original description',
|
||||
});
|
||||
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{ name: 'Updated Agent', description: 'Updated description' },
|
||||
{ updatingUserId: updatingUser.toString() },
|
||||
);
|
||||
|
||||
const { revertAgentVersion } = require('./Agent');
|
||||
const revertedAgent = await revertAgentVersion({ id: agentId }, 0);
|
||||
|
||||
expect(revertedAgent.author.toString()).toBe(originalAuthor.toString());
|
||||
expect(revertedAgent.name).toBe('Original Agent');
|
||||
expect(revertedAgent.description).toBe('Original description');
|
||||
});
|
||||
|
||||
test('should detect action metadata changes and force version update', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const authorId = new mongoose.Types.ObjectId();
|
||||
const actionId = 'testActionId123';
|
||||
|
||||
// Create agent with actions
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Agent with Actions',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: authorId,
|
||||
actions: [`test.com_action_${actionId}`],
|
||||
tools: ['listEvents_action_test.com', 'createEvent_action_test.com'],
|
||||
});
|
||||
|
||||
// First update with forceVersion should create a version
|
||||
const firstUpdate = await updateAgent(
|
||||
{ id: agentId },
|
||||
{ tools: ['listEvents_action_test.com', 'createEvent_action_test.com'] },
|
||||
{ updatingUserId: authorId.toString(), forceVersion: true },
|
||||
);
|
||||
|
||||
expect(firstUpdate.versions).toHaveLength(2);
|
||||
|
||||
// Second update with same data but forceVersion should still create a version
|
||||
const secondUpdate = await updateAgent(
|
||||
{ id: agentId },
|
||||
{ tools: ['listEvents_action_test.com', 'createEvent_action_test.com'] },
|
||||
{ updatingUserId: authorId.toString(), forceVersion: true },
|
||||
);
|
||||
|
||||
expect(secondUpdate.versions).toHaveLength(3);
|
||||
|
||||
// Update without forceVersion and no changes should not create a version
|
||||
let error;
|
||||
try {
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{ tools: ['listEvents_action_test.com', 'createEvent_action_test.com'] },
|
||||
{ updatingUserId: authorId.toString(), forceVersion: false },
|
||||
);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
|
||||
expect(error).toBeDefined();
|
||||
expect(error.message).toContain('Duplicate version');
|
||||
expect(error.statusCode).toBe(409);
|
||||
});
|
||||
});
|
||||
|
||||
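For illustration, a minimal sketch of how a caller could surface the duplicate-version rejection these tests exercise, assuming an Express-style handler (`req`/`res` are hypothetical) and the error shape asserted above (`statusCode: 409`, `details.duplicateVersion`):

async function updateAgentHandler(req, res) {
  try {
    const agent = await updateAgent({ id: req.params.id }, req.body, {
      updatingUserId: req.user.id,
    });
    res.json(agent);
  } catch (error) {
    if (error.statusCode === 409) {
      // No-op update: report the conflicting version instead of writing a duplicate.
      res.status(409).json({
        message: error.message,
        duplicateVersion: error.details?.duplicateVersion,
      });
    } else {
      throw error;
    }
  }
}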
@@ -1,5 +1,7 @@
const mongoose = require('mongoose');
const Assistant = require('~/db/models').Assistant;
const { assistantSchema } = require('@librechat/data-schemas');

const Assistant = mongoose.model('assistant', assistantSchema);

/**
 * Update an assistant with new data without overwriting existing properties,
4 api/models/Balance.js (new file)
@@ -0,0 +1,4 @@
const mongoose = require('mongoose');
const { balanceSchema } = require('@librechat/data-schemas');

module.exports = mongoose.model('Balance', balanceSchema);
@@ -1,7 +1,8 @@
const mongoose = require('mongoose');
const { logger } = require('@librechat/data-schemas');
const logger = require('~/config/winston');
const { bannerSchema } = require('@librechat/data-schemas');

const Banner = require('~/db/models').Banner;
const Banner = mongoose.model('Banner', bannerSchema);

/**
 * Retrieves the current active banner.
@@ -27,4 +28,4 @@ const getBanner = async (user) => {
  }
};

module.exports = { getBanner };
module.exports = { Banner, getBanner };
86 api/models/Config.js (new file)
@@ -0,0 +1,86 @@
const mongoose = require('mongoose');
const { logger } = require('~/config');

const major = [0, 0];
const minor = [0, 0];
const patch = [0, 5];

const configSchema = mongoose.Schema(
  {
    tag: {
      type: String,
      required: true,
      validate: {
        validator: function (tag) {
          const [part1, part2, part3] = tag.replace('v', '').split('.').map(Number);

          // Check if all parts are numbers
          if (isNaN(part1) || isNaN(part2) || isNaN(part3)) {
            return false;
          }

          // Check if all parts are within their respective ranges
          if (part1 < major[0] || part1 > major[1]) {
            return false;
          }
          if (part2 < minor[0] || part2 > minor[1]) {
            return false;
          }
          if (part3 < patch[0] || part3 > patch[1]) {
            return false;
          }
          return true;
        },
        message: 'Invalid tag value',
      },
    },
    searchEnabled: {
      type: Boolean,
      default: false,
    },
    usersEnabled: {
      type: Boolean,
      default: false,
    },
    startupCounts: {
      type: Number,
      default: 0,
    },
  },
  { timestamps: true },
);

// Instance method
configSchema.methods.incrementCount = function () {
  this.startupCounts += 1;
};

// Static methods
configSchema.statics.findByTag = async function (tag) {
  return await this.findOne({ tag }).lean();
};

configSchema.statics.updateByTag = async function (tag, update) {
  return await this.findOneAndUpdate({ tag }, update, { new: true });
};

const Config = mongoose.models.Config || mongoose.model('Config', configSchema);

module.exports = {
  getConfigs: async (filter) => {
    try {
      return await Config.find(filter).lean();
    } catch (error) {
      logger.error('Error getting configs', error);
      return { config: 'Error getting configs' };
    }
  },
  deleteConfigs: async (filter) => {
    try {
      return await Config.deleteMany(filter);
    } catch (error) {
      logger.error('Error deleting configs', error);
      return { config: 'Error deleting configs' };
    }
  },
};
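Given the `major`/`minor`/`patch` bounds above, the validator accepts only tags in the range v0.0.0 through v0.0.5. The `Config` model itself is not exported (only `getConfigs`/`deleteConfigs` are), but inside the module the statics could be exercised as in this sketch; the tag value is illustrative:

async function exampleConfigUsage() {
  // findByTag returns a lean (plain) object, or null when no document matches.
  const existing = await Config.findByTag('v0.0.5');

  // updateByTag updates an existing document and returns the updated version.
  const updated = await Config.updateByTag('v0.0.5', { searchEnabled: true });

  return { existing, updated };
}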
@@ -1,8 +1,6 @@
const mongoose = require('mongoose');
const { logger } = require('@librechat/data-schemas');
const Conversation = require('./schema/convoSchema');
const { getMessages, deleteMessages } = require('./Message');

const Conversation = require('~/db/models').Conversation;
const logger = require('~/config/winston');

/**
 * Searches for a conversation by conversationId and returns a lean document with only conversationId and user.
@@ -77,6 +75,7 @@ const getConvoFiles = async (conversationId) => {
};

module.exports = {
  Conversation,
  getConvoFiles,
  searchConversation,
  deleteNullOrEmptyConversations,
@@ -156,6 +155,7 @@ module.exports = {
    { cursor, limit = 25, isArchived = false, tags, search, order = 'desc' } = {},
  ) => {
    const filters = [{ user }];

    if (isArchived) {
      filters.push({ isArchived: true });
    } else {
@@ -288,6 +288,7 @@ module.exports = {
  deleteConvos: async (user, filter) => {
    try {
      const userFilter = { ...filter, user };

      const conversations = await Conversation.find(userFilter).select('conversationId');
      const conversationIds = conversations.map((c) => c.conversationId);
@@ -1,8 +1,10 @@
const mongoose = require('mongoose');
const { logger } = require('@librechat/data-schemas');
const Conversation = require('./schema/convoSchema');
const logger = require('~/config/winston');

const ConversationTag = require('~/db/models').ConversationTag;
const Conversation = require('~/db/models').Conversation;
const { conversationTagSchema } = require('@librechat/data-schemas');

const ConversationTag = mongoose.model('ConversationTag', conversationTagSchema);

/**
 * Retrieves all conversation tags for a user.
@@ -138,13 +140,13 @@ const adjustPositions = async (user, oldPosition, newPosition) => {
  const position =
    oldPosition < newPosition
      ? {
          $gt: Math.min(oldPosition, newPosition),
          $lte: Math.max(oldPosition, newPosition),
        }
      : {
          $gte: Math.min(oldPosition, newPosition),
          $lt: Math.max(oldPosition, newPosition),
        };

  await ConversationTag.updateMany(
    {
@@ -1,8 +1,9 @@
const mongoose = require('mongoose');
const { logger } = require('@librechat/data-schemas');
const { EToolResources } = require('librechat-data-provider');
const { fileSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');

const File = require('~/db/models').File;
const File = mongoose.model('File', fileSchema);

/**
 * Finds a file by its file_id with additional query options.
@@ -168,6 +169,7 @@ async function batchUpdateFiles(updates) {
}

module.exports = {
  File,
  findFileById,
  getFiles,
  getToolFilesByIds,
4 api/models/Key.js (new file)
@@ -0,0 +1,4 @@
const mongoose = require('mongoose');
const { keySchema } = require('@librechat/data-schemas');

module.exports = mongoose.model('Key', keySchema);
@@ -1,7 +1,7 @@
const { z } = require('zod');
const { logger } = require('@librechat/data-schemas');
const Message = require('./schema/messageSchema');
const { logger } = require('~/config');

const Message = require('~/db/models').Message;
const idSchema = z.string().uuid();

/**
@@ -68,6 +68,7 @@ async function saveMessage(req, params, metadata) {
    logger.info(`---\`saveMessage\` context: ${metadata?.context}`);
    update.tokenCount = 0;
  }

  const message = await Message.findOneAndUpdate(
    { messageId: params.messageId, user: req.user.id },
    update,
@@ -139,6 +140,7 @@ async function bulkSaveMessages(messages, overrideTimestamp = false) {
        upsert: true,
      },
    }));

    const result = await Message.bulkWrite(bulkOps);
    return result;
  } catch (err) {
@@ -353,6 +355,7 @@ async function deleteMessages(filter) {
}

module.exports = {
  Message,
  saveMessage,
  bulkSaveMessages,
  recordMessage,
@@ -1,3 +1,4 @@
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');

jest.mock('mongoose');
@@ -19,20 +20,14 @@ const mockSchema = {
  deleteMany: jest.fn(),
};

mongoose.model.mockReturnValue(mockSchema);

jest.mock('~/models/schema/messageSchema', () => mockSchema);

jest.mock('~/config/winston', () => ({
  error: jest.fn(),
}));

const mockModels = {
  Message: {
    findOneAndUpdate: mockSchema.findOneAndUpdate,
    updateOne: mockSchema.updateOne,
    findOne: mockSchema.findOne,
    find: mockSchema.find,
    deleteMany: mockSchema.deleteMany,
  },
};

const {
  saveMessage,
  getMessages,
@@ -158,7 +153,7 @@ describe('Message Operations', () => {
  });

  describe('Conversation Hijacking Prevention', () => {
    it("should not allow editing a message in another user's conversation", async () => {
    it('should not allow editing a message in another user\'s conversation', async () => {
      const attackerReq = { user: { id: 'attacker123' } };
      const victimConversationId = 'victim-convo-123';
      const victimMessageId = 'victim-msg-123';
@@ -180,7 +175,7 @@ describe('Message Operations', () => {
      );
    });

    it("should not allow deleting messages from another user's conversation", async () => {
    it('should not allow deleting messages from another user\'s conversation', async () => {
      const attackerReq = { user: { id: 'attacker123' } };
      const victimConversationId = 'victim-convo-123';
      const victimMessageId = 'victim-msg-123';
@@ -198,7 +193,7 @@ describe('Message Operations', () => {
    });
  });

    it("should not allow inserting a new message into another user's conversation", async () => {
    it('should not allow inserting a new message into another user\'s conversation', async () => {
      const attackerReq = { user: { id: 'attacker123' } };
      const victimConversationId = uuidv4(); // Use a valid UUID
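These hijacking-prevention specs pin down the pattern visible in the saveMessage hunk above: every write is scoped to the requesting user. A minimal sketch of that guard, reusing the filter shape from the diff (the options object here is an assumption, not shown in the hunk):

async function scopedMessageUpdate(req, params, update) {
  // Scoping the filter by req.user.id means an attacker's update against another
  // user's messageId matches zero documents instead of mutating them.
  return await Message.findOneAndUpdate(
    { messageId: params.messageId, user: req.user.id },
    update,
    { new: true }, // assumed option; the hunk is truncated before the options object
  );
}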
@@ -1,7 +1,5 @@
const mongoose = require('mongoose');
const { logger } = require('@librechat/data-schemas');

const Preset = require('~/db/models').Preset;
const Preset = require('./schema/presetSchema');
const { logger } = require('~/config');

const getPreset = async (user, presetId) => {
  try {
@@ -13,6 +11,7 @@ const getPreset = async (user, presetId) => {
};

module.exports = {
  Preset,
  getPreset,
  getPresets: async (user, filter) => {
    try {
@@ -1,7 +1,8 @@
const mongoose = require('mongoose');
const { model } = require('mongoose');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
const { projectSchema } = require('@librechat/data-schemas');

const Project = require('~/db/models').Project;
const Project = model('Project', projectSchema);

/**
 * Retrieve a project by ID and convert the found project document to a plain object.
@@ -1,6 +1,5 @@
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { logger } = require('@librechat/data-schemas');
const { SystemRoles, SystemCategories, Constants } = require('librechat-data-provider');
const {
  getProjectByName,
@@ -8,10 +7,12 @@ const {
  removeGroupIdsFromProject,
  removeGroupFromAllProjects,
} = require('./Project');
const { promptGroupSchema, promptSchema } = require('@librechat/data-schemas');
const { escapeRegExp } = require('~/server/utils');
const { logger } = require('~/config');

const PromptGroup = require('~/db/models').PromptGroup;
const Prompt = require('~/db/models').Prompt;
const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);
const Prompt = mongoose.model('Prompt', promptSchema);

/**
 * Create a pipeline for the aggregation to get prompt groups
@@ -1,3 +1,4 @@
const mongoose = require('mongoose');
const {
  CacheKeys,
  SystemRoles,
@@ -6,10 +7,11 @@ const {
  permissionsSchema,
  removeNullishValues,
} = require('librechat-data-provider');
const { logger } = require('@librechat/data-schemas');
const getLogStores = require('~/cache/getLogStores');
const { roleSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');

const Role = require('~/db/models').Role;
const Role = mongoose.model('Role', roleSchema);

/**
 * Retrieve a role by name and convert the found role document to a plain object.
@@ -280,6 +282,7 @@ const migrateRoleSchema = async function (roleName) {
};

module.exports = {
  Role,
  getRoleByName,
  initializeRoles,
  updateRoleByName,
@@ -6,11 +6,9 @@ const {
  roleDefaults,
  PermissionTypes,
} = require('librechat-data-provider');
const { getRoleByName, updateAccessPermissions, initializeRoles } = require('~/models/Role');
const { Role, getRoleByName, updateAccessPermissions, initializeRoles } = require('~/models/Role');
const getLogStores = require('~/cache/getLogStores');

const Role = require('~/db/models').Role;

// Mock the cache
jest.mock('~/cache/getLogStores', () =>
  jest.fn().mockReturnValue({
275 api/models/Session.js (new file)
@@ -0,0 +1,275 @@
const mongoose = require('mongoose');
const signPayload = require('~/server/services/signPayload');
const { hashToken } = require('~/server/utils/crypto');
const { sessionSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');

const Session = mongoose.model('Session', sessionSchema);

const { REFRESH_TOKEN_EXPIRY } = process.env ?? {};
const expires = eval(REFRESH_TOKEN_EXPIRY) ?? 1000 * 60 * 60 * 24 * 7; // 7 days default

/**
 * Error class for Session-related errors
 */
class SessionError extends Error {
  constructor(message, code = 'SESSION_ERROR') {
    super(message);
    this.name = 'SessionError';
    this.code = code;
  }
}

/**
 * Creates a new session for a user
 * @param {string} userId - The ID of the user
 * @param {Object} options - Additional options for session creation
 * @param {Date} options.expiration - Custom expiration date
 * @returns {Promise<{session: Session, refreshToken: string}>}
 * @throws {SessionError}
 */
const createSession = async (userId, options = {}) => {
  if (!userId) {
    throw new SessionError('User ID is required', 'INVALID_USER_ID');
  }

  try {
    const session = new Session({
      user: userId,
      expiration: options.expiration || new Date(Date.now() + expires),
    });
    const refreshToken = await generateRefreshToken(session);
    return { session, refreshToken };
  } catch (error) {
    logger.error('[createSession] Error creating session:', error);
    throw new SessionError('Failed to create session', 'CREATE_SESSION_FAILED');
  }
};

/**
 * Finds a session by various parameters
 * @param {Object} params - Search parameters
 * @param {string} [params.refreshToken] - The refresh token to search by
 * @param {string} [params.userId] - The user ID to search by
 * @param {string} [params.sessionId] - The session ID to search by
 * @param {Object} [options] - Additional options
 * @param {boolean} [options.lean=true] - Whether to return plain objects instead of documents
 * @returns {Promise<Session|null>}
 * @throws {SessionError}
 */
const findSession = async (params, options = { lean: true }) => {
  try {
    const query = {};

    if (!params.refreshToken && !params.userId && !params.sessionId) {
      throw new SessionError('At least one search parameter is required', 'INVALID_SEARCH_PARAMS');
    }

    if (params.refreshToken) {
      const tokenHash = await hashToken(params.refreshToken);
      query.refreshTokenHash = tokenHash;
    }

    if (params.userId) {
      query.user = params.userId;
    }

    if (params.sessionId) {
      const sessionId = params.sessionId.sessionId || params.sessionId;
      if (!mongoose.Types.ObjectId.isValid(sessionId)) {
        throw new SessionError('Invalid session ID format', 'INVALID_SESSION_ID');
      }
      query._id = sessionId;
    }

    // Add expiration check to only return valid sessions
    query.expiration = { $gt: new Date() };

    const sessionQuery = Session.findOne(query);

    if (options.lean) {
      return await sessionQuery.lean();
    }

    return await sessionQuery.exec();
  } catch (error) {
    logger.error('[findSession] Error finding session:', error);
    throw new SessionError('Failed to find session', 'FIND_SESSION_FAILED');
  }
};

/**
 * Updates session expiration
 * @param {Session|string} session - The session or session ID to update
 * @param {Date} [newExpiration] - Optional new expiration date
 * @returns {Promise<Session>}
 * @throws {SessionError}
 */
const updateExpiration = async (session, newExpiration) => {
  try {
    const sessionDoc = typeof session === 'string' ? await Session.findById(session) : session;

    if (!sessionDoc) {
      throw new SessionError('Session not found', 'SESSION_NOT_FOUND');
    }

    sessionDoc.expiration = newExpiration || new Date(Date.now() + expires);
    return await sessionDoc.save();
  } catch (error) {
    logger.error('[updateExpiration] Error updating session:', error);
    throw new SessionError('Failed to update session expiration', 'UPDATE_EXPIRATION_FAILED');
  }
};

/**
 * Deletes a session by refresh token or session ID
 * @param {Object} params - Delete parameters
 * @param {string} [params.refreshToken] - The refresh token of the session to delete
 * @param {string} [params.sessionId] - The ID of the session to delete
 * @returns {Promise<Object>}
 * @throws {SessionError}
 */
const deleteSession = async (params) => {
  try {
    if (!params.refreshToken && !params.sessionId) {
      throw new SessionError(
        'Either refreshToken or sessionId is required',
        'INVALID_DELETE_PARAMS',
      );
    }

    const query = {};

    if (params.refreshToken) {
      query.refreshTokenHash = await hashToken(params.refreshToken);
    }

    if (params.sessionId) {
      query._id = params.sessionId;
    }

    const result = await Session.deleteOne(query);

    if (result.deletedCount === 0) {
      logger.warn('[deleteSession] No session found to delete');
    }

    return result;
  } catch (error) {
    logger.error('[deleteSession] Error deleting session:', error);
    throw new SessionError('Failed to delete session', 'DELETE_SESSION_FAILED');
  }
};

/**
 * Deletes all sessions for a user
 * @param {string} userId - The ID of the user
 * @param {Object} [options] - Additional options
 * @param {boolean} [options.excludeCurrentSession] - Whether to exclude the current session
 * @param {string} [options.currentSessionId] - The ID of the current session to exclude
 * @returns {Promise<Object>}
 * @throws {SessionError}
 */
const deleteAllUserSessions = async (userId, options = {}) => {
  try {
    if (!userId) {
      throw new SessionError('User ID is required', 'INVALID_USER_ID');
    }

    // Extract userId if it's passed as an object
    const userIdString = userId.userId || userId;

    if (!mongoose.Types.ObjectId.isValid(userIdString)) {
      throw new SessionError('Invalid user ID format', 'INVALID_USER_ID_FORMAT');
    }

    const query = { user: userIdString };

    if (options.excludeCurrentSession && options.currentSessionId) {
      query._id = { $ne: options.currentSessionId };
    }

    const result = await Session.deleteMany(query);

    if (result.deletedCount > 0) {
      logger.debug(
        `[deleteAllUserSessions] Deleted ${result.deletedCount} sessions for user ${userIdString}.`,
      );
    }

    return result;
  } catch (error) {
    logger.error('[deleteAllUserSessions] Error deleting user sessions:', error);
    throw new SessionError('Failed to delete user sessions', 'DELETE_ALL_SESSIONS_FAILED');
  }
};

/**
 * Generates a refresh token for a session
 * @param {Session} session - The session to generate a token for
 * @returns {Promise<string>}
 * @throws {SessionError}
 */
const generateRefreshToken = async (session) => {
  if (!session || !session.user) {
    throw new SessionError('Invalid session object', 'INVALID_SESSION');
  }

  try {
    const expiresIn = session.expiration ? session.expiration.getTime() : Date.now() + expires;

    if (!session.expiration) {
      session.expiration = new Date(expiresIn);
    }

    const refreshToken = await signPayload({
      payload: {
        id: session.user,
        sessionId: session._id,
      },
      secret: process.env.JWT_REFRESH_SECRET,
      expirationTime: Math.floor((expiresIn - Date.now()) / 1000),
    });

    session.refreshTokenHash = await hashToken(refreshToken);
    await session.save();

    return refreshToken;
  } catch (error) {
    logger.error('[generateRefreshToken] Error generating refresh token:', error);
    throw new SessionError('Failed to generate refresh token', 'GENERATE_TOKEN_FAILED');
  }
};

/**
 * Counts active sessions for a user
 * @param {string} userId - The ID of the user
 * @returns {Promise<number>}
 * @throws {SessionError}
 */
const countActiveSessions = async (userId) => {
  try {
    if (!userId) {
      throw new SessionError('User ID is required', 'INVALID_USER_ID');
    }

    return await Session.countDocuments({
      user: userId,
      expiration: { $gt: new Date() },
    });
  } catch (error) {
    logger.error('[countActiveSessions] Error counting active sessions:', error);
    throw new SessionError('Failed to count active sessions', 'COUNT_SESSIONS_FAILED');
  }
};

module.exports = {
  createSession,
  findSession,
  updateExpiration,
  deleteSession,
  deleteAllUserSessions,
  generateRefreshToken,
  countActiveSessions,
  SessionError,
};
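A sketch of the intended call flow for the session helpers above, assuming a connected mongoose instance and JWT_REFRESH_SECRET set in the environment:

async function exampleSessionFlow(userId) {
  // Creates the session and signs a refresh token; only the token's hash is persisted.
  const { session, refreshToken } = await createSession(userId);

  // Later: resolve the session from a presented refresh token.
  // findSession only returns sessions whose expiration is still in the future.
  const found = await findSession({ refreshToken });

  // Sliding expiration: push the expiry forward by the default window.
  await updateExpiration(session);

  // "Log out everywhere else": delete all of the user's sessions except this one.
  await deleteAllUserSessions(userId, {
    excludeCurrentSession: true,
    currentSessionId: session._id,
  });

  return found;
}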
@@ -1,11 +1,11 @@
const mongoose = require('mongoose');
const { nanoid } = require('nanoid');
const { Constants } = require('librechat-data-provider');
const { logger } = require('@librechat/data-schemas');
const { Conversation } = require('~/models/Conversation');
const { shareSchema } = require('@librechat/data-schemas');
const SharedLink = mongoose.model('SharedLink', shareSchema);
const { getMessages } = require('./Message');

const Conversation = require('~/db/models').Conversation;
const SharedLink = require('~/db/models').SharedLink;
const logger = require('~/config/winston');

class ShareServiceError extends Error {
  constructor(message, code) {
@@ -202,6 +202,7 @@ async function createSharedLink(user, conversationId) {
  if (!user || !conversationId) {
    throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
  }

  try {
    const [existingShare, conversationMessages] = await Promise.all([
      SharedLink.findOne({ conversationId, isPublic: true }).select('-_id -__v -user').lean(),
@@ -339,6 +340,7 @@ async function deleteSharedLink(user, shareId) {
}

module.exports = {
  SharedLink,
  getSharedLink,
  getSharedLinks,
  createSharedLink,
@@ -1,5 +1,158 @@
const { findToken, updateToken, createToken } = require('~/models');
const mongoose = require('mongoose');
const { encryptV2 } = require('~/server/utils/crypto');
const { tokenSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');

/**
 * Token model.
 * @type {mongoose.Model}
 */
const Token = mongoose.model('Token', tokenSchema);
/**
 * Fixes the indexes for the Token collection from legacy TTL indexes to the new expiresAt index.
 */
async function fixIndexes() {
  try {
    if (
      process.env.NODE_ENV === 'CI' ||
      process.env.NODE_ENV === 'development' ||
      process.env.NODE_ENV === 'test'
    ) {
      return;
    }
    const indexes = await Token.collection.indexes();
    logger.debug('Existing Token Indexes:', JSON.stringify(indexes, null, 2));
    const unwantedTTLIndexes = indexes.filter(
      (index) => index.key.createdAt === 1 && index.expireAfterSeconds !== undefined,
    );
    if (unwantedTTLIndexes.length === 0) {
      logger.debug('No unwanted Token indexes found.');
      return;
    }
    for (const index of unwantedTTLIndexes) {
      logger.debug(`Dropping unwanted Token index: ${index.name}`);
      await Token.collection.dropIndex(index.name);
      logger.debug(`Dropped Token index: ${index.name}`);
    }
    logger.debug('Token index cleanup completed successfully.');
  } catch (error) {
    logger.error('An error occurred while fixing Token indexes:', error);
  }
}

fixIndexes();

/**
 * Creates a new Token instance.
 * @param {Object} tokenData - The data for the new Token.
 * @param {mongoose.Types.ObjectId} tokenData.userId - The user's ID. It is required.
 * @param {String} tokenData.email - The user's email.
 * @param {String} tokenData.token - The token. It is required.
 * @param {Number} tokenData.expiresIn - The number of seconds until the token expires.
 * @returns {Promise<mongoose.Document>} The new Token instance.
 * @throws Will throw an error if token creation fails.
 */
async function createToken(tokenData) {
  try {
    const currentTime = new Date();
    const expiresAt = new Date(currentTime.getTime() + tokenData.expiresIn * 1000);

    const newTokenData = {
      ...tokenData,
      createdAt: currentTime,
      expiresAt,
    };

    return await Token.create(newTokenData);
  } catch (error) {
    logger.debug('An error occurred while creating token:', error);
    throw error;
  }
}

/**
 * Finds a Token document that matches the provided query.
 * @param {Object} query - The query to match against.
 * @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
 * @param {String} query.token - The token value.
 * @param {String} [query.email] - The email of the user.
 * @param {String} [query.identifier] - Unique, alternative identifier for the token.
 * @returns {Promise<Object|null>} The matched Token document, or null if not found.
 * @throws Will throw an error if the find operation fails.
 */
async function findToken(query) {
  try {
    const conditions = [];

    if (query.userId) {
      conditions.push({ userId: query.userId });
    }
    if (query.token) {
      conditions.push({ token: query.token });
    }
    if (query.email) {
      conditions.push({ email: query.email });
    }
    if (query.identifier) {
      conditions.push({ identifier: query.identifier });
    }

    const token = await Token.findOne({
      $and: conditions,
    }).lean();

    return token;
  } catch (error) {
    logger.debug('An error occurred while finding token:', error);
    throw error;
  }
}

/**
 * Updates a Token document that matches the provided query.
 * @param {Object} query - The query to match against.
 * @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
 * @param {String} query.token - The token value.
 * @param {String} [query.email] - The email of the user.
 * @param {String} [query.identifier] - Unique, alternative identifier for the token.
 * @param {Object} updateData - The data to update the Token with.
 * @returns {Promise<mongoose.Document|null>} The updated Token document, or null if not found.
 * @throws Will throw an error if the update operation fails.
 */
async function updateToken(query, updateData) {
  try {
    return await Token.findOneAndUpdate(query, updateData, { new: true });
  } catch (error) {
    logger.debug('An error occurred while updating token:', error);
    throw error;
  }
}

/**
 * Deletes all Token documents that match the provided token, user ID, or email.
 * @param {Object} query - The query to match against.
 * @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
 * @param {String} query.token - The token value.
 * @param {String} [query.email] - The email of the user.
 * @param {String} [query.identifier] - Unique, alternative identifier for the token.
 * @returns {Promise<Object>} The result of the delete operation.
 * @throws Will throw an error if the delete operation fails.
 */
async function deleteTokens(query) {
  try {
    return await Token.deleteMany({
      $or: [
        { userId: query.userId },
        { token: query.token },
        { email: query.email },
        { identifier: query.identifier },
      ],
    });
  } catch (error) {
    logger.debug('An error occurred while deleting tokens:', error);
    throw error;
  }
}

/**
 * Handles the OAuth token by creating or updating the token.
@@ -38,5 +191,9 @@ async function handleOAuthToken({
}

module.exports = {
  findToken,
  createToken,
  updateToken,
  deleteTokens,
  handleOAuthToken,
};
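A sketch of how the token helpers compose, e.g. for a short-lived verification token (the identifier and token values are illustrative):

async function exampleTokenFlow(userId) {
  // expiresIn is in seconds; createToken converts it to an absolute expiresAt date.
  await createToken({
    userId,
    token: 'opaque-token-value',
    identifier: 'email-verification',
    expiresIn: 15 * 60,
  });

  // findToken ANDs together whichever fields are provided.
  const record = await findToken({ userId, identifier: 'email-verification' });

  // deleteTokens ORs the provided fields, removing every match.
  await deleteTokens({ userId });

  return record;
}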
@@ -1,6 +1,6 @@
const mongoose = require('mongoose');

const ToolCall = require('~/db/models').ToolCall;
const { toolCallSchema } = require('@librechat/data-schemas');
const ToolCall = mongoose.model('ToolCall', toolCallSchema);

/**
 * Create a new tool call
@@ -1,10 +1,9 @@
|
||||
const mongoose = require('mongoose');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { transactionSchema } = require('@librechat/data-schemas');
|
||||
const { getBalanceConfig } = require('~/server/services/Config');
|
||||
const { getMultiplier, getCacheMultiplier } = require('./tx');
|
||||
|
||||
const Transaction = require('~/db/models').Transaction;
|
||||
const Balance = require('~/db/models').Balance;
|
||||
const { logger } = require('~/config');
|
||||
const Balance = require('./Balance');
|
||||
|
||||
const cancelRate = 1.15;
|
||||
|
||||
@@ -141,19 +140,19 @@ const updateBalance = async ({ user, incrementValue, setValues }) => {
|
||||
};
|
||||
|
||||
/** Method to calculate and set the tokenValue for a transaction */
|
||||
function calculateTokenValue(txn) {
|
||||
if (!txn.valueKey || !txn.tokenType) {
|
||||
txn.tokenValue = txn.rawAmount;
|
||||
transactionSchema.methods.calculateTokenValue = function () {
|
||||
if (!this.valueKey || !this.tokenType) {
|
||||
this.tokenValue = this.rawAmount;
|
||||
}
|
||||
const { valueKey, tokenType, model, endpointTokenConfig } = txn;
|
||||
const { valueKey, tokenType, model, endpointTokenConfig } = this;
|
||||
const multiplier = Math.abs(getMultiplier({ valueKey, tokenType, model, endpointTokenConfig }));
|
||||
txn.rate = multiplier;
|
||||
txn.tokenValue = txn.rawAmount * multiplier;
|
||||
if (txn.context && txn.tokenType === 'completion' && txn.context === 'incomplete') {
|
||||
txn.tokenValue = Math.ceil(txn.tokenValue * cancelRate);
|
||||
txn.rate *= cancelRate;
|
||||
this.rate = multiplier;
|
||||
this.tokenValue = this.rawAmount * multiplier;
|
||||
if (this.context && this.tokenType === 'completion' && this.context === 'incomplete') {
|
||||
this.tokenValue = Math.ceil(this.tokenValue * cancelRate);
|
||||
this.rate *= cancelRate;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* New static method to create an auto-refill transaction that does NOT trigger a balance update.
|
||||
@@ -164,13 +163,13 @@ function calculateTokenValue(txn) {
|
||||
* @param {number} txData.rawAmount - The raw amount of tokens.
|
||||
* @returns {Promise<object>} - The created transaction.
|
||||
*/
|
||||
async function createAutoRefillTransaction(txData) {
|
||||
transactionSchema.statics.createAutoRefillTransaction = async function (txData) {
|
||||
if (txData.rawAmount != null && isNaN(txData.rawAmount)) {
|
||||
return;
|
||||
}
|
||||
const transaction = new Transaction(txData);
|
||||
const transaction = new this(txData);
|
||||
transaction.endpointTokenConfig = txData.endpointTokenConfig;
|
||||
calculateTokenValue(transaction);
|
||||
transaction.calculateTokenValue();
|
||||
await transaction.save();
|
||||
|
||||
const balanceResponse = await updateBalance({
|
||||
@@ -186,20 +185,21 @@ async function createAutoRefillTransaction(txData) {
|
||||
logger.debug('[Balance.check] Auto-refill performed', result);
|
||||
result.transaction = transaction;
|
||||
return result;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Static method to create a transaction and update the balance
|
||||
* @param {txData} txData - Transaction data.
|
||||
*/
|
||||
async function createTransaction(txData) {
|
||||
transactionSchema.statics.create = async function (txData) {
|
||||
const Transaction = this;
|
||||
if (txData.rawAmount != null && isNaN(txData.rawAmount)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const transaction = new Transaction(txData);
|
||||
transaction.endpointTokenConfig = txData.endpointTokenConfig;
|
||||
calculateTokenValue(transaction);
|
||||
transaction.calculateTokenValue();
|
||||
|
||||
await transaction.save();
|
||||
|
||||
@@ -209,6 +209,7 @@ async function createTransaction(txData) {
|
||||
}
|
||||
|
||||
let incrementValue = transaction.tokenValue;
|
||||
|
||||
const balanceResponse = await updateBalance({
|
||||
user: transaction.user,
|
||||
incrementValue,
|
||||
@@ -220,19 +221,21 @@ async function createTransaction(txData) {
|
||||
balance: balanceResponse.tokenCredits,
|
||||
[transaction.tokenType]: incrementValue,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Static method to create a structured transaction and update the balance
|
||||
* @param {txData} txData - Transaction data.
|
||||
*/
|
||||
async function createStructuredTransaction(txData) {
|
||||
transactionSchema.statics.createStructured = async function (txData) {
|
||||
const Transaction = this;
|
||||
|
||||
const transaction = new Transaction({
|
||||
...txData,
|
||||
endpointTokenConfig: txData.endpointTokenConfig,
|
||||
});
|
||||
|
||||
calculateStructuredTokenValue(transaction);
|
||||
transaction.calculateStructuredTokenValue();
|
||||
|
||||
await transaction.save();
|
||||
|
||||
@@ -254,69 +257,71 @@ async function createStructuredTransaction(txData) {
|
||||
balance: balanceResponse.tokenCredits,
|
||||
[transaction.tokenType]: incrementValue,
};
}
};

/** Method to calculate token value for structured tokens */
function calculateStructuredTokenValue(txn) {
if (!txn.tokenType) {
txn.tokenValue = txn.rawAmount;
transactionSchema.methods.calculateStructuredTokenValue = function () {
if (!this.tokenType) {
this.tokenValue = this.rawAmount;
return;
}

const { model, endpointTokenConfig } = txn;
const { model, endpointTokenConfig } = this;

if (txn.tokenType === 'prompt') {
if (this.tokenType === 'prompt') {
const inputMultiplier = getMultiplier({ tokenType: 'prompt', model, endpointTokenConfig });
const writeMultiplier =
getCacheMultiplier({ cacheType: 'write', model, endpointTokenConfig }) ?? inputMultiplier;
const readMultiplier =
getCacheMultiplier({ cacheType: 'read', model, endpointTokenConfig }) ?? inputMultiplier;

txn.rateDetail = {
this.rateDetail = {
input: inputMultiplier,
write: writeMultiplier,
read: readMultiplier,
};

const totalPromptTokens =
Math.abs(txn.inputTokens || 0) +
Math.abs(txn.writeTokens || 0) +
Math.abs(txn.readTokens || 0);
Math.abs(this.inputTokens || 0) +
Math.abs(this.writeTokens || 0) +
Math.abs(this.readTokens || 0);

if (totalPromptTokens > 0) {
txn.rate =
(Math.abs(inputMultiplier * (txn.inputTokens || 0)) +
Math.abs(writeMultiplier * (txn.writeTokens || 0)) +
Math.abs(readMultiplier * (txn.readTokens || 0))) /
this.rate =
(Math.abs(inputMultiplier * (this.inputTokens || 0)) +
Math.abs(writeMultiplier * (this.writeTokens || 0)) +
Math.abs(readMultiplier * (this.readTokens || 0))) /
totalPromptTokens;
} else {
txn.rate = Math.abs(inputMultiplier); // Default to input rate if no tokens
this.rate = Math.abs(inputMultiplier); // Default to input rate if no tokens
}

txn.tokenValue = -(
Math.abs(txn.inputTokens || 0) * inputMultiplier +
Math.abs(txn.writeTokens || 0) * writeMultiplier +
Math.abs(txn.readTokens || 0) * readMultiplier
this.tokenValue = -(
Math.abs(this.inputTokens || 0) * inputMultiplier +
Math.abs(this.writeTokens || 0) * writeMultiplier +
Math.abs(this.readTokens || 0) * readMultiplier
);

txn.rawAmount = -totalPromptTokens;
} else if (txn.tokenType === 'completion') {
const multiplier = getMultiplier({ tokenType: txn.tokenType, model, endpointTokenConfig });
txn.rate = Math.abs(multiplier);
txn.tokenValue = -Math.abs(txn.rawAmount) * multiplier;
txn.rawAmount = -Math.abs(txn.rawAmount);
this.rawAmount = -totalPromptTokens;
} else if (this.tokenType === 'completion') {
const multiplier = getMultiplier({ tokenType: this.tokenType, model, endpointTokenConfig });
this.rate = Math.abs(multiplier);
this.tokenValue = -Math.abs(this.rawAmount) * multiplier;
this.rawAmount = -Math.abs(this.rawAmount);
}

if (txn.context && txn.tokenType === 'completion' && txn.context === 'incomplete') {
txn.tokenValue = Math.ceil(txn.tokenValue * cancelRate);
txn.rate *= cancelRate;
if (txn.rateDetail) {
txn.rateDetail = Object.fromEntries(
Object.entries(txn.rateDetail).map(([k, v]) => [k, v * cancelRate]),
if (this.context && this.tokenType === 'completion' && this.context === 'incomplete') {
this.tokenValue = Math.ceil(this.tokenValue * cancelRate);
this.rate *= cancelRate;
if (this.rateDetail) {
this.rateDetail = Object.fromEntries(
Object.entries(this.rateDetail).map(([k, v]) => [k, v * cancelRate]),
);
}
}
}
};

const Transaction = mongoose.model('Transaction', transactionSchema);

/**
* Queries and retrieves transactions based on a given filter.
@@ -335,9 +340,4 @@ async function getTransactions(filter) {
}
}

module.exports = {
getTransactions,
createTransaction,
createAutoRefillTransaction,
createStructuredTransaction,
};
module.exports = { Transaction, getTransactions };

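As a worked example of the structured-token math above (illustrative only; the 3 / 3.75 / 0.3 multipliers are the claude-sonnet-4 prompt, cache-write, and cache-read rates listed later in this diff):

// Sketch: blended rate for a prompt with cached segments
const inputTokens = 1000;
const writeTokens = 200;
const readTokens = 800;
const tokenValue = -(inputTokens * 3 + writeTokens * 3.75 + readTokens * 0.3); // -3990
const rate = 3990 / (inputTokens + writeTokens + readTokens); // 1.995, the weighted per-token rate
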
@@ -3,13 +3,14 @@ const { MongoMemoryServer } = require('mongodb-memory-server');
const { spendTokens, spendStructuredTokens } = require('./spendTokens');
const { getBalanceConfig } = require('~/server/services/Config');
const { getMultiplier, getCacheMultiplier } = require('./tx');
const { createTransaction } = require('./Transaction');
const Balance = require('~/db/models').Balance;
const { Transaction } = require('./Transaction');
const Balance = require('./Balance');

// Mock the custom config module so we can control the balance flag.
jest.mock('~/server/services/Config');

let mongoServer;

beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
@@ -367,7 +368,7 @@ describe('NaN Handling Tests', () => {
};

// Act
const result = await createTransaction(txData);
const result = await Transaction.create(txData);

// Assert: No transaction should be created and balance remains unchanged.
expect(result).toBeUndefined();

6
api/models/User.js
Normal file
@@ -0,0 +1,6 @@
const mongoose = require('mongoose');
const { userSchema } = require('@librechat/data-schemas');

const User = mongoose.model('User', userSchema);

module.exports = User;

@@ -1,11 +1,9 @@
const mongoose = require('mongoose');
const { logger } = require('@librechat/data-schemas');
const { ViolationTypes } = require('librechat-data-provider');
const { createAutoRefillTransaction } = require('./Transaction');
const { Transaction } = require('./Transaction');
const { logViolation } = require('~/cache');
const { getMultiplier } = require('./tx');

const Balance = require('~/db/models').Balance;
const { logger } = require('~/config');
const Balance = require('./Balance');

function isInvalidDate(date) {
return isNaN(date);
@@ -62,7 +60,7 @@ const checkBalanceRecord = async function ({
) {
try {
/** @type {{ rate: number, user: string, balance: number, transaction: import('@librechat/data-schemas').ITransaction}} */
const result = await createAutoRefillTransaction({
const result = await Transaction.createAutoRefillTransaction({
user: user,
tokenType: 'credits',
context: 'autoRefill',

@@ -1,8 +1,6 @@
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { getMessages, bulkSaveMessages } = require('./Message');

const Message = require('~/db/models').Message;
const { Message, getMessages, bulkSaveMessages } = require('./Message');

// Original version of buildTree function
function buildTree({ messages, fileMap }) {
@@ -44,6 +42,7 @@ function buildTree({ messages, fileMap }) {
}

let mongod;

beforeAll(async () => {
mongod = await MongoMemoryServer.create();
const uri = mongod.getUri();

@@ -1,7 +1,13 @@
const mongoose = require('mongoose');
const { createMethods } = require('@librechat/data-schemas');
const methods = createMethods(mongoose);
const { comparePassword } = require('./userMethods');
const {
comparePassword,
deleteUserById,
generateToken,
getUserById,
updateUser,
createUser,
countUsers,
findUser,
} = require('./userMethods');
const {
findFileById,
createFile,
@@ -20,12 +26,32 @@ const {
deleteMessagesSince,
deleteMessages,
} = require('./Message');
const {
createSession,
findSession,
updateExpiration,
deleteSession,
deleteAllUserSessions,
generateRefreshToken,
countActiveSessions,
} = require('./Session');
const { getConvoTitle, getConvo, saveConvo, deleteConvos } = require('./Conversation');
const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset');
const { createToken, findToken, updateToken, deleteTokens } = require('./Token');
const Balance = require('./Balance');
const User = require('./User');
const Key = require('./Key');

module.exports = {
...methods,
comparePassword,
deleteUserById,
generateToken,
getUserById,
updateUser,
createUser,
countUsers,
findUser,

findFileById,
createFile,
updateFile,
@@ -51,4 +77,21 @@ module.exports = {
getPresets,
savePreset,
deletePresets,

createToken,
findToken,
updateToken,
deleteTokens,

createSession,
findSession,
updateExpiration,
deleteSession,
deleteAllUserSessions,
generateRefreshToken,
countActiveSessions,

User,
Key,
Balance,
};

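Downstream code imports from this barrel rather than from individual model files, e.g. (as the auth controllers later in this diff do):

const { findUser, getUserById, deleteAllUserSessions, findSession } = require('~/models');
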
@@ -1,7 +1,7 @@
const mongoose = require('mongoose');
const { logger, hashToken } = require('@librechat/data-schemas');
const { getRandomValues } = require('~/server/utils/crypto');
const { createToken, findToken } = require('~/models');
const { getRandomValues, hashToken } = require('~/server/utils/crypto');
const { createToken, findToken } = require('./Token');
const logger = require('~/config/winston');

/**
* @module inviteUser

@@ -1,89 +1,34 @@
import _ from 'lodash';
import { MeiliSearch, Index } from 'meilisearch';
import mongoose, { Schema, Document, Model, Query } from 'mongoose';
import logger from '~/config/meiliLogger';

interface MongoMeiliOptions {
host: string;
apiKey: string;
indexName: string;
primaryKey: string;
}

interface MeiliIndexable {
[key: string]: unknown;
_meiliIndex?: boolean;
}

interface ContentItem {
type: string;
text?: string;
}

interface DocumentWithMeiliIndex extends Document {
_meiliIndex?: boolean;
preprocessObjectForIndex?: () => Record<string, unknown>;
addObjectToMeili?: () => Promise<void>;
updateObjectToMeili?: () => Promise<void>;
deleteObjectFromMeili?: () => Promise<void>;
postSaveHook?: () => void;
postUpdateHook?: () => void;
postRemoveHook?: () => void;
conversationId?: string;
content?: ContentItem[];
messageId?: string;
unfinished?: boolean;
messages?: unknown[];
title?: string;
toJSON(): Record<string, unknown>;
}

interface SchemaWithMeiliMethods extends Model<DocumentWithMeiliIndex> {
syncWithMeili(): Promise<void>;
setMeiliIndexSettings(settings: Record<string, unknown>): Promise<unknown>;
meiliSearch(q: string, params: Record<string, unknown>, populate: boolean): Promise<unknown>;
}
const _ = require('lodash');
const mongoose = require('mongoose');
const { MeiliSearch } = require('meilisearch');
const { parseTextParts, ContentTypes } = require('librechat-data-provider');
const { cleanUpPrimaryKeyValue } = require('~/lib/utils/misc');
const logger = require('~/config/meiliLogger');

// Environment flags
/**
* Flag to indicate if search is enabled based on environment variables.
* @type {boolean}
*/
const searchEnabled = process.env.SEARCH != null && process.env.SEARCH.toLowerCase() === 'true';
const searchEnabled = process.env.SEARCH && process.env.SEARCH.toLowerCase() === 'true';

/**
* Flag to indicate if MeiliSearch is enabled based on required environment variables.
* @type {boolean}
*/
const meiliEnabled =
process.env.MEILI_HOST != null && process.env.MEILI_MASTER_KEY != null && searchEnabled;

/**
* Local implementation of parseTextParts to avoid dependency on librechat-data-provider
* Extracts text content from an array of content items
*/
const parseTextParts = (content: ContentItem[]): string => {
if (!Array.isArray(content)) {
return '';
}

return content
.filter((item) => item.type === 'text' && typeof item.text === 'string')
.map((item) => item.text)
.join(' ')
.trim();
};

/**
* Local implementation to handle Bing convoId conversion
*/
const cleanUpPrimaryKeyValue = (value: string): string => {
return value.replace(/--/g, '|');
};
const meiliEnabled = process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY && searchEnabled;

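Both flags key off the same environment variables; a minimal sketch of the .env needed to enable indexing (host and key values are placeholders):

SEARCH=true
MEILI_HOST=http://0.0.0.0:7700
MEILI_MASTER_KEY=your_master_key
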
/**
* Validates the required options for configuring the mongoMeili plugin.
*
* @param {Object} options - The configuration options.
* @param {string} options.host - The MeiliSearch host.
* @param {string} options.apiKey - The MeiliSearch API key.
* @param {string} options.indexName - The name of the index.
* @throws {Error} Throws an error if any required option is missing.
*/
const validateOptions = (options: Partial<MongoMeiliOptions>): void => {
const requiredKeys: (keyof MongoMeiliOptions)[] = ['host', 'apiKey', 'indexName'];
const validateOptions = function (options) {
const requiredKeys = ['host', 'apiKey', 'indexName'];
requiredKeys.forEach((key) => {
if (!options[key]) {
throw new Error(`Missing mongoMeili Option: ${key}`);
@@ -96,18 +41,13 @@ const validateOptions = (options: Partial<MongoMeiliOptions>): void => {
* This class contains static and instance methods to synchronize and manage the MeiliSearch index
* corresponding to the MongoDB collection.
*
* @param config - Configuration object.
* @param config.index - The MeiliSearch index object.
* @param config.attributesToIndex - List of attributes to index.
* @returns A class definition that will be loaded into the Mongoose schema.
* @param {Object} config - Configuration object.
* @param {Object} config.index - The MeiliSearch index object.
* @param {Array<string>} config.attributesToIndex - List of attributes to index.
* @returns {Function} A class definition that will be loaded into the Mongoose schema.
*/
const createMeiliMongooseModel = ({
index,
attributesToIndex,
}: {
index: Index<MeiliIndexable>;
attributesToIndex: string[];
}) => {
const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
// The primary key is assumed to be the first attribute in the attributesToIndex array.
const primaryKey = attributesToIndex[0];

class MeiliMongooseModel {
@@ -128,24 +68,24 @@ const createMeiliMongooseModel = ({
*
* @returns {Promise<void>} Resolves when the synchronization is complete.
*/
static async syncWithMeili(this: SchemaWithMeiliMethods): Promise<void> {
static async syncWithMeili() {
try {
let moreDocuments = true;
// Retrieve all MongoDB documents from the collection as plain JavaScript objects.
const mongoDocuments = await this.find().lean();

const format = (doc: Record<string, unknown>) =>
// Helper function to format a document by selecting only the attributes to index
// and omitting keys starting with '$'.
const format = (doc) =>
_.omitBy(_.pick(doc, attributesToIndex), (v, k) => k.startsWith('$'));

const mongoMap = new Map(
mongoDocuments.map((doc) => {
const typedDoc = doc as Record<string, unknown>;
return [typedDoc[primaryKey], format(typedDoc)];
}),
);
const indexMap = new Map<unknown, Record<string, unknown>>();
// Build a map of MongoDB documents for quick lookup based on the primary key.
const mongoMap = new Map(mongoDocuments.map((doc) => [doc[primaryKey], format(doc)]));
const indexMap = new Map();
let offset = 0;
const batchSize = 1000;

// Fetch documents from the MeiliSearch index in batches.
while (moreDocuments) {
const batch = await index.getDocuments({ limit: batchSize, offset });
if (batch.results.length === 0) {
@@ -159,22 +99,17 @@ const createMeiliMongooseModel = ({

logger.debug('[syncWithMeili]', { indexMap: indexMap.size, mongoMap: mongoMap.size });

const updateOps: Array<{
updateOne: {
filter: Record<string, unknown>;
update: { $set: { _meiliIndex: boolean } };
};
}> = [];
const updateOps = [];

// Process documents present in the MeiliSearch index
// Process documents present in the MeiliSearch index.
for (const [id, doc] of indexMap) {
const update: Record<string, unknown> = {};
const update = {};
update[primaryKey] = id;
if (mongoMap.has(id)) {
const mongoDoc = mongoMap.get(id);
// If document exists in MongoDB, check for discrepancies in key fields.
if (
(doc.text && doc.text !== mongoDoc?.text) ||
(doc.title && doc.title !== mongoDoc?.title)
(doc.text && doc.text !== mongoMap.get(id).text) ||
(doc.title && doc.title !== mongoMap.get(id).title)
) {
logger.debug(
`[syncWithMeili] ${id} had document discrepancy in ${
@@ -187,29 +122,33 @@ const createMeiliMongooseModel = ({
await index.addDocuments([doc]);
}
} else {
await index.deleteDocument(id as string);
// If the document does not exist in MongoDB, delete it from MeiliSearch.
await index.deleteDocument(id);
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: false } } },
});
}
}

// Process documents present in MongoDB
// Process documents present in MongoDB.
for (const [id, doc] of mongoMap) {
const update: Record<string, unknown> = {};
const update = {};
update[primaryKey] = id;
// If the document is missing in the Meili index, add it.
if (!indexMap.has(id)) {
await index.addDocuments([doc]);
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
});
} else if (doc._meiliIndex === false) {
// If the document exists but is marked as not indexed, update the flag.
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
});
}
}

// Execute bulk update operations in MongoDB to update the _meiliIndex flags.
if (updateOps.length > 0) {
await this.collection.bulkWrite(updateOps);
logger.debug(
@@ -224,30 +163,33 @@ const createMeiliMongooseModel = ({
}

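Once the plugin is applied, a one-off reconciliation can be triggered from any indexed model — a hypothetical maintenance snippet, assuming the Conversation model defined later in this diff:

// Reconcile MongoDB and MeiliSearch once at startup (sketch)
const Conversation = require('~/models/schema/convoSchema');
await Conversation.syncWithMeili();
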
/**
* Updates settings for the MeiliSearch index
* Updates settings for the MeiliSearch index.
*
* @param {Object} settings - The settings to update on the MeiliSearch index.
* @returns {Promise<Object>} Promise resolving to the update result.
*/
static async setMeiliIndexSettings(settings: Record<string, unknown>): Promise<unknown> {
static async setMeiliIndexSettings(settings) {
return await index.updateSettings(settings);
}

/**
* Searches the MeiliSearch index and optionally populates results
* Searches the MeiliSearch index and optionally populates the results with data from MongoDB.
*
* @param {string} q - The search query.
* @param {Object} params - Additional search parameters for MeiliSearch.
* @param {boolean} populate - Whether to populate search hits with full MongoDB documents.
* @returns {Promise<Object>} The search results with populated hits if requested.
*/
static async meiliSearch(
this: SchemaWithMeiliMethods,
q: string,
params: Record<string, unknown>,
populate: boolean,
): Promise<unknown> {
static async meiliSearch(q, params, populate) {
const data = await index.search(q, params);

if (populate) {
const query: Record<string, unknown> = {};
query[primaryKey] = _.map(data.hits, (hit) =>
cleanUpPrimaryKeyValue(hit[primaryKey] as string),
);
// Build a query using the primary key values from the search hits.
const query = {};
query[primaryKey] = _.map(data.hits, (hit) => cleanUpPrimaryKeyValue(hit[primaryKey]));

const projection = Object.keys(this.schema.obj).reduce<Record<string, number>>(
// Build a projection object, including only keys that do not start with '$'.
const projection = Object.keys(this.schema.obj).reduce(
(results, key) => {
if (!key.startsWith('$')) {
results[key] = 1;
@@ -257,18 +199,17 @@ const createMeiliMongooseModel = ({
{ _id: 1, __v: 1 },
);

// Retrieve the full documents from MongoDB.
const hitsFromMongoose = await this.find(query, projection).lean();

const populatedHits = data.hits.map((hit) => {
const queryObj: Record<string, unknown> = {};
queryObj[primaryKey] = hit[primaryKey];
const originalHit = _.find(hitsFromMongoose, (item) => {
const typedItem = item as Record<string, unknown>;
return typedItem[primaryKey] === hit[primaryKey];
});
// Merge the MongoDB documents with the search hits.
const populatedHits = data.hits.map(function (hit) {
const query = {};
query[primaryKey] = hit[primaryKey];
const originalHit = _.find(hitsFromMongoose, query);

return {
...(originalHit && typeof originalHit === 'object' ? originalHit : {}),
...(originalHit ?? {}),
...hit,
};
});
@@ -279,18 +220,21 @@ const createMeiliMongooseModel = ({
}

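A hypothetical call site for the method above (the params object is passed through to MeiliSearch's search API):

// Search the messages index and hydrate hits from MongoDB
const results = await Message.meiliSearch('deployment error', { limit: 20 }, true);
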
/**
* Preprocesses the current document for indexing
* Preprocesses the current document for indexing.
*
* This method:
* - Picks only the defined attributes to index.
* - Omits any keys starting with '$'.
* - Replaces pipe characters ('|') in `conversationId` with '--'.
* - Extracts and concatenates text from an array of content items.
*
* @returns {Object} The preprocessed object ready for indexing.
*/
preprocessObjectForIndex(this: DocumentWithMeiliIndex): Record<string, unknown> {
preprocessObjectForIndex() {
const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
k.startsWith('$'),
);

if (
object.conversationId &&
typeof object.conversationId === 'string' &&
object.conversationId.includes('|')
) {
if (object.conversationId && object.conversationId.includes('|')) {
object.conversationId = object.conversationId.replace(/\|/g, '--');
}

@@ -303,26 +247,31 @@ const createMeiliMongooseModel = ({
}

/**
* Adds the current document to the MeiliSearch index
* Adds the current document to the MeiliSearch index.
*
* The method preprocesses the document, adds it to MeiliSearch, and then updates
* the MongoDB document's `_meiliIndex` flag to true.
*
* @returns {Promise<void>}
*/
async addObjectToMeili(this: DocumentWithMeiliIndex): Promise<void> {
const object = this.preprocessObjectForIndex!();
async addObjectToMeili() {
const object = this.preprocessObjectForIndex();
try {
await index.addDocuments([object]);
} catch (error) {
// Error handling can be enhanced as needed.
logger.error('[addObjectToMeili] Error adding document to Meili', error);
}

await this.collection.updateMany(
{ _id: this._id as mongoose.Types.ObjectId },
{ $set: { _meiliIndex: true } },
);
await this.collection.updateMany({ _id: this._id }, { $set: { _meiliIndex: true } });
}

/**
* Updates the current document in the MeiliSearch index
* Updates the current document in the MeiliSearch index.
*
* @returns {Promise<void>}
*/
async updateObjectToMeili(this: DocumentWithMeiliIndex): Promise<void> {
async updateObjectToMeili() {
const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
k.startsWith('$'),
);
@@ -334,8 +283,8 @@ const createMeiliMongooseModel = ({
*
* @returns {Promise<void>}
*/
async deleteObjectFromMeili(this: DocumentWithMeiliIndex): Promise<void> {
await index.deleteDocument(this._id as string);
async deleteObjectFromMeili() {
await index.deleteDocument(this._id);
}

/**
@@ -344,11 +293,11 @@ const createMeiliMongooseModel = ({
* If the document is already indexed (i.e. `_meiliIndex` is true), it updates it;
* otherwise, it adds the document to the index.
*/
postSaveHook(this: DocumentWithMeiliIndex): void {
postSaveHook() {
if (this._meiliIndex) {
this.updateObjectToMeili!();
this.updateObjectToMeili();
} else {
this.addObjectToMeili!();
this.addObjectToMeili();
}
}

@@ -358,9 +307,9 @@ const createMeiliMongooseModel = ({
* This hook is triggered after a document update, ensuring that changes are
* propagated to the MeiliSearch index if the document is indexed.
*/
postUpdateHook(this: DocumentWithMeiliIndex): void {
postUpdateHook() {
if (this._meiliIndex) {
this.updateObjectToMeili!();
this.updateObjectToMeili();
}
}

@@ -370,9 +319,9 @@ const createMeiliMongooseModel = ({
* This hook is triggered after a document is removed, ensuring that the document
* is also removed from the MeiliSearch index if it was previously indexed.
*/
postRemoveHook(this: DocumentWithMeiliIndex): void {
postRemoveHook() {
if (this._meiliIndex) {
this.deleteObjectFromMeili!();
this.deleteObjectFromMeili();
}
}
}
@@ -390,14 +339,14 @@ const createMeiliMongooseModel = ({
* - Loads class methods for syncing, searching, and managing documents in MeiliSearch.
* - Registers Mongoose hooks (post-save, post-update, post-remove, etc.) to maintain index consistency.
*
* @param schema - The Mongoose schema to which the plugin is applied.
* @param options - Configuration options.
* @param options.host - The MeiliSearch host.
* @param options.apiKey - The MeiliSearch API key.
* @param options.indexName - The name of the MeiliSearch index.
* @param options.primaryKey - The primary key field for indexing.
* @param {mongoose.Schema} schema - The Mongoose schema to which the plugin is applied.
* @param {Object} options - Configuration options.
* @param {string} options.host - The MeiliSearch host.
* @param {string} options.apiKey - The MeiliSearch API key.
* @param {string} options.indexName - The name of the MeiliSearch index.
* @param {string} options.primaryKey - The primary key field for indexing.
*/
export default function mongoMeili(schema: Schema, options: MongoMeiliOptions): void {
module.exports = function mongoMeili(schema, options) {
validateOptions(options);

// Add _meiliIndex field to the schema to track if a document has been indexed in MeiliSearch.
@@ -412,31 +361,44 @@ export default function mongoMeili(schema: Schema, options: MongoMeiliOptions):

const { host, apiKey, indexName, primaryKey } = options;

// Setup the MeiliSearch client.
const client = new MeiliSearch({ host, apiKey });
client.createIndex(indexName, { primaryKey });
const index = client.index<MeiliIndexable>(indexName);

// Collect attributes from the schema that should be indexed
const attributesToIndex: string[] = [
...Object.entries(schema.obj).reduce<string[]>((results, [key, value]) => {
const schemaValue = value as { meiliIndex?: boolean };
return schemaValue.meiliIndex ? [...results, key] : results;
}, []),
// Create the index asynchronously if it doesn't exist.
client.createIndex(indexName, { primaryKey });

// Setup the MeiliSearch index for this schema.
const index = client.index(indexName);

// Collect attributes from the schema that should be indexed.
const attributesToIndex = [
..._.reduce(
schema.obj,
function (results, value, key) {
return value.meiliIndex ? [...results, key] : results;
},
[],
),
];

schema.loadClass(createMeiliMongooseModel({ index, attributesToIndex }));
// Load the class methods into the schema.
schema.loadClass(createMeiliMongooseModel({ index, indexName, client, attributesToIndex }));

// Register Mongoose hooks
schema.post('save', function (doc: DocumentWithMeiliIndex) {
doc.postSaveHook?.();
// Register Mongoose hooks to synchronize with MeiliSearch.

// Post-save: synchronize after a document is saved.
schema.post('save', function (doc) {
doc.postSaveHook();
});

schema.post('updateOne', function (doc: DocumentWithMeiliIndex) {
doc.postUpdateHook?.();
// Post-update: synchronize after a document is updated.
schema.post('update', function (doc) {
doc.postUpdateHook();
});

schema.post('deleteOne', function (doc: DocumentWithMeiliIndex) {
doc.postRemoveHook?.();
// Post-remove: synchronize after a document is removed.
schema.post('remove', function (doc) {
doc.postRemoveHook();
});

// Pre-deleteMany hook: remove corresponding documents from MeiliSearch when multiple documents are deleted.
@@ -446,28 +408,22 @@ export default function mongoMeili(schema: Schema, options: MongoMeiliOptions):
}

try {
const conditions = (this as Query<unknown, unknown>).getQuery();

// Check if the schema has a "messages" field to determine if it's a conversation schema.
if (Object.prototype.hasOwnProperty.call(schema.obj, 'messages')) {
const convoIndex = client.index('convos');
const deletedConvos = await mongoose
.model('Conversation')
.find(conditions as mongoose.FilterQuery<unknown>)
.lean();
const promises = deletedConvos.map((convo: Record<string, unknown>) =>
convoIndex.deleteDocument(convo.conversationId as string),
const deletedConvos = await mongoose.model('Conversation').find(this._conditions).lean();
const promises = deletedConvos.map((convo) =>
convoIndex.deleteDocument(convo.conversationId),
);
await Promise.all(promises);
}

// Check if the schema has a "messageId" field to determine if it's a message schema.
if (Object.prototype.hasOwnProperty.call(schema.obj, 'messageId')) {
const messageIndex = client.index('messages');
const deletedMessages = await mongoose
.model('Message')
.find(conditions as mongoose.FilterQuery<unknown>)
.lean();
const promises = deletedMessages.map((message: Record<string, unknown>) =>
messageIndex.deleteDocument(message.messageId as string),
const deletedMessages = await mongoose.model('Message').find(this._conditions).lean();
const promises = deletedMessages.map((message) =>
messageIndex.deleteDocument(message.messageId),
);
await Promise.all(promises);
}
@@ -483,33 +439,37 @@ export default function mongoMeili(schema: Schema, options: MongoMeiliOptions):
}
});

// Post-findOneAndUpdate hook
schema.post('findOneAndUpdate', async function (doc: DocumentWithMeiliIndex) {
// Post-findOneAndUpdate hook: update MeiliSearch index after a document is updated via findOneAndUpdate.
schema.post('findOneAndUpdate', async function (doc) {
if (!meiliEnabled) {
return;
}

// If the document is unfinished, do not update the index.
if (doc.unfinished) {
return;
}

let meiliDoc: Record<string, unknown> | undefined;
let meiliDoc;
// For conversation documents, try to fetch the document from the "convos" index.
if (doc.messages) {
try {
meiliDoc = await client.index('convos').getDocument(doc.conversationId as string);
} catch (error: unknown) {
meiliDoc = await client.index('convos').getDocument(doc.conversationId);
} catch (error) {
logger.debug(
'[MeiliMongooseModel.findOneAndUpdate] Convo not found in MeiliSearch and will index ' +
doc.conversationId,
error as Record<string, unknown>,
error,
);
}
}

// If the MeiliSearch document exists and the title is unchanged, do nothing.
if (meiliDoc && meiliDoc.title === doc.title) {
return;
}

doc.postSaveHook?.();
// Otherwise, trigger a post-save hook to synchronize the document.
doc.postSaveHook();
});
}
};
18
api/models/schema/convoSchema.js
Normal file
@@ -0,0 +1,18 @@
const mongoose = require('mongoose');
const mongoMeili = require('../plugins/mongoMeili');

const { convoSchema } = require('@librechat/data-schemas');

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
convoSchema.plugin(mongoMeili, {
host: process.env.MEILI_HOST,
apiKey: process.env.MEILI_MASTER_KEY,
/** Note: Will get created automatically if it doesn't exist already */
indexName: 'convos',
primaryKey: 'conversationId',
});
}

const Conversation = mongoose.models.Conversation || mongoose.model('Conversation', convoSchema);

module.exports = Conversation;
16
api/models/schema/messageSchema.js
Normal file
@@ -0,0 +1,16 @@
const mongoose = require('mongoose');
const mongoMeili = require('~/models/plugins/mongoMeili');
const { messageSchema } = require('@librechat/data-schemas');

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
messageSchema.plugin(mongoMeili, {
host: process.env.MEILI_HOST,
apiKey: process.env.MEILI_MASTER_KEY,
indexName: 'messages',
primaryKey: 'messageId',
});
}

const Message = mongoose.models.Message || mongoose.model('Message', messageSchema);

module.exports = Message;
6
api/models/schema/pluginAuthSchema.js
Normal file
@@ -0,0 +1,6 @@
const mongoose = require('mongoose');
const { pluginAuthSchema } = require('@librechat/data-schemas');

const PluginAuth = mongoose.models.Plugin || mongoose.model('PluginAuth', pluginAuthSchema);

module.exports = PluginAuth;
6
api/models/schema/presetSchema.js
Normal file
@@ -0,0 +1,6 @@
const mongoose = require('mongoose');
const { presetSchema } = require('@librechat/data-schemas');

const Preset = mongoose.models.Preset || mongoose.model('Preset', presetSchema);

module.exports = Preset;
@@ -1,5 +1,6 @@
const { Transaction } = require('./Transaction');
const { logger } = require('~/config');
const { createTransaction, createStructuredTransaction } = require('./Transaction');

/**
* Creates up to two transactions to record the spending of tokens.
*
@@ -32,7 +33,7 @@ const spendTokens = async (txData, tokenUsage) => {
let prompt, completion;
try {
if (promptTokens !== undefined) {
prompt = await createTransaction({
prompt = await Transaction.create({
...txData,
tokenType: 'prompt',
rawAmount: promptTokens === 0 ? 0 : -Math.max(promptTokens, 0),
@@ -40,7 +41,7 @@ const spendTokens = async (txData, tokenUsage) => {
}

if (completionTokens !== undefined) {
completion = await createTransaction({
completion = await Transaction.create({
...txData,
tokenType: 'completion',
rawAmount: completionTokens === 0 ? 0 : -Math.max(completionTokens, 0),
@@ -100,7 +101,7 @@ const spendStructuredTokens = async (txData, tokenUsage) => {
try {
if (promptTokens) {
const { input = 0, write = 0, read = 0 } = promptTokens;
prompt = await createStructuredTransaction({
prompt = await Transaction.createStructured({
...txData,
tokenType: 'prompt',
inputTokens: -input,
@@ -110,7 +111,7 @@ const spendStructuredTokens = async (txData, tokenUsage) => {
}

if (completionTokens) {
completion = await createTransaction({
completion = await Transaction.create({
...txData,
tokenType: 'completion',
rawAmount: -completionTokens,

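Call sites pass request metadata plus a token-usage object; a sketch with illustrative values (field names follow the tests below):

// Record prompt + completion spend after a reply finishes (sketch)
await spendTokens(
{ user: userId, conversationId: 'test-convo-3', model: 'gpt-4', context: 'message' },
{ promptTokens: 300, completionTokens: 120 },
);
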
@@ -1,9 +1,8 @@
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { Transaction } = require('./Transaction');
const Balance = require('./Balance');
const { spendTokens, spendStructuredTokens } = require('./spendTokens');
const { createTransaction, createAutoRefillTransaction } = require('./Transaction');
const Transaction = require('~/db/models').Transaction;
const Balance = require('~/db/models').Balance;

// Mock the logger to prevent console output during tests
jest.mock('~/config', () => ({
@@ -23,7 +22,8 @@ describe('spendTokens', () => {

beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
await mongoose.connect(mongoServer.getUri());
const mongoUri = mongoServer.getUri();
await mongoose.connect(mongoUri);
});

afterAll(async () => {
@@ -197,7 +197,7 @@ describe('spendTokens', () => {
// Check that the transaction records show the adjusted values
const transactionResults = await Promise.all(
transactions.map((t) =>
createTransaction({
Transaction.create({
...txData,
tokenType: t.tokenType,
rawAmount: t.rawAmount,
@@ -280,7 +280,7 @@ describe('spendTokens', () => {

// Check the return values from Transaction.create directly
// This is to verify that the incrementValue is not becoming positive
const directResult = await createTransaction({
const directResult = await Transaction.create({
user: userId,
conversationId: 'test-convo-3',
model: 'gpt-4',
@@ -607,7 +607,7 @@ describe('spendTokens', () => {
const promises = [];
for (let i = 0; i < numberOfRefills; i++) {
promises.push(
createAutoRefillTransaction({
Transaction.createAutoRefillTransaction({
user: userId,
tokenType: 'credits',
context: 'concurrent-refill-test',

@@ -100,8 +100,6 @@ const tokenValues = Object.assign(
'claude-3-5-haiku': { prompt: 0.8, completion: 4 },
'claude-3.5-haiku': { prompt: 0.8, completion: 4 },
'claude-3-haiku': { prompt: 0.25, completion: 1.25 },
'claude-sonnet-4': { prompt: 3, completion: 15 },
'claude-opus-4': { prompt: 15, completion: 75 },
'claude-2.1': { prompt: 8, completion: 24 },
'claude-2': { prompt: 8, completion: 24 },
'claude-instant': { prompt: 0.8, completion: 2.4 },
@@ -113,15 +111,10 @@ const tokenValues = Object.assign(
/* cohere doesn't have rates for the older command models,
so this was from https://artificialanalysis.ai/models/command-light/providers */
command: { prompt: 0.38, completion: 0.38 },
gemma: { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
'gemma-2': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
'gemma-3': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
'gemma-3-27b': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
'gemini-2.0-flash-lite': { prompt: 0.075, completion: 0.3 },
'gemini-2.0-flash': { prompt: 0.1, completion: 0.4 },
'gemini-2.0': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
'gemini-2.5-pro': { prompt: 1.25, completion: 10 },
'gemini-2.5-flash': { prompt: 0.15, completion: 3.5 },
'gemini-2.5-pro-preview-03-25': { prompt: 1.25, completion: 10 },
'gemini-2.5': { prompt: 0, completion: 0 }, // Free for a period of time
'gemini-1.5-flash-8b': { prompt: 0.075, completion: 0.3 },
'gemini-1.5-flash': { prompt: 0.15, completion: 0.6 },
@@ -164,8 +157,6 @@ const cacheTokenValues = {
'claude-3.5-haiku': { write: 1, read: 0.08 },
'claude-3-5-haiku': { write: 1, read: 0.08 },
'claude-3-haiku': { write: 0.3, read: 0.03 },
'claude-sonnet-4': { write: 3.75, read: 0.3 },
'claude-opus-4': { write: 18.75, read: 1.5 },
};

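These tables feed getMultiplier and getCacheMultiplier directly; for instance (values taken from the entries above, usage mirroring the tests below):

getMultiplier({ model: 'claude-opus-4', tokenType: 'completion' }); // 75
getCacheMultiplier({ model: 'claude-sonnet-4', cacheType: 'read' }); // 0.3
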
/**
@@ -488,9 +488,6 @@ describe('getCacheMultiplier', () => {

describe('Google Model Tests', () => {
const googleModels = [
'gemini-2.5-pro-preview-05-06',
'gemini-2.5-flash-preview-04-17',
'gemini-2.5-exp',
'gemini-2.0-flash-lite-preview-02-05',
'gemini-2.0-flash-001',
'gemini-2.0-flash-exp',
@@ -528,9 +525,6 @@ describe('Google Model Tests', () => {

it('should map to the correct model keys', () => {
const expected = {
'gemini-2.5-pro-preview-05-06': 'gemini-2.5-pro',
'gemini-2.5-flash-preview-04-17': 'gemini-2.5-flash',
'gemini-2.5-exp': 'gemini-2.5',
'gemini-2.0-flash-lite-preview-02-05': 'gemini-2.0-flash-lite',
'gemini-2.0-flash-001': 'gemini-2.0-flash',
'gemini-2.0-flash-exp': 'gemini-2.0-flash',
@@ -664,97 +658,3 @@ describe('Grok Model Tests - Pricing', () => {
});
});
});

describe('Claude Model Tests', () => {
it('should return correct prompt and completion rates for Claude 4 models', () => {
expect(getMultiplier({ model: 'claude-sonnet-4', tokenType: 'prompt' })).toBe(
tokenValues['claude-sonnet-4'].prompt,
);
expect(getMultiplier({ model: 'claude-sonnet-4', tokenType: 'completion' })).toBe(
tokenValues['claude-sonnet-4'].completion,
);
expect(getMultiplier({ model: 'claude-opus-4', tokenType: 'prompt' })).toBe(
tokenValues['claude-opus-4'].prompt,
);
expect(getMultiplier({ model: 'claude-opus-4', tokenType: 'completion' })).toBe(
tokenValues['claude-opus-4'].completion,
);
});

it('should handle Claude 4 model name variations with different prefixes and suffixes', () => {
const modelVariations = [
'claude-sonnet-4',
'claude-sonnet-4-20240229',
'claude-sonnet-4-latest',
'anthropic/claude-sonnet-4',
'claude-sonnet-4/anthropic',
'claude-sonnet-4-preview',
'claude-sonnet-4-20240229-preview',
'claude-opus-4',
'claude-opus-4-20240229',
'claude-opus-4-latest',
'anthropic/claude-opus-4',
'claude-opus-4/anthropic',
'claude-opus-4-preview',
'claude-opus-4-20240229-preview',
];

modelVariations.forEach((model) => {
const valueKey = getValueKey(model);
const isSonnet = model.includes('sonnet');
const expectedKey = isSonnet ? 'claude-sonnet-4' : 'claude-opus-4';

expect(valueKey).toBe(expectedKey);
expect(getMultiplier({ model, tokenType: 'prompt' })).toBe(tokenValues[expectedKey].prompt);
expect(getMultiplier({ model, tokenType: 'completion' })).toBe(
tokenValues[expectedKey].completion,
);
});
});

it('should return correct cache rates for Claude 4 models', () => {
expect(getCacheMultiplier({ model: 'claude-sonnet-4', cacheType: 'write' })).toBe(
cacheTokenValues['claude-sonnet-4'].write,
);
expect(getCacheMultiplier({ model: 'claude-sonnet-4', cacheType: 'read' })).toBe(
cacheTokenValues['claude-sonnet-4'].read,
);
expect(getCacheMultiplier({ model: 'claude-opus-4', cacheType: 'write' })).toBe(
cacheTokenValues['claude-opus-4'].write,
);
expect(getCacheMultiplier({ model: 'claude-opus-4', cacheType: 'read' })).toBe(
cacheTokenValues['claude-opus-4'].read,
);
});

it('should handle Claude 4 model cache rates with different prefixes and suffixes', () => {
const modelVariations = [
'claude-sonnet-4',
'claude-sonnet-4-20240229',
'claude-sonnet-4-latest',
'anthropic/claude-sonnet-4',
'claude-sonnet-4/anthropic',
'claude-sonnet-4-preview',
'claude-sonnet-4-20240229-preview',
'claude-opus-4',
'claude-opus-4-20240229',
'claude-opus-4-latest',
'anthropic/claude-opus-4',
'claude-opus-4/anthropic',
'claude-opus-4-preview',
'claude-opus-4-20240229-preview',
];

modelVariations.forEach((model) => {
const isSonnet = model.includes('sonnet');
const expectedKey = isSonnet ? 'claude-sonnet-4' : 'claude-opus-4';

expect(getCacheMultiplier({ model, cacheType: 'write' })).toBe(
cacheTokenValues[expectedKey].write,
);
expect(getCacheMultiplier({ model, cacheType: 'read' })).toBe(
cacheTokenValues[expectedKey].read,
);
});
});
});

@@ -1,4 +1,159 @@
const bcrypt = require('bcryptjs');
const { getBalanceConfig } = require('~/server/services/Config');
const signPayload = require('~/server/services/signPayload');
const Balance = require('./Balance');
const User = require('./User');

/**
* Retrieve a user by ID and convert the found user document to a plain object.
*
* @param {string} userId - The ID of the user to find and return as a plain object.
* @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
* @returns {Promise<MongoUser>} A plain object representing the user document, or `null` if no user is found.
*/
const getUserById = async function (userId, fieldsToSelect = null) {
const query = User.findById(userId);
if (fieldsToSelect) {
query.select(fieldsToSelect);
}
return await query.lean();
};

/**
* Search for a single user based on partial data and return matching user document as plain object.
* @param {Partial<MongoUser>} searchCriteria - The partial data to use for searching the user.
* @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
* @returns {Promise<MongoUser>} A plain object representing the user document, or `null` if no user is found.
*/
const findUser = async function (searchCriteria, fieldsToSelect = null) {
const query = User.findOne(searchCriteria);
if (fieldsToSelect) {
query.select(fieldsToSelect);
}
return await query.lean();
};

/**
* Update a user with new data without overwriting existing properties.
*
* @param {string} userId - The ID of the user to update.
* @param {Object} updateData - An object containing the properties to update.
* @returns {Promise<MongoUser>} The updated user document as a plain object, or `null` if no user is found.
*/
const updateUser = async function (userId, updateData) {
const updateOperation = {
$set: updateData,
$unset: { expiresAt: '' }, // Remove the expiresAt field to prevent TTL
};
return await User.findByIdAndUpdate(userId, updateOperation, {
new: true,
runValidators: true,
}).lean();
};

/**
* Creates a new user, optionally with a TTL of 1 week.
* @param {MongoUser} data - The user data to be created, must contain user_id.
* @param {boolean} [disableTTL=true] - Whether to disable the TTL. Defaults to `true`.
* @param {boolean} [returnUser=false] - Whether to return the created user object.
* @returns {Promise<ObjectId|MongoUser>} A promise that resolves to the created user document ID or user object.
* @throws {Error} If a user with the same user_id already exists.
*/
const createUser = async (data, disableTTL = true, returnUser = false) => {
const balance = await getBalanceConfig();
const userData = {
...data,
expiresAt: disableTTL ? null : new Date(Date.now() + 604800 * 1000), // 1 week in milliseconds
};

if (disableTTL) {
delete userData.expiresAt;
}

const user = await User.create(userData);

// If balance is enabled, create or update a balance record for the user using global.interfaceConfig.balance
if (balance?.enabled && balance?.startBalance) {
const update = {
$inc: { tokenCredits: balance.startBalance },
};

if (
balance.autoRefillEnabled &&
balance.refillIntervalValue != null &&
balance.refillIntervalUnit != null &&
balance.refillAmount != null
) {
update.$set = {
autoRefillEnabled: true,
refillIntervalValue: balance.refillIntervalValue,
refillIntervalUnit: balance.refillIntervalUnit,
refillAmount: balance.refillAmount,
};
}

await Balance.findOneAndUpdate({ user: user._id }, update, { upsert: true, new: true }).lean();
}

if (returnUser) {
return user.toObject();
}
return user._id;
};

/**
* Count the number of user documents in the collection based on the provided filter.
*
* @param {Object} [filter={}] - The filter to apply when counting the documents.
* @returns {Promise<number>} The count of documents that match the filter.
*/
const countUsers = async function (filter = {}) {
return await User.countDocuments(filter);
};

/**
* Delete a user by their unique ID.
*
* @param {string} userId - The ID of the user to delete.
* @returns {Promise<{ deletedCount: number }>} An object indicating the number of deleted documents.
*/
const deleteUserById = async function (userId) {
try {
const result = await User.deleteOne({ _id: userId });
if (result.deletedCount === 0) {
return { deletedCount: 0, message: 'No user found with that ID.' };
}
return { deletedCount: result.deletedCount, message: 'User was deleted successfully.' };
} catch (error) {
throw new Error('Error deleting user: ' + error.message);
}
};

const { SESSION_EXPIRY } = process.env ?? {};
const expires = eval(SESSION_EXPIRY) ?? 1000 * 60 * 15;

/**
* Generates a JWT token for a given user.
*
* @param {MongoUser} user - The user for whom the token is being generated.
* @returns {Promise<string>} A promise that resolves to a JWT token.
*/
const generateToken = async (user) => {
if (!user) {
throw new Error('No user provided');
}

return await signPayload({
payload: {
id: user._id,
username: user.username,
provider: user.provider,
email: user.email,
},
secret: process.env.JWT_SECRET,
expirationTime: expires / 1000,
});
};

/**
* Compares the provided password with the user's password.
@@ -24,4 +179,11 @@ const comparePassword = async (user, candidatePassword) => {

module.exports = {
comparePassword,
deleteUserById,
generateToken,
getUserById,
countUsers,
createUser,
updateUser,
findUser,
};

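Taken together, registration-style code can use these helpers roughly as follows (a sketch; error handling omitted, field values illustrative):

const userId = await createUser({ email: 'jane@example.com', username: 'jane' });
const user = await getUserById(userId, '-password -__v');
const token = await generateToken(user);
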
@@ -1,6 +1,6 @@
{
"name": "@librechat/backend",
"version": "v0.7.8",
"version": "v0.7.7",
"description": "",
"scripts": {
"start": "echo 'please run this from the root directory'",
@@ -43,14 +43,13 @@
"@google/generative-ai": "^0.23.0",
"@googleapis/youtube": "^20.0.0",
"@keyv/redis": "^4.3.3",
"@langchain/community": "^0.3.44",
"@langchain/core": "^0.3.57",
"@langchain/google-genai": "^0.2.9",
"@langchain/google-vertexai": "^0.2.9",
"@langchain/community": "^0.3.39",
"@langchain/core": "^0.3.43",
"@langchain/google-genai": "^0.2.2",
"@langchain/google-vertexai": "^0.2.3",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.4.37",
"@librechat/agents": "^2.4.22",
"@librechat/data-schemas": "*",
"@node-saml/passport-saml": "^5.0.0",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"axios": "^1.8.2",
"bcryptjs": "^2.4.3",
@@ -76,7 +75,6 @@
"ioredis": "^5.3.2",
"js-yaml": "^4.1.0",
"jsonwebtoken": "^9.0.0",
"jwks-rsa": "^3.2.0",
"keyv": "^5.3.2",
"keyv-file": "^5.1.2",
"klona": "^2.0.6",
@@ -88,13 +86,13 @@
"mime": "^3.0.0",
"module-alias": "^2.2.3",
"mongoose": "^8.12.1",
"multer": "^2.0.0",
"multer": "^1.4.5-lts.1",
"nanoid": "^3.3.7",
"nodemailer": "^6.9.15",
"ollama": "^0.5.0",
"openai": "^4.96.2",
"openai": "^4.47.1",
"openai-chat-tokens": "^0.2.8",
"openid-client": "^6.5.0",
"openid-client": "^5.4.2",
"passport": "^0.6.0",
"passport-apple": "^2.0.2",
"passport-discord": "^0.1.4",
@@ -118,6 +116,6 @@
"jest": "^29.7.0",
"mongodb-memory-server": "^10.1.3",
"nodemon": "^3.0.3",
"supertest": "^7.1.0"
"supertest": "^7.0.0"
}
}

@@ -16,17 +16,17 @@ const FinalizationRegistry = global.FinalizationRegistry || null;
*/
const clientRegistry = FinalizationRegistry
? new FinalizationRegistry((heldValue) => {
try {
// This will run when the client is garbage collected
if (heldValue && heldValue.userId) {
logger.debug(`[FinalizationRegistry] Cleaning up client for user ${heldValue.userId}`);
} else {
logger.debug('[FinalizationRegistry] Cleaning up client');
}
} catch (e) {
// Ignore errors
try {
// This will run when the client is garbage collected
if (heldValue && heldValue.userId) {
logger.debug(`[FinalizationRegistry] Cleaning up client for user ${heldValue.userId}`);
} else {
logger.debug('[FinalizationRegistry] Cleaning up client');
}
})
} catch (e) {
// Ignore errors
}
})
: null;

/**
@@ -134,12 +134,15 @@ function disposeClient(client) {
if (client.message_delta) {
client.message_delta = null;
}
if (client.isClaudeLatest !== undefined) {
client.isClaudeLatest = null;
if (client.isClaude3 !== undefined) {
client.isClaude3 = null;
}
if (client.useMessages !== undefined) {
client.useMessages = null;
}
if (client.isLegacyOutput !== undefined) {
client.isLegacyOutput = null;
}
if (client.supportsCacheControl !== undefined) {
client.supportsCacheControl = null;
}

@@ -128,7 +128,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
clientRef = new WeakRef(client);

getAbortData = () => {
const currentClient = clientRef?.deref();
const currentClient = clientRef.deref();
const currentText =
currentClient?.getStreamText != null ? currentClient.getStreamText() : getPartialText();

@@ -228,7 +228,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {

if (!client?.skipSaveUserMessage && latestUserMessage) {
await saveMessage(req, latestUserMessage, {
context: "api/server/controllers/AskController.js - don't skip saving user message",
context: 'api/server/controllers/AskController.js - don\'t skip saving user message',
});
}

@@ -255,7 +255,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
logger.error('[AskController] Error handling request', error);
let partialText = '';
try {
const currentClient = clientRef?.deref();
const currentClient = clientRef.deref();
partialText =
currentClient?.getStreamText != null ? currentClient.getStreamText() : getPartialText();
} catch (getTextError) {
@@ -268,7 +268,6 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
conversationId: reqDataContext.conversationId,
messageId: reqDataContext.responseMessageId,
parentMessageId: overrideParentMessageId ?? reqDataContext.userMessageId ?? parentMessageId,
userMessageId: reqDataContext.userMessageId,
})
.catch((err) => {
logger.error('[AskController] Error in `handleAbortError` during catch block', err);

@@ -1,17 +1,13 @@
const cookies = require('cookie');
const jwt = require('jsonwebtoken');
const openIdClient = require('openid-client');
const { logger } = require('@librechat/data-schemas');
const {
  registerUser,
  resetPassword,
  setAuthTokens,
  requestPasswordReset,
  setOpenIDAuthTokens,
} = require('~/server/services/AuthService');
const { findUser, getUserById, deleteAllUserSessions, findSession } = require('~/models');
const { getOpenIdConfig } = require('~/strategies');
const { isEnabled } = require('~/server/utils');
const { findSession, getUserById, deleteAllUserSessions } = require('~/models');
const { logger } = require('~/config');

const registrationController = async (req, res) => {
  try {
@@ -59,28 +55,10 @@ const resetPasswordController = async (req, res) => {

const refreshController = async (req, res) => {
  const refreshToken = req.headers.cookie ? cookies.parse(req.headers.cookie).refreshToken : null;
  const token_provider = req.headers.cookie
    ? cookies.parse(req.headers.cookie).token_provider
    : null;
  if (!refreshToken) {
    return res.status(200).send('Refresh token not provided');
  }
  if (token_provider === 'openid' && isEnabled(process.env.OPENID_REUSE_TOKENS) === true) {
    try {
      const openIdConfig = getOpenIdConfig();
      const tokenset = await openIdClient.refreshTokenGrant(openIdConfig, refreshToken);
      const claims = tokenset.claims();
      const user = await findUser({ email: claims.email });
      if (!user) {
        return res.status(401).redirect('/login');
      }
      const token = setOpenIDAuthTokens(tokenset, res);
      return res.status(200).send({ token, user });
    } catch (error) {
      logger.error('[refreshController] OpenID token refresh error', error);
      return res.status(403).send('Invalid OpenID refresh token');
    }
  }

  try {
    const payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
    const user = await getUserById(payload.id, '-password -__v -totpSecret');
@@ -96,10 +74,7 @@ const refreshController = async (req, res) => {
    }

    // Find the session with the hashed refresh token
    const session = await findSession({
      userId: userId,
      refreshToken: refreshToken,
    });
    const session = await findSession({ userId: userId, refreshToken: refreshToken });

    if (session && session.expiration > new Date()) {
      const token = await setAuthTokens(userId, res, session._id);

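The OpenID branch above is gated on a `token_provider` cookie plus the `OPENID_REUSE_TOKENS` flag. A rough sketch of that gate, assuming `isEnabled` treats the string 'true' as enabled (the helper's exact semantics live in ~/server/utils):

// Sketch only, not repo code.
const cookies = require('cookie');

function shouldUseOpenIdRefresh(req) {
  const parsed = cookies.parse(req.headers.cookie ?? '');
  // Assumption: isEnabled(value) is roughly value?.toLowerCase() === 'true'.
  return (
    parsed.token_provider === 'openid' &&
    (process.env.OPENID_REUSE_TOKENS ?? '').toLowerCase() === 'true'
  );
}
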
@@ -1,26 +1,9 @@
const mongoose = require('mongoose');

const Balance = require('~/db/models').Balance;
const Balance = require('~/models/Balance');

async function balanceController(req, res) {
  const balanceData = await Balance.findOne(
    { user: req.user.id },
    '-_id tokenCredits autoRefillEnabled refillIntervalValue refillIntervalUnit lastRefill refillAmount',
  ).lean();

  if (!balanceData) {
    return res.status(404).json({ error: 'Balance not found' });
  }

  // If auto-refill is not enabled, remove auto-refill related fields from the response
  if (!balanceData.autoRefillEnabled) {
    delete balanceData.refillIntervalValue;
    delete balanceData.refillIntervalUnit;
    delete balanceData.lastRefill;
    delete balanceData.refillAmount;
  }

  res.status(200).json(balanceData);
  const { tokenCredits: balance = '' } =
    (await Balance.findOne({ user: req.user.id }, 'tokenCredits').lean()) ?? {};
  res.status(200).send('' + balance);
}

module.exports = balanceController;

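The two versions above return different shapes: the newer one responds with a JSON object (auto-refill fields stripped when disabled), the older one with a bare text number. A hedged consumer sketch that tolerates both (the /api/balance path is an assumption):

async function fetchTokenCredits() {
  const res = await fetch('/api/balance');
  const body = await res.text();
  try {
    // Object form: {"tokenCredits":42000,...}; JSON.parse('42000') also succeeds.
    const parsed = JSON.parse(body);
    return typeof parsed === 'object' ? parsed.tokenCredits : parsed;
  } catch {
    return Number(body); // bare-text form, e.g. '42000'
  }
}
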
@@ -123,7 +123,7 @@ const EditController = async (req, res, next, initializeClient) => {
    clientRef = new WeakRef(client);

    getAbortData = () => {
      const currentClient = clientRef?.deref();
      const currentClient = clientRef.deref();
      const currentText =
        currentClient?.getStreamText != null ? currentClient.getStreamText() : getPartialText();

@@ -219,7 +219,7 @@ const EditController = async (req, res, next, initializeClient) => {
    logger.error('[EditController] Error handling request', error);
    let partialText = '';
    try {
      const currentClient = clientRef?.deref();
      const currentClient = clientRef.deref();
      partialText =
        currentClient?.getStreamText != null ? currentClient.getStreamText() : getPartialText();
    } catch (getTextError) {
@@ -232,7 +232,6 @@ const EditController = async (req, res, next, initializeClient) => {
      conversationId,
      messageId: reqDataContext.responseMessageId,
      parentMessageId: overrideParentMessageId ?? userMessageId ?? parentMessageId,
      userMessageId,
    })
      .catch((err) => {
        logger.error('[EditController] Error in `handleAbortError` during catch block', err);

@@ -1,5 +1,5 @@
const { CacheKeys, AuthType } = require('librechat-data-provider');
const { getToolkitKey } = require('~/server/services/ToolService');
const { addOpenAPISpecs } = require('~/app/clients/tools/util/addOpenAPISpecs');
const { getCustomConfig } = require('~/server/services/Config');
const { availableTools } = require('~/app/clients/tools');
const { getMCPManager } = require('~/config');
@@ -69,7 +69,7 @@ const getAvailablePluginsController = async (req, res) => {
    );
  }

  let plugins = authenticatedPlugins;
  let plugins = await addOpenAPISpecs(authenticatedPlugins);

  if (includedTools.length > 0) {
    plugins = plugins.filter((plugin) => includedTools.includes(plugin.pluginKey));
@@ -105,11 +105,11 @@ const getAvailableTools = async (req, res) => {
    return;
  }

  let pluginManifest = availableTools;
  const pluginManifest = availableTools;
  const customConfig = await getCustomConfig();
  if (customConfig?.mcpServers != null) {
    const mcpManager = getMCPManager();
    pluginManifest = await mcpManager.loadManifestTools(pluginManifest);
    await mcpManager.loadManifestTools(pluginManifest);
  }

  /** @type {TPlugin[]} */
@@ -128,7 +128,7 @@ const getAvailableTools = async (req, res) => {
    (plugin) =>
      toolDefinitions[plugin.pluginKey] !== undefined ||
      (plugin.toolkit === true &&
        Object.keys(toolDefinitions).some((key) => getToolkitKey(key) === plugin.pluginKey)),
        Object.keys(toolDefinitions).some((key) => key.startsWith(`${plugin.pluginKey}_`))),
  );

  await cache.set(CacheKeys.TOOLS, tools);

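The last hunk above swaps a `getToolkitKey` lookup for a plain prefix test: toolkit members are assumed to be keyed `<toolkitKey>_<toolName>`. A self-contained illustration of that convention (the tool names are invented for the example):

const toolDefinitions = {
  image_gen_create: {},
  image_gen_edit: {},
  web_search: {},
};
const plugin = { pluginKey: 'image_gen', toolkit: true };
const isToolkitMember = Object.keys(toolDefinitions).some((key) =>
  key.startsWith(`${plugin.pluginKey}_`),
);
console.log(isToolkitMember); // true — matches image_gen_create and image_gen_edit
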
@@ -1,12 +1,12 @@
const { logger } = require('@librechat/data-schemas');
const {
  verifyTOTP,
  getTOTPSecret,
  verifyBackupCode,
  generateTOTPSecret,
  generateBackupCodes,
  verifyTOTP,
  verifyBackupCode,
  getTOTPSecret,
} = require('~/server/services/twoFactorService');
const { getUserById, updateUser } = require('~/models');
const { updateUser, getUserById } = require('~/models');
const { logger } = require('~/config');
const { encryptV3 } = require('~/server/utils/crypto');

const safeAppTitle = (process.env.APP_TITLE || 'LibreChat').replace(/\s+/g, '');

@@ -1,11 +1,6 @@
const { FileSources } = require('librechat-data-provider');
const {
  Tools,
  FileSources,
  webSearchKeys,
  extractWebSearchEnvVars,
} = require('librechat-data-provider');
const { logger } = require('@librechat/data-schemas');
const {
  Balance,
  getFiles,
  updateUser,
  deleteFiles,
@@ -15,6 +10,7 @@ const {
  deleteUserById,
  deleteAllUserSessions,
} = require('~/models');
const User = require('~/models/User');
const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/services/PluginService');
const { updateUserPluginsService, deleteUserKey } = require('~/server/services/UserService');
const { verifyEmail, resendVerificationEmail } = require('~/server/services/AuthService');
@@ -22,10 +18,8 @@ const { needsRefresh, getNewS3URL } = require('~/server/services/Files/S3/crud')
const { processDeleteRequest } = require('~/server/services/Files/process');
const { deleteAllSharedLinks } = require('~/models/Share');
const { deleteToolCalls } = require('~/models/ToolCall');

const Transaction = require('~/db/models').Transaction;
const Balance = require('~/db/models').Balance;
const User = require('~/db/models').User;
const { Transaction } = require('~/models/Transaction');
const { logger } = require('~/config');

const getUserController = async (req, res) => {
  /** @type {MongoUser} */
@@ -89,6 +83,7 @@ const deleteUserFiles = async (req) => {
const updateUserPluginsController = async (req, res) => {
  const { user } = req;
  const { pluginKey, action, auth, isEntityTool } = req.body;
  let authService;
  try {
    if (!isEntityTool) {
      const userPluginsService = await updateUserPluginsService(user, pluginKey, action);
@@ -100,55 +95,32 @@ const updateUserPluginsController = async (req, res) => {
      }
    }

    if (auth == null) {
      return res.status(200).send();
    }

    let keys = Object.keys(auth);
    if (keys.length === 0 && pluginKey !== Tools.web_search) {
      return res.status(200).send();
    }
    const values = Object.values(auth);

    /** @type {number} */
    let status = 200;
    /** @type {string} */
    let message;
    /** @type {IPluginAuth | Error} */
    let authService;

    if (pluginKey === Tools.web_search) {
      /** @type {TCustomConfig['webSearch']} */
      const webSearchConfig = req.app.locals?.webSearch;
      keys = extractWebSearchEnvVars({
        keys: action === 'install' ? keys : webSearchKeys,
        config: webSearchConfig,
      });
    }

    if (action === 'install') {
      for (let i = 0; i < keys.length; i++) {
        authService = await updateUserPluginAuth(user.id, keys[i], pluginKey, values[i]);
        if (authService instanceof Error) {
          logger.error('[authService]', authService);
          ({ status, message } = authService);
    if (auth) {
      const keys = Object.keys(auth);
      const values = Object.values(auth);
      if (action === 'install' && keys.length > 0) {
        for (let i = 0; i < keys.length; i++) {
          authService = await updateUserPluginAuth(user.id, keys[i], pluginKey, values[i]);
          if (authService instanceof Error) {
            logger.error('[authService]', authService);
            const { status, message } = authService;
            res.status(status).send({ message });
          }
        }
      }
    } else if (action === 'uninstall') {
      for (let i = 0; i < keys.length; i++) {
        authService = await deleteUserPluginAuth(user.id, keys[i]);
        if (authService instanceof Error) {
          logger.error('[authService]', authService);
          ({ status, message } = authService);
      if (action === 'uninstall' && keys.length > 0) {
        for (let i = 0; i < keys.length; i++) {
          authService = await deleteUserPluginAuth(user.id, keys[i]);
          if (authService instanceof Error) {
            logger.error('[authService]', authService);
            const { status, message } = authService;
            res.status(status).send({ message });
          }
        }
      }
    }

    if (status === 200) {
      return res.status(status).send();
    }

    res.status(status).send({ message });
    res.status(200).send();
  } catch (err) {
    logger.error('[updateUserPluginsController]', err);
    return res.status(500).json({ message: 'Something went wrong.' });

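One detail worth noting in the hunk above: `({ status, message } = authService);` destructures into variables declared earlier, and the wrapping parentheses are mandatory — without them the leading `{` parses as a block statement. A minimal demonstration:

let status = 200;
let message;
const authService = Object.assign(new Error('Bad key'), { status: 400, message: 'Bad key' });
({ status, message } = authService); // SyntaxError without the wrapping parentheses
console.log(status, message); // 400 'Bad key'
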
@@ -14,6 +14,15 @@ const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { saveBase64Image } = require('~/server/services/Files/process');
const { logger, sendEvent } = require('~/config');

/** @typedef {import('@librechat/agents').Graph} Graph */
/** @typedef {import('@librechat/agents').EventHandler} EventHandler */
/** @typedef {import('@librechat/agents').ModelEndData} ModelEndData */
/** @typedef {import('@librechat/agents').ToolEndData} ToolEndData */
/** @typedef {import('@librechat/agents').ToolEndCallback} ToolEndCallback */
/** @typedef {import('@librechat/agents').ChatModelStreamHandler} ChatModelStreamHandler */
/** @typedef {import('@librechat/agents').ContentAggregatorResult['aggregateContent']} ContentAggregator */
/** @typedef {import('@librechat/agents').GraphEvents} GraphEvents */

class ModelEndHandler {
  /**
   * @param {Array<UsageMetadata>} collectedUsage
@@ -29,7 +38,7 @@ class ModelEndHandler {
   * @param {string} event
   * @param {ModelEndData | undefined} data
   * @param {Record<string, unknown> | undefined} metadata
   * @param {StandardGraph} graph
   * @param {Graph} graph
   * @returns
   */
  handle(event, data, metadata, graph) {
@@ -52,10 +61,7 @@ class ModelEndHandler {
      }

      this.collectedUsage.push(usage);
      const streamingDisabled = !!(
        graph.clientOptions?.disableStreaming || graph?.boundModel?.disableStreaming
      );
      if (!streamingDisabled) {
      if (!graph.clientOptions?.disableStreaming) {
        return;
      }
      if (!data.output.content) {
@@ -237,30 +243,6 @@ function createToolEndCallback({ req, res, artifactPromises }) {
      return;
    }

    if (output.artifact[Tools.web_search]) {
      artifactPromises.push(
        (async () => {
          const name = `${output.name}_${output.tool_call_id}_${nanoid()}`;
          const attachment = {
            name,
            type: Tools.web_search,
            messageId: metadata.run_id,
            toolCallId: output.tool_call_id,
            conversationId: metadata.thread_id,
            [Tools.web_search]: { ...output.artifact[Tools.web_search] },
          };
          if (!res.headersSent) {
            return attachment;
          }
          res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
          return attachment;
        })().catch((error) => {
          logger.error('Error processing artifact content:', error);
          return null;
        }),
      );
    }

    if (output.artifact.content) {
      /** @type {FormattedContent[]} */
      const content = output.artifact.content;

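The `res.write('event: attachment\ndata: ...')` call above emits a server-sent-events frame: an `event:` field, a `data:` field carrying JSON, and a terminating blank line. A minimal Express sketch of the same wire format (endpoint and payload are illustrative):

const express = require('express');
const app = express();

app.get('/stream', (req, res) => {
  res.set({
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive',
  });
  const attachment = { name: 'web_search_demo', type: 'web_search' };
  // Frame = event name + JSON data + blank line.
  res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
  res.end();
});
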
@@ -39,6 +39,9 @@ const BaseClient = require('~/app/clients/BaseClient');
const { logger, sendEvent } = require('~/config');
const { createRun } = require('./run');

/** @typedef {import('@librechat/agents').MessageContentComplex} MessageContentComplex */
/** @typedef {import('@langchain/core/runnables').RunnableConfig} RunnableConfig */

/**
 * @param {ServerRequest} req
 * @param {Agent} agent
@@ -55,7 +58,7 @@ const payloadParser = ({ req, agent, endpoint }) => {

const legacyContentEndpoints = new Set([KnownEndpoints.groq, KnownEndpoints.deepseek]);

const noSystemModelRegex = [/\b(o1-preview|o1-mini|amazon\.titan-text)\b/gi];
const noSystemModelRegex = [/\b(o\d)\b/gi];

// const { processMemory, memoryInstructions } = require('~/server/services/Endpoints/agents/memory');
// const { getFormattedMemories } = require('~/models/Memory');
@@ -145,13 +148,19 @@ class AgentClient extends BaseClient {
   * @param {MongoFile[]} attachments
   */
  checkVisionRequest(attachments) {
    logger.info(
      '[api/server/controllers/agents/client.js #checkVisionRequest] not implemented',
      attachments,
    );
    // if (!attachments) {
    //   return;
    // }

    // const availableModels = this.options.modelsConfig?.[this.options.endpoint];
    // if (!availableModels) {
    //   return;
    // }

    // let visionRequestDetected = false;
    // for (const file of attachments) {
    //   if (file?.type?.includes('image')) {
@@ -162,11 +171,13 @@ class AgentClient extends BaseClient {
    // if (!visionRequestDetected) {
    //   return;
    // }

    // this.isVisionModel = validateVisionModel({ model: this.modelOptions.model, availableModels });
    // if (this.isVisionModel) {
    //   delete this.modelOptions.stop;
    //   return;
    // }

    // for (const model of availableModels) {
    //   if (!validateVisionModel({ model, availableModels })) {
    //     continue;
@@ -176,12 +187,14 @@ class AgentClient extends BaseClient {
    //   delete this.modelOptions.stop;
    //   return;
    // }

    // if (!availableModels.includes(this.defaultVisionModel)) {
    //   return;
    // }
    // if (!validateVisionModel({ model: this.defaultVisionModel, availableModels })) {
    //   return;
    // }

    // this.modelOptions.model = this.defaultVisionModel;
    // this.isVisionModel = true;
    // delete this.modelOptions.stop;
@@ -540,7 +553,7 @@ class AgentClient extends BaseClient {
  }

  async chatCompletion({ payload, abortController = null }) {
    /** @type {Partial<GraphRunnableConfig>} */
    /** @type {Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string; streamMode: string }} */
    let config;
    /** @type {ReturnType<createRun>} */
    let run;
@@ -660,7 +673,7 @@ class AgentClient extends BaseClient {
      this.indexTokenCountMap,
      toolSet,
    );
    if (legacyContentEndpoints.has(this.options.agent.endpoint?.toLowerCase())) {
    if (legacyContentEndpoints.has(this.options.agent.endpoint)) {
      initialMessages = formatContentStrings(initialMessages);
    }

@@ -715,14 +728,12 @@ class AgentClient extends BaseClient {
    }

    if (noSystemMessages === true && systemContent?.length) {
      const latestMessageContent = _messages.pop().content;
      let latestMessage = _messages.pop().content;
      if (typeof latestMessage !== 'string') {
        latestMessageContent[0].text = [systemContent, latestMessageContent[0].text].join('\n');
        _messages.push(new HumanMessage({ content: latestMessageContent }));
      } else {
        const text = [systemContent, latestMessageContent].join('\n');
        _messages.push(new HumanMessage(text));
        latestMessage = latestMessage[0].text;
      }
      latestMessage = [systemContent, latestMessage].join('\n');
      _messages.push(new HumanMessage(latestMessage));
    }

    let messages = _messages;

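The last hunk handles models that reject system messages (those matched by `noSystemModelRegex`) by folding the system content into the newest human turn. A condensed sketch of the new logic, assuming LangChain's `HumanMessage`:

// Sketch only: prepend system content to the latest human message for
// models that accept no system role.
const { HumanMessage } = require('@langchain/core/messages');

function foldSystemContent(messages, systemContent) {
  let latest = messages.pop().content;
  if (typeof latest !== 'string') {
    latest = latest[0].text; // complex content: take the leading text part
  }
  messages.push(new HumanMessage([systemContent, latest].join('\n')));
  return messages;
}
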
@@ -259,7 +259,6 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
      sender,
      messageId: responseMessageId,
      parentMessageId: overrideParentMessageId ?? userMessageId ?? parentMessageId,
      userMessageId,
    })
      .catch((err) => {
        logger.error('[api/server/controllers/agents/request] Error in `handleAbortError`', err);

@@ -23,7 +23,6 @@ const { updateAction, getActions } = require('~/models/Action');
const { updateAgentProjects } = require('~/models/Agent');
const { getProjectByName } = require('~/models/Project');
const { deleteFileByFilter } = require('~/models/File');
const { revertAgentVersion } = require('~/models/Agent');
const { logger } = require('~/config');

const systemTools = {
@@ -105,13 +104,11 @@ const getAgentHandler = async (req, res) => {
    return res.status(404).json({ error: 'Agent not found' });
  }

  agent.version = agent.versions ? agent.versions.length : 0;

  if (agent.avatar && agent.avatar?.source === FileSources.s3) {
    const originalUrl = agent.avatar.filepath;
    agent.avatar.filepath = await refreshS3Url(agent.avatar);
    if (originalUrl !== agent.avatar.filepath) {
      await updateAgent({ id }, { avatar: agent.avatar }, { updatingUserId: req.user.id });
      await updateAgent({ id }, { avatar: agent.avatar });
    }
  }

@@ -130,7 +127,6 @@ const getAgentHandler = async (req, res) => {
      author: agent.author,
      projectIds: agent.projectIds,
      isCollaborative: agent.isCollaborative,
      version: agent.version,
    });
  }
  return res.status(200).json(agent);
@@ -169,9 +165,7 @@ const updateAgentHandler = async (req, res) => {
    }

    let updatedAgent =
      Object.keys(updateData).length > 0
        ? await updateAgent({ id }, updateData, { updatingUserId: req.user.id })
        : existingAgent;
      Object.keys(updateData).length > 0 ? await updateAgent({ id }, updateData) : existingAgent;

    if (projectIds || removeProjectIds) {
      updatedAgent = await updateAgentProjects({
@@ -193,14 +187,6 @@ const updateAgentHandler = async (req, res) => {
    return res.json(updatedAgent);
  } catch (error) {
    logger.error('[/Agents/:id] Error updating Agent', error);

    if (error.statusCode === 409) {
      return res.status(409).json({
        error: error.message,
        details: error.details,
      });
    }

    res.status(500).json({ error: error.message });
  }
};
@@ -407,11 +393,7 @@ const uploadAgentAvatarHandler = async (req, res) => {
    },
  };

  promises.push(
    await updateAgent({ id: agent_id, author: req.user.id }, data, {
      updatingUserId: req.user.id,
    }),
  );
  promises.push(await updateAgent({ id: agent_id, author: req.user.id }, data));

  const resolved = await Promise.all(promises);
  res.status(201).json(resolved[0]);
@@ -429,66 +411,6 @@ const uploadAgentAvatarHandler = async (req, res) => {
  }
};

/**
 * Reverts an agent to a previous version from its version history.
 * @route PATCH /agents/:id/revert
 * @param {object} req - Express Request object
 * @param {object} req.params - Request parameters
 * @param {string} req.params.id - The ID of the agent to revert
 * @param {object} req.body - Request body
 * @param {number} req.body.version_index - The index of the version to revert to
 * @param {object} req.user - Authenticated user information
 * @param {string} req.user.id - User ID
 * @param {string} req.user.role - User role
 * @param {ServerResponse} res - Express Response object
 * @returns {Promise<Agent>} 200 - The updated agent after reverting to the specified version
 * @throws {Error} 400 - If version_index is missing
 * @throws {Error} 403 - If user doesn't have permission to modify the agent
 * @throws {Error} 404 - If agent not found
 * @throws {Error} 500 - If there's an internal server error during the reversion process
 */
const revertAgentVersionHandler = async (req, res) => {
  try {
    const { id } = req.params;
    const { version_index } = req.body;

    if (version_index === undefined) {
      return res.status(400).json({ error: 'version_index is required' });
    }

    const isAdmin = req.user.role === SystemRoles.ADMIN;
    const existingAgent = await getAgent({ id });

    if (!existingAgent) {
      return res.status(404).json({ error: 'Agent not found' });
    }

    const isAuthor = existingAgent.author.toString() === req.user.id;
    const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;

    if (!hasEditPermission) {
      return res.status(403).json({
        error: 'You do not have permission to modify this non-collaborative agent',
      });
    }

    const updatedAgent = await revertAgentVersion({ id }, version_index);

    if (updatedAgent.author) {
      updatedAgent.author = updatedAgent.author.toString();
    }

    if (updatedAgent.author !== req.user.id) {
      delete updatedAgent.author;
    }

    return res.json(updatedAgent);
  } catch (error) {
    logger.error('[/agents/:id/revert] Error reverting Agent version', error);
    res.status(500).json({ error: error.message });
  }
};

module.exports = {
  createAgent: createAgentHandler,
  getAgent: getAgentHandler,
@@ -497,5 +419,4 @@ module.exports = {
  deleteAgent: deleteAgentHandler,
  getListAgents: getListAgentsHandler,
  uploadAgentAvatar: uploadAgentAvatarHandler,
  revertAgentVersion: revertAgentVersionHandler,
};

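The removed revert handler above is documented as `PATCH /agents/:id/revert` taking a `version_index`. A hedged client-side sketch of calling it (the `/api` prefix and bearer-token header are assumptions about the deployment, not confirmed by the diff):

async function revertAgent(agentId, versionIndex, token) {
  const res = await fetch(`/api/agents/${agentId}/revert`, {
    method: 'PATCH',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${token}`,
    },
    body: JSON.stringify({ version_index: versionIndex }),
  });
  if (!res.ok) {
    throw new Error(`Revert failed: ${res.status}`);
  }
  return res.json(); // the updated agent
}
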
@@ -119,7 +119,7 @@ const chatV1 = async (req, res) => {
    } else if (/Files.*are invalid/.test(error.message)) {
      const errorMessage = `Files are invalid, or may not have uploaded yet.${
        endpoint === EModelEndpoint.azureAssistants
          ? " If using Azure OpenAI, files are only available in the region of the assistant's model at the time of upload."
          ? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
          : ''
      }`;
      return sendResponse(req, res, messageData, errorMessage);
@@ -326,15 +326,8 @@ const chatV1 = async (req, res) => {

    file_ids = files.map(({ file_id }) => file_id);
    if (file_ids.length || thread_file_ids.length) {
      userMessage.file_ids = file_ids;
      attachedFileIds = new Set([...file_ids, ...thread_file_ids]);
      if (endpoint === EModelEndpoint.azureAssistants) {
        userMessage.attachments = Array.from(attachedFileIds).map((file_id) => ({
          file_id,
          tools: [{ type: 'file_search' }],
        }));
      } else {
        userMessage.file_ids = Array.from(attachedFileIds);
      }
    }
  };

@@ -386,8 +379,8 @@ const chatV1 = async (req, res) => {
        body.additional_instructions ? `${body.additional_instructions}\n` : ''
      }The user has uploaded ${imageCount} image${pluralized}.
      Use the \`${ImageVisionTool.function.name}\` tool to retrieve ${
        plural ? '' : 'a '
      }detailed text description${pluralized} for ${plural ? 'each' : 'the'} image${pluralized}.`;
        plural ? '' : 'a '
      }detailed text description${pluralized} for ${plural ? 'each' : 'the'} image${pluralized}.`;

      return files;
    };
@@ -583,8 +576,6 @@ const chatV1 = async (req, res) => {
      thread_id,
      model: assistant_id,
      endpoint,
      spec: endpointOption.spec,
      iconURL: endpointOption.iconURL,
    };

    sendMessage(res, {

@@ -428,8 +428,6 @@ const chatV2 = async (req, res) => {
      thread_id,
      model: assistant_id,
      endpoint,
      spec: endpointOption.spec,
      iconURL: endpointOption.iconURL,
    };

    sendMessage(res, {

@@ -1,5 +1,5 @@
const cookies = require('cookie');
const { getOpenIdConfig } = require('~/strategies');
const { Issuer } = require('openid-client');
const { logoutUser } = require('~/server/services/AuthService');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');
@@ -10,29 +10,20 @@ const logoutController = async (req, res) => {
    const logout = await logoutUser(req, refreshToken);
    const { status, message } = logout;
    res.clearCookie('refreshToken');
    res.clearCookie('token_provider');
    const response = { message };
    if (
      req.user.openidId != null &&
      isEnabled(process.env.OPENID_USE_END_SESSION_ENDPOINT) &&
      process.env.OPENID_ISSUER
    ) {
      const openIdConfig = getOpenIdConfig();
      if (!openIdConfig) {
      const issuer = await Issuer.discover(process.env.OPENID_ISSUER);
      const redirect = issuer.metadata.end_session_endpoint;
      if (!redirect) {
        logger.warn(
          '[logoutController] OpenID config not found. Please verify that the open id configuration and initialization are correct.',
          '[logoutController] end_session_endpoint not found in OpenID issuer metadata. Please verify that the issuer is correct.',
        );
      } else {
        const endSessionEndpoint = openIdConfig
          ? openIdConfig.serverMetadata().end_session_endpoint
          : null;
        if (endSessionEndpoint) {
          response.redirect = endSessionEndpoint;
        } else {
          logger.warn(
            '[logoutController] end_session_endpoint not found in OpenID issuer metadata. Please verify that the issuer is correct.',
          );
        }
        response.redirect = redirect;
      }
    }
    return res.status(status).send(response);

@@ -1,12 +1,12 @@
const jwt = require('jsonwebtoken');
const { logger } = require('@librechat/data-schemas');
const {
  verifyTOTP,
  getTOTPSecret,
  verifyBackupCode,
  getTOTPSecret,
} = require('~/server/services/twoFactorService');
const { setAuthTokens } = require('~/server/services/AuthService');
const { getUserById } = require('~/models');
const { getUserById } = require('~/models/userMethods');
const { logger } = require('~/config');

/**
 * Verifies the 2FA code during login using a temporary token.

@@ -6,7 +6,6 @@ const {
  Permissions,
  ToolCallTypes,
  PermissionTypes,
  loadWebSearchAuth,
} = require('librechat-data-provider');
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
const { processCodeOutput } = require('~/server/services/Files/Code/process');
@@ -25,36 +24,6 @@ const toolAccessPermType = {
  [Tools.execute_code]: PermissionTypes.RUN_CODE,
};

/**
 * Verifies web search authentication, ensuring each category has at least
 * one fully authenticated service.
 *
 * @param {ServerRequest} req - The request object
 * @param {ServerResponse} res - The response object
 * @returns {Promise<void>} A promise that resolves when the function has completed
 */
const verifyWebSearchAuth = async (req, res) => {
  try {
    const userId = req.user.id;
    /** @type {TCustomConfig['webSearch']} */
    const webSearchConfig = req.app.locals?.webSearch || {};
    const result = await loadWebSearchAuth({
      userId,
      loadAuthValues,
      webSearchConfig,
      throwError: false,
    });

    return res.status(200).json({
      authenticated: result.authenticated,
      authTypes: result.authTypes,
    });
  } catch (error) {
    console.error('Error in verifyWebSearchAuth:', error);
    return res.status(500).json({ message: error.message });
  }
};

/**
 * @param {ServerRequest} req - The request object, containing information about the HTTP request.
 * @param {ServerResponse} res - The response object, used to send back the desired HTTP response.
@@ -63,9 +32,6 @@ const verifyWebSearchAuth = async (req, res) => {
const verifyToolAuth = async (req, res) => {
  try {
    const { toolId } = req.params;
    if (toolId === Tools.web_search) {
      return await verifyWebSearchAuth(req, res);
    }
    const authFields = fieldsMap[toolId];
    if (!authFields) {
      res.status(404).json({ message: 'Tool not found' });

@@ -9,9 +9,8 @@ const passport = require('passport');
const mongoSanitize = require('express-mongo-sanitize');
const fs = require('fs');
const cookieParser = require('cookie-parser');
const { connectDb, indexSync } = require('~/db');

const { jwtLogin, passportLogin } = require('~/strategies');
const { connectDb, indexSync } = require('~/lib/db');
const { isEnabled } = require('~/server/utils');
const { ldapLogin } = require('~/strategies');
const { logger } = require('~/config');
@@ -25,25 +24,20 @@ const routes = require('./routes');

const { PORT, HOST, ALLOW_SOCIAL_LOGIN, DISABLE_COMPRESSION, TRUST_PROXY } = process.env ?? {};

// Allow PORT=0 to be used for automatic free port assignment
const port = isNaN(Number(PORT)) ? 3080 : Number(PORT);
const port = Number(PORT) || 3080;
const host = HOST || 'localhost';
const trusted_proxy = Number(TRUST_PROXY) || 1; /* trust first proxy by default */
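
The competing `port` lines above differ precisely on PORT=0: `Number('0') || 3080` falls back because 0 is falsy, while the `isNaN` form preserves 0 so the OS can assign a free port. A quick demonstration:

for (const PORT of ['0', '3080', undefined, 'abc']) {
  const withFallback = Number(PORT) || 3080; // '0' -> 3080 (0 is falsy)
  const withIsNaN = isNaN(Number(PORT)) ? 3080 : Number(PORT); // '0' -> 0
  console.log(PORT, { withFallback, withIsNaN });
}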

const app = express();

const startServer = async () => {
  if (typeof Bun !== 'undefined') {
    axios.defaults.headers.common['Accept-Encoding'] = 'gzip';
  }
  await connectDb();

  logger.info('Connected to MongoDB');
  await indexSync();

  const app = express();
  app.disable('x-powered-by');
  app.set('trust proxy', trusted_proxy);

  await AppService(app);

  const indexPath = path.join(app.locals.paths.dist, 'index.html');
@@ -55,29 +49,28 @@ const startServer = async () => {
  app.use(noIndex);
  app.use(errorController);
  app.use(express.json({ limit: '3mb' }));
  app.use(express.urlencoded({ extended: true, limit: '3mb' }));
  app.use(mongoSanitize());
  app.use(express.urlencoded({ extended: true, limit: '3mb' }));
  app.use(staticCache(app.locals.paths.dist));
  app.use(staticCache(app.locals.paths.fonts));
  app.use(staticCache(app.locals.paths.assets));
  app.set('trust proxy', trusted_proxy);
  app.use(cors());
  app.use(cookieParser());

  if (!isEnabled(DISABLE_COMPRESSION)) {
    app.use(compression());
  } else {
    console.warn('Response compression has been disabled via DISABLE_COMPRESSION.');
  }

  // Serve static assets with aggressive caching
  app.use(staticCache(app.locals.paths.dist));
  app.use(staticCache(app.locals.paths.fonts));
  app.use(staticCache(app.locals.paths.assets));

  if (!ALLOW_SOCIAL_LOGIN) {
    console.warn('Social logins are disabled. Set ALLOW_SOCIAL_LOGIN=true to enable them.');
    console.warn(
      'Social logins are disabled. Set Environment Variable "ALLOW_SOCIAL_LOGIN" to true to enable them.',
    );
  }

  /* OAUTH */
  app.use(passport.initialize());
  passport.use(jwtLogin());
  passport.use(await jwtLogin());
  passport.use(passportLogin());

  /* LDAP Auth */
@@ -86,7 +79,7 @@ const startServer = async () => {
  }

  if (isEnabled(ALLOW_SOCIAL_LOGIN)) {
    await configureSocialLogins(app);
    configureSocialLogins(app);
  }

  app.use('/oauth', routes.oauth);
@@ -135,7 +128,7 @@ const startServer = async () => {
  });

  app.listen(port, host, () => {
    if (host === '0.0.0.0') {
    if (host == '0.0.0.0') {
      logger.info(
        `Server listening on all interfaces at port ${port}. Use http://localhost:${port} to access it`,
      );
@@ -183,6 +176,3 @@ process.on('uncaughtException', (err) => {

  process.exit(1);
});

// export app for easier testing purposes
module.exports = app;

@@ -1,82 +0,0 @@
const fs = require('fs');
const path = require('path');
const request = require('supertest');
const { MongoMemoryServer } = require('mongodb-memory-server');
const mongoose = require('mongoose');

jest.mock('~/server/services/Config/loadCustomConfig', () => {
  return jest.fn(() => Promise.resolve({}));
});

describe('Server Configuration', () => {
  // Increase the default timeout to allow for Mongo cleanup
  jest.setTimeout(30_000);

  let mongoServer;
  let app;

  /** Mocked fs.readFileSync for index.html */
  const originalReadFileSync = fs.readFileSync;
  beforeAll(() => {
    fs.readFileSync = function (filepath, options) {
      if (filepath.includes('index.html')) {
        return '<!DOCTYPE html><html><head><title>LibreChat</title></head><body><div id="root"></div></body></html>';
      }
      return originalReadFileSync(filepath, options);
    };
  });

  afterAll(() => {
    // Restore original fs.readFileSync
    fs.readFileSync = originalReadFileSync;
  });

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    process.env.MONGO_URI = mongoServer.getUri();
    process.env.PORT = '0'; // Use a random available port
    app = require('~/server');

    // Wait for the app to be healthy
    await healthCheckPoll(app);
  });

  afterAll(async () => {
    await mongoServer.stop();
    await mongoose.disconnect();
  });

  it('should return OK for /health', async () => {
    const response = await request(app).get('/health');
    expect(response.status).toBe(200);
    expect(response.text).toBe('OK');
  });

  it('should not cache index page', async () => {
    const response = await request(app).get('/');
    expect(response.status).toBe(200);
    expect(response.headers['cache-control']).toBe('no-cache, no-store, must-revalidate');
    expect(response.headers['pragma']).toBe('no-cache');
    expect(response.headers['expires']).toBe('0');
  });
});

// Polls the /health endpoint every 30ms for up to 10 seconds to wait for the server to start completely
async function healthCheckPoll(app, retries = 0) {
  const maxRetries = Math.floor(10000 / 30); // 10 seconds / 30ms
  try {
    const response = await request(app).get('/health');
    if (response.status === 200) {
      return; // App is healthy
    }
  } catch (error) {
    // Ignore connection errors during polling
  }

  if (retries < maxRetries) {
    await new Promise((resolve) => setTimeout(resolve, 30));
    await healthCheckPoll(app, retries + 1);
  } else {
    throw new Error('App did not become healthy within 10 seconds.');
  }
}
@@ -311,7 +311,7 @@ const handleAbortError = async (res, req, error, data) => {
  } else {
    logger.error('[handleAbortError] AI response error; aborting request:', error);
  }
  const { sender, conversationId, messageId, parentMessageId, userMessageId, partialText } = data;
  const { sender, conversationId, messageId, parentMessageId, partialText } = data;

  if (error.stack && error.stack.includes('google')) {
    logger.warn(
@@ -344,10 +344,10 @@ const handleAbortError = async (res, req, error, data) => {
    parentMessageId,
    text: errorText,
    user: req.user.id,
    shouldSaveMessage: true,
    spec: endpointOption?.spec,
    iconURL: endpointOption?.iconURL,
    modelLabel: endpointOption?.modelLabel,
    shouldSaveMessage: userMessageId != null,
    model: endpointOption?.modelOptions?.model || req.body?.model,
  };

@@ -1,12 +1,12 @@
const { Keyv } = require('keyv');
const uap = require('ua-parser-js');
const { logger } = require('@librechat/data-schemas');
const { ViolationTypes } = require('librechat-data-provider');
const { isEnabled, removePorts } = require('~/server/utils');
const keyvMongo = require('~/cache/keyvMongo');
const denyRequest = require('./denyRequest');
const { getLogStores } = require('~/cache');
const { findUser } = require('~/models');
const { logger } = require('~/config');

const banCache = new Keyv({ store: keyvMongo, namespace: ViolationTypes.BAN, ttl: 0 });
const message = 'Your account has been temporarily banned due to violations of our service.';

@@ -1,5 +1,5 @@
const { getInvite } = require('~/models/inviteUser');
const { deleteTokens } = require('~/models');
const { deleteTokens } = require('~/models/Token');

async function checkInviteUser(req, res, next) {
  const token = req.body.token;

@@ -1,13 +1,9 @@
const cookies = require('cookie');
const { isEnabled } = require('~/server/utils');
const passport = require('passport');

// This middleware does not require authentication,
// but if the user is authenticated, it will set the user object.
const optionalJwtAuth = (req, res, next) => {
  const cookieHeader = req.headers.cookie;
  const tokenProvider = cookieHeader ? cookies.parse(cookieHeader).token_provider : null;
  const callback = (err, user) => {
  passport.authenticate('jwt', { session: false }, (err, user) => {
    if (err) {
      return next(err);
    }
@@ -15,11 +11,7 @@ const optionalJwtAuth = (req, res, next) => {
      req.user = user;
    }
    next();
  };
  if (tokenProvider === 'openid' && isEnabled(process.env.OPENID_REUSE_TOKENS)) {
    return passport.authenticate('openidJwt', { session: false }, callback)(req, res, next);
  }
  passport.authenticate('jwt', { session: false }, callback)(req, res, next);
  })(req, res, next);
};

module.exports = optionalJwtAuth;

@@ -1,23 +1,5 @@
const passport = require('passport');
const cookies = require('cookie');
const { isEnabled } = require('~/server/utils');

/**
 * Custom Middleware to handle JWT authentication, with support for OpenID token reuse
 * Switches between JWT and OpenID authentication based on cookies and environment settings
 */
const requireJwtAuth = (req, res, next) => {
  // Check if token provider is specified in cookies
  const cookieHeader = req.headers.cookie;
  const tokenProvider = cookieHeader ? cookies.parse(cookieHeader).token_provider : null;

  // Use OpenID authentication if token provider is OpenID and OPENID_REUSE_TOKENS is enabled
  if (tokenProvider === 'openid' && isEnabled(process.env.OPENID_REUSE_TOKENS)) {
    return passport.authenticate('openidJwt', { session: false })(req, res, next);
  }

  // Default to standard JWT authentication
  return passport.authenticate('jwt', { session: false })(req, res, next);
};
const requireJwtAuth = passport.authenticate('jwt', { session: false });

module.exports = requireJwtAuth;
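
Either version exports a drop-in Express middleware, so callers mount it the same way. A sketch of typical usage (route path and require path are illustrative):

const express = require('express');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth');

const router = express.Router();
router.get('/me', requireJwtAuth, (req, res) => {
  // req.user is set by the passport strategy on success
  res.json({ id: req.user.id });
});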