Compare commits

3 commits, comparing feat/user-… against ci/Semanti…

| Author | SHA1 | Date |
|---|---|---|
| | f94aba4831 | |
| | 661c6cc280 | |
| | 0146800b66 | |
.env.example (52 changes)

@@ -20,11 +20,6 @@ DOMAIN_CLIENT=http://localhost:3080
DOMAIN_SERVER=http://localhost:3080

NO_INDEX=true

# Use the address that is at most n number of hops away from the Express application.
# req.socket.remoteAddress is the first hop, and the rest are looked for in the X-Forwarded-For header from right to left.
# A value of 0 means that the first untrusted address would be req.socket.remoteAddress, i.e. there is no reverse proxy.
# Defaulted to 1.
TRUST_PROXY=1

#===============#
# JSON Logging #
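
An aside on the removed `TRUST_PROXY` block: the comments describe Express's hop counting behind `X-Forwarded-For`. A minimal sketch of what the setting does (the addresses and header value here are illustrative, not from this diff):

```bash
# Assume one reverse proxy in front of the app (TRUST_PROXY=1).
# Express reads X-Forwarded-For from right to left: req.socket.remoteAddress
# is the first hop (the proxy itself), so with one trusted hop the first
# untrusted address, i.e. the rightmost X-Forwarded-For entry, becomes req.ip.
curl -s http://localhost:3080/ \
  -H 'X-Forwarded-For: 203.0.113.7, 198.51.100.9'
# TRUST_PROXY=0 -> req.ip is the connecting socket's address (no proxy assumed)
# TRUST_PROXY=1 -> req.ip resolves to 198.51.100.9 (rightmost untrusted entry)
```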
@@ -88,7 +83,7 @@ PROXY=
#============#

ANTHROPIC_API_KEY=user_provided
# ANTHROPIC_MODELS=claude-3-7-sonnet-latest,claude-3-7-sonnet-20250219,claude-3-5-haiku-20241022,claude-3-5-sonnet-20241022,claude-3-5-sonnet-latest,claude-3-5-sonnet-20240620,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307,claude-2.1,claude-2,claude-1.2,claude-1,claude-1-100k,claude-instant-1,claude-instant-1-100k
# ANTHROPIC_MODELS=claude-3-5-haiku-20241022,claude-3-5-sonnet-20241022,claude-3-5-sonnet-latest,claude-3-5-sonnet-20240620,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307,claude-2.1,claude-2,claude-1.2,claude-1,claude-1-100k,claude-instant-1,claude-instant-1-100k
# ANTHROPIC_REVERSE_PROXY=

#============#
@@ -175,7 +170,7 @@ GOOGLE_KEY=user_provided
#============#

OPENAI_API_KEY=user_provided
# OPENAI_MODELS=o1,o1-mini,o1-preview,gpt-4o,gpt-4.5-preview,chatgpt-4o-latest,gpt-4o-mini,gpt-3.5-turbo-0125,gpt-3.5-turbo-0301,gpt-3.5-turbo,gpt-4,gpt-4-0613,gpt-4-vision-preview,gpt-3.5-turbo-0613,gpt-3.5-turbo-16k-0613,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-instruct-0914,gpt-3.5-turbo-16k
# OPENAI_MODELS=o1,o1-mini,o1-preview,gpt-4o,chatgpt-4o-latest,gpt-4o-mini,gpt-3.5-turbo-0125,gpt-3.5-turbo-0301,gpt-3.5-turbo,gpt-4,gpt-4-0613,gpt-4-vision-preview,gpt-3.5-turbo-0613,gpt-3.5-turbo-16k-0613,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-instruct-0914,gpt-3.5-turbo-16k

DEBUG_OPENAI=false

@@ -209,6 +204,12 @@ ASSISTANTS_API_KEY=user_provided
# More info, including how to enable use of Assistants with Azure here:
# https://www.librechat.ai/docs/configuration/librechat_yaml/ai_endpoints/azure#using-assistants-with-azure

#============#
# OpenRouter #
#============#
# !!!Warning: Use the variable above instead of this one. Using this one will override the OpenAI endpoint
# OPENROUTER_API_KEY=

#============#
# Plugins #
#============#
@@ -248,13 +249,6 @@ AZURE_AI_SEARCH_SEARCH_OPTION_SELECT=
# DALLE3_AZURE_API_VERSION=
# DALLE2_AZURE_API_VERSION=

# Flux
#-----------------
FLUX_API_BASE_URL=https://api.us1.bfl.ai
# FLUX_API_BASE_URL = 'https://api.bfl.ml';

# Get your API key at https://api.us1.bfl.ai/auth/profile
# FLUX_API_KEY=

# Google
#-----------------
@@ -298,10 +292,6 @@ MEILI_NO_ANALYTICS=true
MEILI_HOST=http://0.0.0.0:7700
MEILI_MASTER_KEY=DrhYf7zENyR6AlUCKmnz0eYASOQdl6zxH7s7MKFSfFCt

# Optional: Disable indexing, useful in a multi-node setup
# where only one instance should perform an index sync.
# MEILI_NO_SYNC=true

#==================================================#
# Speech to Text & Text to Speech #
#==================================================#
@@ -399,7 +389,7 @@ FACEBOOK_CALLBACK_URL=/oauth/facebook/callback
GITHUB_CLIENT_ID=
GITHUB_CLIENT_SECRET=
GITHUB_CALLBACK_URL=/oauth/github/callback
# GitHub Enterprise
# GitHub Eenterprise
# GITHUB_ENTERPRISE_BASE_URL=
# GITHUB_ENTERPRISE_USER_AGENT=

@@ -473,15 +463,6 @@ FIREBASE_STORAGE_BUCKET=
FIREBASE_MESSAGING_SENDER_ID=
FIREBASE_APP_ID=

#========================#
# S3 AWS Bucket #
#========================#

AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
AWS_REGION=
AWS_BUCKET_NAME=

#========================#
# Shared Links #
#========================#
@@ -514,16 +495,6 @@ HELP_AND_FAQ_URL=https://librechat.ai
# Google tag manager id
#ANALYTICS_GTM_ID=user provided google tag manager id

#===============#
# REDIS Options #
#===============#

# REDIS_URI=10.10.10.10:6379
# USE_REDIS=true

# USE_REDIS_CLUSTER=true
# REDIS_CA=/path/to/ca.crt

#==================================================#
# Others #
#==================================================#
@@ -531,6 +502,9 @@ HELP_AND_FAQ_URL=https://librechat.ai

# NODE_ENV=

# REDIS_URI=
# USE_REDIS=

# E2E_USER_EMAIL=
# E2E_USER_PASSWORD=

@@ -553,4 +527,4 @@ HELP_AND_FAQ_URL=https://librechat.ai
#=====================================================#
# OpenWeather #
#=====================================================#
OPENWEATHER_API_KEY=
OPENWEATHER_API_KEY=
[file name not captured in this view]

@@ -1,42 +0,0 @@
name: Locize Translation Access Request
description: Request access to an additional language in Locize for LibreChat translations.
title: "Locize Access Request: "
labels: ["🌍 i18n", "🔑 access request"]
body:
  - type: markdown
    attributes:
      value: |
        Thank you for your interest in contributing to LibreChat translations!
        Please fill out the form below to request access to an additional language in **Locize**.

        **🔗 Available Languages:** [View the list here](https://www.librechat.ai/docs/translation)

        **📌 Note:** Ensure that the requested language is supported before submitting your request.
  - type: input
    id: account_name
    attributes:
      label: Locize Account Name
      description: Please provide your Locize account name (e.g., John Doe).
      placeholder: e.g., John Doe
    validations:
      required: true
  - type: input
    id: language_requested
    attributes:
      label: Language Code (ISO 639-1)
      description: |
        Enter the **ISO 639-1** language code for the language you want to translate into.
        Example: `es` for Spanish, `zh-Hant` for Traditional Chinese.

        **🔗 Reference:** [Available Languages](https://www.librechat.ai/docs/translation)
      placeholder: e.g., es
    validations:
      required: true
  - type: checkboxes
    id: agreement
    attributes:
      label: Agreement
      description: By submitting this request, you confirm that you will contribute responsibly and adhere to the project guidelines.
      options:
        - label: I agree to use my access solely for contributing to LibreChat translations.
          required: true
.github/ISSUE_TEMPLATE/QUESTION.yml (50 changes, vendored, new file)

@@ -0,0 +1,50 @@
name: Question
description: Ask your question
title: "[Question]: "
labels: ["❓ question"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill this!
  - type: textarea
    id: what-is-your-question
    attributes:
      label: What is your question?
      description: Please give as many details as possible
      placeholder: Please give as many details as possible
    validations:
      required: true
  - type: textarea
    id: more-details
    attributes:
      label: More Details
      description: Please provide more details if needed.
      placeholder: Please provide more details if needed.
    validations:
      required: true
  - type: dropdown
    id: browsers
    attributes:
      label: What is the main subject of your question?
      multiple: true
      options:
        - Documentation
        - Installation
        - UI
        - Endpoints
        - User System/OAuth
        - Other
  - type: textarea
    id: screenshots
    attributes:
      label: Screenshots
      description: If applicable, add screenshots to help explain your problem. You can drag and drop, paste images directly here or link to them.
  - type: checkboxes
    id: terms
    attributes:
      label: Code of Conduct
      description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/danny-avila/LibreChat/blob/main/.github/CODE_OF_CONDUCT.md)
      options:
        - label: I agree to follow this project's Code of Conduct
          required: true
.github/configuration-release.json (60 changes, vendored, file deleted)

@@ -1,60 +0,0 @@
{
  "categories": [
    {
      "title": "### ✨ New Features",
      "labels": ["feat"]
    },
    {
      "title": "### 🌍 Internationalization",
      "labels": ["i18n"]
    },
    {
      "title": "### 👐 Accessibility",
      "labels": ["a11y"]
    },
    {
      "title": "### 🔧 Fixes",
      "labels": ["Fix", "fix"]
    },
    {
      "title": "### ⚙️ Other Changes",
      "labels": ["ci", "style", "docs", "refactor", "chore"]
    }
  ],
  "ignore_labels": [
    "🔁 duplicate",
    "📊 analytics",
    "🌱 good first issue",
    "🔍 investigation",
    "🙏 help wanted",
    "❌ invalid",
    "❓ question",
    "🚫 wontfix",
    "🚀 release",
    "version"
  ],
  "base_branches": ["main"],
  "sort": {
    "order": "ASC",
    "on_property": "mergedAt"
  },
  "label_extractor": [
    {
      "pattern": "^(?:[^A-Za-z0-9]*)(feat|fix|chore|docs|refactor|ci|style|a11y|i18n)\\s*:",
      "target": "$1",
      "flags": "i",
      "on_property": "title",
      "method": "match"
    },
    {
      "pattern": "^(?:[^A-Za-z0-9]*)(v\\d+\\.\\d+\\.\\d+(?:-rc\\d+)?).*",
      "target": "version",
      "flags": "i",
      "on_property": "title",
      "method": "match"
    }
  ],
  "template": "## [#{{TO_TAG}}] - #{{TO_TAG_DATE}}\n\nChanges from #{{FROM_TAG}} to #{{TO_TAG}}.\n\n#{{CHANGELOG}}\n\n[See full release details][release-#{{TO_TAG}}]\n\n[release-#{{TO_TAG}}]: https://github.com/#{{OWNER}}/#{{REPO}}/releases/tag/#{{TO_TAG}}\n\n---",
  "pr_template": "- #{{TITLE}} by **@#{{AUTHOR}}** in [##{{NUMBER}}](#{{URL}})",
  "empty_template": "- no changes"
}
.github/configuration-unreleased.json (68 changes, vendored, file deleted)

@@ -1,68 +0,0 @@
{
  "categories": [
    {
      "title": "### ✨ New Features",
      "labels": ["feat"]
    },
    {
      "title": "### 🌍 Internationalization",
      "labels": ["i18n"]
    },
    {
      "title": "### 👐 Accessibility",
      "labels": ["a11y"]
    },
    {
      "title": "### 🔧 Fixes",
      "labels": ["Fix", "fix"]
    },
    {
      "title": "### ⚙️ Other Changes",
      "labels": ["ci", "style", "docs", "refactor", "chore"]
    }
  ],
  "ignore_labels": [
    "🔁 duplicate",
    "📊 analytics",
    "🌱 good first issue",
    "🔍 investigation",
    "🙏 help wanted",
    "❌ invalid",
    "❓ question",
    "🚫 wontfix",
    "🚀 release",
    "version",
    "action"
  ],
  "base_branches": ["main"],
  "sort": {
    "order": "ASC",
    "on_property": "mergedAt"
  },
  "label_extractor": [
    {
      "pattern": "^(?:[^A-Za-z0-9]*)(feat|fix|chore|docs|refactor|ci|style|a11y|i18n)\\s*:",
      "target": "$1",
      "flags": "i",
      "on_property": "title",
      "method": "match"
    },
    {
      "pattern": "^(?:[^A-Za-z0-9]*)(v\\d+\\.\\d+\\.\\d+(?:-rc\\d+)?).*",
      "target": "version",
      "flags": "i",
      "on_property": "title",
      "method": "match"
    },
    {
      "pattern": "^(?:[^A-Za-z0-9]*)(action)\\b.*",
      "target": "action",
      "flags": "i",
      "on_property": "title",
      "method": "match"
    }
  ],
  "template": "## [Unreleased]\n\n#{{CHANGELOG}}\n\n---",
  "pr_template": "- #{{TITLE}} by **@#{{AUTHOR}}** in [##{{NUMBER}}](#{{URL}})",
  "empty_template": "- no changes"
}
.github/workflows/backend-review.yml (8 changes, vendored)

@@ -39,9 +39,6 @@ jobs:
      - name: Install MCP Package
        run: npm run build:mcp

      - name: Install Data Schemas Package
        run: npm run build:data-schemas

      - name: Create empty auth.json file
        run: |
          mkdir -p api/data
@@ -64,7 +61,4 @@ jobs:
        run: cd api && npm run test:ci

      - name: Run librechat-data-provider unit tests
        run: cd packages/data-provider && npm run test:ci

      - name: Run librechat-mcp unit tests
        run: cd packages/mcp && npm run test:ci
        run: cd packages/data-provider && npm run test:ci
.github/workflows/create-release-pr.yml (41 changes, vendored, new file)

@@ -0,0 +1,41 @@
name: Create Release PR

on:
  workflow_dispatch:

jobs:
  create-release-pr:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get last version from package.json
        id: version
        run: |
          LAST_VERSION=$(jq -r '.version' package.json)
          echo "Last version: $LAST_VERSION"
          echo "LAST_VERSION=$LAST_VERSION" >> $GITHUB_ENV
          echo "last_version=$LAST_VERSION" >> $GITHUB_OUTPUT

      - name: Create New Release Pull Request
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          sign-commits: true
          commit-message: "chore(release): prepare for v${{ steps.version.outputs.last_version }}"
          base: release
          branch: release/v${{ steps.version.outputs.last_version }}
          reviewers: danny-avila
          title: "✨ v${{ steps.version.outputs.last_version }}"
          body: |
            **Release Details**:
            - 🚀 **Version**: v${{ steps.version.outputs.last_version }}
            - 📌 **Branch**: `main` → `release`
            - 🔄 **Merging this PR will finalize the release.**
          labels: "🚀 release"
.github/workflows/data-schemas.yml (58 changes, vendored, file deleted)

@@ -1,58 +0,0 @@
name: Publish `@librechat/data-schemas` to NPM

on:
  push:
    branches:
      - main
    paths:
      - 'packages/data-schemas/package.json'
  workflow_dispatch:
    inputs:
      reason:
        description: 'Reason for manual trigger'
        required: false
        default: 'Manual publish requested'

jobs:
  build-and-publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Use Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18.x'

      - name: Install dependencies
        run: cd packages/data-schemas && npm ci

      - name: Build
        run: cd packages/data-schemas && npm run build

      - name: Set up npm authentication
        run: echo "//registry.npmjs.org/:_authToken=${{ secrets.PUBLISH_NPM_TOKEN }}" > ~/.npmrc

      - name: Check version change
        id: check
        working-directory: packages/data-schemas
        run: |
          PACKAGE_VERSION=$(node -p "require('./package.json').version")
          PUBLISHED_VERSION=$(npm view @librechat/data-schemas version 2>/dev/null || echo "0.0.0")
          if [ "$PACKAGE_VERSION" = "$PUBLISHED_VERSION" ]; then
            echo "No version change, skipping publish"
            echo "skip=true" >> $GITHUB_OUTPUT
          else
            echo "Version changed, proceeding with publish"
            echo "skip=false" >> $GITHUB_OUTPUT
          fi

      - name: Pack package
        if: steps.check.outputs.skip != 'true'
        working-directory: packages/data-schemas
        run: npm pack

      - name: Publish
        if: steps.check.outputs.skip != 'true'
        working-directory: packages/data-schemas
        run: npm publish *.tgz --access public
.github/workflows/eslint-ci.yml (25 changes, vendored)

@@ -41,32 +41,19 @@ jobs:
          # Extract the base commit SHA from the pull_request event payload.
          BASE_SHA=$(jq --raw-output .pull_request.base.sha "$GITHUB_EVENT_PATH")
          echo "Base commit SHA: $BASE_SHA"

          # Get changed files (only JS/TS files in api/ or client/)
          CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRTUXB "$BASE_SHA" HEAD | grep -E '^(api|client)/.*\.(js|jsx|ts|tsx)$' || true)

          # Debug output
          echo "Changed files:"

          # List files changed between the base commit and HEAD, filtering only those in api/ or client/
          CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRTUXB "$BASE_SHA" HEAD | grep -E '^(api|client)/.*\.(js|jsx|ts|tsx)$')
          echo "Files to lint:"
          echo "$CHANGED_FILES"

          # Ensure there are files to lint before running ESLint
          if [[ -z "$CHANGED_FILES" ]]; then
            echo "No matching files changed. Skipping ESLint."
            echo "UPLOAD_SARIF=false" >> $GITHUB_ENV
            exit 0
          fi

          # Set variable to allow SARIF upload
          echo "UPLOAD_SARIF=true" >> $GITHUB_ENV

          # Run ESLint

          # Run ESLint on the changed files.
          npx eslint --no-error-on-unmatched-pattern \
            --config eslint.config.mjs \
            --format @microsoft/eslint-formatter-sarif \
            --output-file eslint-results.sarif $CHANGED_FILES || true

      - name: Upload analysis results to GitHub
        if: env.UPLOAD_SARIF == 'true'
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: eslint-results.sarif

[file name not captured in this view]

@@ -1,94 +0,0 @@
name: Generate Release Changelog PR

on:
  push:
    tags:
      - 'v*.*.*'

jobs:
  generate-release-changelog-pr:
    permissions:
      contents: write # Needed for pushing commits and creating branches.
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      # 1. Checkout the repository (with full history).
      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # 2. Generate the release changelog using our custom configuration.
      - name: Generate Release Changelog
        id: generate_release
        uses: mikepenz/release-changelog-builder-action@v5.1.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          configuration: ".github/configuration-release.json"
          owner: ${{ github.repository_owner }}
          repo: ${{ github.event.repository.name }}
          outputFile: CHANGELOG-release.md

      # 3. Update the main CHANGELOG.md:
      #    - If it doesn't exist, create it with a basic header.
      #    - Remove the "Unreleased" section (if present).
      #    - Prepend the new release changelog above previous releases.
      #    - Remove all temporary files before committing.
      - name: Update CHANGELOG.md
        run: |
          # Determine the release tag, e.g. "v1.2.3"
          TAG=${GITHUB_REF##*/}
          echo "Using release tag: $TAG"

          # Ensure CHANGELOG.md exists; if not, create a basic header.
          if [ ! -f CHANGELOG.md ]; then
            echo "# Changelog" > CHANGELOG.md
            echo "" >> CHANGELOG.md
            echo "All notable changes to this project will be documented in this file." >> CHANGELOG.md
            echo "" >> CHANGELOG.md
          fi

          echo "Updating CHANGELOG.md…"

          # Remove the "Unreleased" section (from "## [Unreleased]" until the first occurrence of '---') if it exists.
          if grep -q "^## \[Unreleased\]" CHANGELOG.md; then
            awk '/^## \[Unreleased\]/{flag=1} flag && /^---/{flag=0; next} !flag' CHANGELOG.md > CHANGELOG.cleaned
          else
            cp CHANGELOG.md CHANGELOG.cleaned
          fi

          # Split the cleaned file into:
          #   - header.md: content before the first release header ("## [v...").
          #   - tail.md: content from the first release header onward.
          awk '/^## \[v/{exit} {print}' CHANGELOG.cleaned > header.md
          awk 'f{print} /^## \[v/{f=1; print}' CHANGELOG.cleaned > tail.md

          # Combine header, the new release changelog, and the tail.
          echo "Combining updated changelog parts..."
          cat header.md CHANGELOG-release.md > CHANGELOG.md.new
          echo "" >> CHANGELOG.md.new
          cat tail.md >> CHANGELOG.md.new

          mv CHANGELOG.md.new CHANGELOG.md

          # Remove temporary files.
          rm -f CHANGELOG.cleaned header.md tail.md CHANGELOG-release.md

          echo "Final CHANGELOG.md content:"
          cat CHANGELOG.md

      # 4. Create (or update) the Pull Request with the updated CHANGELOG.md.
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          sign-commits: true
          commit-message: "chore: update CHANGELOG for release ${{ github.ref_name }}"
          base: main
          branch: "changelog/${{ github.ref_name }}"
          reviewers: danny-avila
          title: "chore: update CHANGELOG for release ${{ github.ref_name }}"
          body: |
            **Description**:
            - This PR updates the CHANGELOG.md by removing the "Unreleased" section and adding new release notes for release ${{ github.ref_name }} above previous releases.
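
The awk one-liners in the deleted workflow above do the heavy lifting, so a standalone demonstration may help. This is a hypothetical run against a toy CHANGELOG.md, using the same pattern the workflow used:

```bash
# Toy changelog with an Unreleased block followed by a released section.
cat > CHANGELOG.md <<'EOF'
# Changelog

## [Unreleased]
- pending change
---
## [v1.0.0]
- released change
EOF

# Same pattern as the workflow: drop everything from "## [Unreleased]"
# up to and including the first "---" separator.
awk '/^## \[Unreleased\]/{flag=1} flag && /^---/{flag=0; next} !flag' CHANGELOG.md
# Output keeps only:
#   # Changelog
#
#   ## [v1.0.0]
#   - released change
```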
[file name not captured in this view]

@@ -1,106 +0,0 @@
name: Generate Unreleased Changelog PR

on:
  schedule:
    - cron: "0 0 * * 1" # Runs every Monday at 00:00 UTC

jobs:
  generate-unreleased-changelog-pr:
    permissions:
      contents: write # Needed for pushing commits and creating branches.
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      # 1. Checkout the repository on main.
      - name: Checkout Repository on Main
        uses: actions/checkout@v4
        with:
          ref: main
          fetch-depth: 0

      # 4. Get the latest version tag.
      - name: Get Latest Tag
        id: get_latest_tag
        run: |
          LATEST_TAG=$(git describe --tags $(git rev-list --tags --max-count=1) || echo "none")
          echo "Latest tag: $LATEST_TAG"
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT

      # 5. Generate the Unreleased changelog.
      - name: Generate Unreleased Changelog
        id: generate_unreleased
        uses: mikepenz/release-changelog-builder-action@v5.1.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          configuration: ".github/configuration-unreleased.json"
          owner: ${{ github.repository_owner }}
          repo: ${{ github.event.repository.name }}
          outputFile: CHANGELOG-unreleased.md
          fromTag: ${{ steps.get_latest_tag.outputs.tag }}
          toTag: main

      # 7. Update CHANGELOG.md with the new Unreleased section.
      - name: Update CHANGELOG.md
        id: update_changelog
        run: |
          # Create CHANGELOG.md if it doesn't exist.
          if [ ! -f CHANGELOG.md ]; then
            echo "# Changelog" > CHANGELOG.md
            echo "" >> CHANGELOG.md
            echo "All notable changes to this project will be documented in this file." >> CHANGELOG.md
            echo "" >> CHANGELOG.md
          fi

          echo "Updating CHANGELOG.md…"

          # Extract content before the "## [Unreleased]" (or first version header if missing).
          if grep -q "^## \[Unreleased\]" CHANGELOG.md; then
            awk '/^## \[Unreleased\]/{exit} {print}' CHANGELOG.md > CHANGELOG_TMP.md
          else
            awk '/^## \[v/{exit} {print}' CHANGELOG.md > CHANGELOG_TMP.md
          fi

          # Append the generated Unreleased changelog.
          echo "" >> CHANGELOG_TMP.md
          cat CHANGELOG-unreleased.md >> CHANGELOG_TMP.md
          echo "" >> CHANGELOG_TMP.md

          # Append the remainder of the original changelog (starting from the first version header).
          awk 'f{print} /^## \[v/{f=1; print}' CHANGELOG.md >> CHANGELOG_TMP.md

          # Replace the old file with the updated file.
          mv CHANGELOG_TMP.md CHANGELOG.md

          # Remove the temporary generated file.
          rm -f CHANGELOG-unreleased.md

          echo "Final CHANGELOG.md:"
          cat CHANGELOG.md

      # 8. Check if CHANGELOG.md has any updates.
      - name: Check for CHANGELOG.md changes
        id: changelog_changes
        run: |
          if git diff --quiet CHANGELOG.md; then
            echo "has_changes=false" >> $GITHUB_OUTPUT
          else
            echo "has_changes=true" >> $GITHUB_OUTPUT
          fi

      # 9. Create (or update) the Pull Request only if there are changes.
      - name: Create Pull Request
        if: steps.changelog_changes.outputs.has_changes == 'true'
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          base: main
          branch: "changelog/unreleased-update"
          sign-commits: true
          commit-message: "action: update Unreleased changelog"
          title: "action: update Unreleased changelog"
          body: |
            **Description**:
            - This PR updates the Unreleased section in CHANGELOG.md.
            - It compares the current main branch with the latest version tag (determined as ${{ steps.get_latest_tag.outputs.tag }}),
              regenerates the Unreleased changelog, removes any old Unreleased block, and inserts the new content.
.github/workflows/i18n-unused-keys.yml (93 changes, vendored, file deleted)

@@ -1,93 +0,0 @@
name: Detect Unused i18next Strings

on:
  pull_request:
    paths:
      - "client/src/**"
      - "api/**"

jobs:
  detect-unused-i18n-keys:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write # Required for posting PR comments
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Find unused i18next keys
        id: find-unused
        run: |
          echo "🔍 Scanning for unused i18next keys..."

          # Define paths
          I18N_FILE="client/src/locales/en/translation.json"
          SOURCE_DIRS=("client/src" "api")

          # Check if translation file exists
          if [[ ! -f "$I18N_FILE" ]]; then
            echo "::error title=Missing i18n File::Translation file not found: $I18N_FILE"
            exit 1
          fi

          # Extract all keys from the JSON file
          KEYS=$(jq -r 'keys[]' "$I18N_FILE")

          # Track unused keys
          UNUSED_KEYS=()

          # Check if each key is used in the source code
          for KEY in $KEYS; do
            FOUND=false
            for DIR in "${SOURCE_DIRS[@]}"; do
              if grep -r --include=\*.{js,jsx,ts,tsx} -q "$KEY" "$DIR"; then
                FOUND=true
                break
              fi
            done

            if [[ "$FOUND" == false ]]; then
              UNUSED_KEYS+=("$KEY")
            fi
          done

          # Output results
          if [[ ${#UNUSED_KEYS[@]} -gt 0 ]]; then
            echo "🛑 Found ${#UNUSED_KEYS[@]} unused i18n keys:"
            echo "unused_keys=$(echo "${UNUSED_KEYS[@]}" | jq -R -s -c 'split(" ")')" >> $GITHUB_ENV
            for KEY in "${UNUSED_KEYS[@]}"; do
              echo "::warning title=Unused i18n Key::'$KEY' is defined but not used in the codebase."
            done
          else
            echo "✅ No unused i18n keys detected!"
            echo "unused_keys=[]" >> $GITHUB_ENV
          fi

      - name: Post verified comment on PR
        if: env.unused_keys != '[]'
        run: |
          PR_NUMBER=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")

          # Format the unused keys list as checkboxes for easy manual checking.
          FILTERED_KEYS=$(echo "$unused_keys" | jq -r '.[]' | grep -v '^\s*$' | sed 's/^/- [ ] `/;s/$/`/' )

          COMMENT_BODY=$(cat <<EOF
          ### 🚨 Unused i18next Keys Detected

          The following translation keys are defined in \`translation.json\` but are **not used** in the codebase:

          $FILTERED_KEYS

          ⚠️ **Please remove these unused keys to keep the translation files clean.**
          EOF
          )

          gh api "repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" \
            -f body="$COMMENT_BODY" \
            -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Fail workflow if unused keys found
        if: env.unused_keys != '[]'
        run: exit 1
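
The core of the deleted scan above is a jq key dump plus one grep per key. Run standalone against a toy translation file, the idiom looks like this (the file contents and directory layout here are made up for illustration):

```bash
# Toy flat translation file, shaped like client/src/locales/en/translation.json.
cat > translation.json <<'EOF'
{ "com_ui_save": "Save", "com_ui_never_used": "Never used" }
EOF

# jq -r 'keys[]' prints one top-level key per line...
for KEY in $(jq -r 'keys[]' translation.json); do
  # ...and grep -rq reports whether any source file mentions the key.
  if ! grep -rq --include=\*.{js,jsx,ts,tsx} "$KEY" client/src api 2>/dev/null; then
    echo "unused: $KEY"
  fi
done
```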
.github/workflows/locize-i18n-sync.yml (21 changes, vendored)

@@ -1,4 +1,4 @@
name: Sync Locize Translations & Create Translation PR
name: Push New Keys & Create Translation PR

on:
  push:
@@ -7,8 +7,8 @@ on:
    types: [locize/versionPublished]

jobs:
  sync-translations:
    name: Sync Translation Keys with Locize
  push-new-keys:
    name: Push Missing Translation Keys to locize
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Repository
@@ -22,22 +22,23 @@ jobs:
      - name: Install locize CLI
        run: npm install -g locize-cli

      # Sync translations (Push missing keys & remove deleted ones)
      - name: Sync Locize with Repository
      # Only push keys if this workflow was triggered by a push event.
      - name: Push Missing Translation Keys to locize
        if: ${{ github.event_name == 'push' }}
        run: |
          cd client/src/locales
          locize sync --api-key ${{ secrets.LOCIZE_API_KEY }} --project-id ${{ secrets.LOCIZE_PROJECT_ID }} --language en
          locize save-missing --api-key ${{ secrets.LOCIZE_API_KEY }} --project-id ${{ secrets.LOCIZE_PROJECT_ID }} --language en

      # When triggered by repository_dispatch, skip sync step.
      - name: Skip sync step on non-push events
      # When triggered by repository_dispatch, skip pushing new keys.
      - name: Skip push step on non-push events
        if: ${{ github.event_name != 'push' }}
        run: echo "Skipping sync as the event is not a push."
        run: echo "Skipping push of new keys as the event is not a push."

  create-pull-request:
    name: Create Translation PR on Version Published
    runs-on: ubuntu-latest
    needs: sync-translations
    # This job will wait for push-new-keys to complete.
    needs: push-new-keys
    permissions:
      contents: write
      pull-requests: write

.github/workflows/main-version-bump.yml (110 changes, vendored, new file)

@@ -0,0 +1,110 @@
name: Semantic Version Bump (via Labels)

on:
  pull_request:
    types:
      - closed

jobs:
  versioning:
    if: github.event.pull_request.merged == true && github.base_ref == 'main'
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install dependencies
        run: npm install -g semver jq

      - name: Get last version from package.json
        id: version
        run: |
          LAST_VERSION=$(jq -r '.version' package.json)
          echo "Last version: $LAST_VERSION"
          echo "LAST_VERSION=$LAST_VERSION" >> $GITHUB_ENV

      - name: Check if an existing version bump PR exists
        id: check_existing_pr
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          EXISTING_PR=$(gh pr list --state open --json number,title --jq '.[] | select(.title | contains("chore(version)")) | {number: .number, title: .title}')
          EXISTING_PR_NUMBER=$(echo "$EXISTING_PR" | jq -r '.number')
          EXISTING_PR_VERSION=$(echo "$EXISTING_PR" | grep -oP '\d+\.\d+\.\d+' | head -n 1)

          if [[ -n "$EXISTING_PR_NUMBER" ]]; then
            echo "Closing existing PR #$EXISTING_PR_NUMBER"
            gh pr close "$EXISTING_PR_NUMBER" --comment "🚀 Auto-closing this PR in favor of a new version bump."
            echo "EXISTING_PR_NUMBER=$EXISTING_PR_NUMBER" >> $GITHUB_ENV
          fi

          if [[ -n "$EXISTING_PR_VERSION" ]]; then
            echo "EXISTING_PR_VERSION=$EXISTING_PR_VERSION" >> $GITHUB_ENV
          fi

      - name: Determine new version based on PR labels
        id: bump_version
        env:
          PR_LABELS: ${{ join(github.event.pull_request.labels.*.name, ' ') }}
          BASE_VERSION: ${{ env.EXISTING_PR_VERSION || env.LAST_VERSION }}
        run: |
          echo "Labels on PR: $PR_LABELS"
          echo "Base version: $BASE_VERSION"

          MAJOR=false
          MINOR=false
          PATCH=false

          if echo "$PR_LABELS" | grep -qi "💥 breaking change"; then
            MAJOR=true
          fi
          if echo "$PR_LABELS" | grep -qi "✨ feat"; then
            MINOR=true
          fi
          if echo "$PR_LABELS" | grep -qi "🔧 fix"; then
            PATCH=true
          fi

          NEW_VERSION=$BASE_VERSION

          if [ "$MAJOR" = true ]; then
            NEW_VERSION=$(semver -i major $BASE_VERSION)
          elif [ "$MINOR" = true ]; then
            NEW_VERSION=$(semver -i minor $BASE_VERSION)
          elif [ "$PATCH" = true ]; then
            NEW_VERSION=$(semver -i patch $BASE_VERSION)
          fi

          echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_ENV
          echo "Next version: $NEW_VERSION"

      - name: Update package.json version
        run: |
          jq --arg version "$NEW_VERSION" '.version = $version' package.json > temp.json && mv temp.json package.json

      - name: Install dependencies and update package-lock.json
        run: |
          npm install
          npm ci

      - name: Create New Pull Request
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          sign-commits: true
          commit-message: "chore(version): bump version to ${{ env.NEW_VERSION }}"
          base: main
          branch: version-bump/${{ env.NEW_VERSION }}
          reviewers: danny-avila
          title: "chore(version): Bump version to ${{ env.NEW_VERSION }}"
          body: |
            **Description**:
            - 🎯 **Objective**: Update `package.json` and `package-lock.json` with the latest version bump.
            - 🔍 **Details**: This PR is automatically generated upon merging PRs with the correct versioning labels.
            - ✅ **Status**: Ready for review.
          labels: "📦 version update"
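
As a quick illustration of the `semver` CLI calls in the bump step above (the version numbers are made up):

```bash
npm install -g semver   # the same global install the workflow performs

semver -i patch 0.7.7   # -> 0.7.8
semver -i minor 0.7.7   # -> 0.8.0
semver -i major 0.7.7   # -> 1.0.0

# The label checks are plain grep -qi tests, so precedence comes from the
# if/elif chain: "breaking change" wins over "feat", which wins over "fix".
```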
.github/workflows/tag-and-release.yml (113 changes, vendored, new file)

@@ -0,0 +1,113 @@
name: Tag and Release

on:
  push:
    branches:
      - release

jobs:
  create-tag-release:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get last version from package.json
        id: version
        run: |
          # Extract the version using jq (make sure jq is installed)
          LAST_VERSION=$(jq -r '.version' package.json)
          echo "Last version: $LAST_VERSION"
          echo "LAST_VERSION=$LAST_VERSION" >> $GITHUB_ENV

      - name: Generate Changelog
        id: changelog
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          # Get previous tag (if none exists, use a placeholder)
          PREV_VERSION=$(git tag --sort=-v:refname | head -n 1)
          if [ -z "$PREV_VERSION" ]; then
            PREV_VERSION="Initial_Commit"
          fi

          # Start building the changelog file
          {
            echo "## What's Changed"
            echo ""
            echo "## [v${LAST_VERSION}](https://github.com/${{ github.repository }}/releases/tag/v${LAST_VERSION})"
            echo ""
            if [ "$PREV_VERSION" != "Initial_Commit" ]; then
              echo "Updates since v${PREV_VERSION} include:"
              echo "- https://github.com/${{ github.repository }}/compare/v${PREV_VERSION}...v${LAST_VERSION}"
            else
              echo "This is the first release."
            fi
            echo ""

            echo "### ✨ New Features"
            gh pr list --state merged --search "feat" --json title,number,url --jq \
              '.[] | "* 🚀 [\(.title) by @\(.url | split("/")[-2])](\(.url))"' || echo "* No new features."
            echo ""

            echo "### 🖼️ Style"
            gh pr list --state merged --search "style" --json title,number,url --jq \
              '.[] | "* 🎨 [\(.title) by @\(.url | split("/")[-2])](\(.url))"' || echo "* No style updates."
            echo ""

            echo "### 👐 Accessibility"
            gh pr list --state merged --search "a11y" --json title,number,url --jq \
              '.[] | "* 👐 [\(.title) by @\(.url | split("/")[-2])](\(.url))"' || echo "* No accessibility updates."
            echo ""

            echo "### 🌍 Internationalization"
            gh pr list --state merged --search "i18n" --json title,number,url --jq \
              '.[] | "* 🌏 [\(.title) by @\(.url | split("/")[-2])](\(.url))"' || echo "* No internationalization updates."
            echo ""

            echo "### ⚙️ Other Changes"
            gh pr list --state merged --search "chore OR refactor OR docs" --json title,number,url --jq \
              '.[] | "* 📦 [\(.title) by @\(.url | split("/")[-2])](\(.url))"' || echo "* No other changes."
            echo ""

            echo "### 🔧 Fixes"
            gh pr list --state merged --search "fix" --json title,number,url --jq \
              '.[] | "* 🔧 [\(.title) by @\(.url | split("/")[-2])](\(.url))"' || echo "* No fixes."
            echo ""

            echo "## New Contributors"
            # Use jq’s unique_by to get one PR per new author along with its URL
            gh pr list --state merged --json author,url --jq \
              'unique_by(.author.login)[] | "* @" + .author.login + " made their first contribution in " + .url' \
              || echo "* No new contributors."
            echo ""

            echo "**Full Changelog**: https://github.com/${{ github.repository }}/compare/v${PREV_VERSION}...v${LAST_VERSION}"
          } > changelog.md

          # Output the changelog for logging purposes
          cat changelog.md

          # Save the changelog as a multiline environment variable
          echo "CHANGELOG<<EOF" >> $GITHUB_ENV
          cat changelog.md >> $GITHUB_ENV
          echo "EOF" >> $GITHUB_ENV

      - name: Create Git Tag
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          git tag v${LAST_VERSION}
          git push origin v${LAST_VERSION}

      - name: Create GitHub Release
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh release create v${LAST_VERSION} \
            --title "Release v${LAST_VERSION}" \
            --notes "$CHANGELOG"
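
One detail worth noting in the changelog step above is the `CHANGELOG<<EOF` heredoc-style syntax, which is how GitHub Actions accepts multiline environment variables. A minimal sketch:

```bash
# Multiline values written to $GITHUB_ENV need a delimiter block:
#   name<<DELIMITER
#   ...value...
#   DELIMITER
{
  echo "CHANGELOG<<EOF"
  printf 'line one\nline two\n'
  echo "EOF"
} >> "$GITHUB_ENV"
# Later steps can then read it as ${{ env.CHANGELOG }} or "$CHANGELOG".
```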
.github/workflows/unused-packages.yml (153 changes, vendored, file deleted)

@@ -1,153 +0,0 @@
name: Detect Unused NPM Packages

on:
  pull_request:
    paths:
      - 'package.json'
      - 'package-lock.json'
      - 'client/**'
      - 'api/**'

jobs:
  detect-unused-packages:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - name: Use Node.js 20.x
        uses: actions/setup-node@v4
        with:
          node-version: 20
          cache: 'npm'

      - name: Install depcheck
        run: npm install -g depcheck

      - name: Validate JSON files
        run: |
          for FILE in package.json client/package.json api/package.json; do
            if [[ -f "$FILE" ]]; then
              jq empty "$FILE" || (echo "::error title=Invalid JSON::$FILE is invalid" && exit 1)
            fi
          done

      - name: Extract Dependencies Used in Scripts
        id: extract-used-scripts
        run: |
          extract_deps_from_scripts() {
            local package_file=$1
            if [[ -f "$package_file" ]]; then
              jq -r '.scripts | to_entries[].value' "$package_file" | \
                grep -oE '([a-zA-Z0-9_-]+)' | sort -u > used_scripts.txt
            else
              touch used_scripts.txt
            fi
          }

          extract_deps_from_scripts "package.json"
          mv used_scripts.txt root_used_deps.txt

          extract_deps_from_scripts "client/package.json"
          mv used_scripts.txt client_used_deps.txt

          extract_deps_from_scripts "api/package.json"
          mv used_scripts.txt api_used_deps.txt

      - name: Extract Dependencies Used in Source Code
        id: extract-used-code
        run: |
          extract_deps_from_code() {
            local folder=$1
            local output_file=$2
            if [[ -d "$folder" ]]; then
              grep -rEho "require\\(['\"]([a-zA-Z0-9@/._-]+)['\"]\\)" "$folder" --include=\*.{js,ts,mjs,cjs} | \
                sed -E "s/require\\(['\"]([a-zA-Z0-9@/._-]+)['\"]\\)/\1/" > "$output_file"

              grep -rEho "import .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]" "$folder" --include=\*.{js,ts,mjs,cjs} | \
                sed -E "s/import .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]/\1/" >> "$output_file"

              sort -u "$output_file" -o "$output_file"
            else
              touch "$output_file"
            fi
          }

          extract_deps_from_code "." root_used_code.txt
          extract_deps_from_code "client" client_used_code.txt
          extract_deps_from_code "api" api_used_code.txt

      - name: Run depcheck for root package.json
        id: check-root
        run: |
          if [[ -f "package.json" ]]; then
            UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
            UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat root_used_deps.txt root_used_code.txt | sort) || echo "")
            echo "ROOT_UNUSED<<EOF" >> $GITHUB_ENV
            echo "$UNUSED" >> $GITHUB_ENV
            echo "EOF" >> $GITHUB_ENV
          fi

      - name: Run depcheck for client/package.json
        id: check-client
        run: |
          if [[ -f "client/package.json" ]]; then
            chmod -R 755 client
            cd client
            UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
            UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../client_used_deps.txt ../client_used_code.txt | sort) || echo "")
            echo "CLIENT_UNUSED<<EOF" >> $GITHUB_ENV
            echo "$UNUSED" >> $GITHUB_ENV
            echo "EOF" >> $GITHUB_ENV
            cd ..
          fi

      - name: Run depcheck for api/package.json
        id: check-api
        run: |
          if [[ -f "api/package.json" ]]; then
            chmod -R 755 api
            cd api
            UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
            UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../api_used_deps.txt ../api_used_code.txt | sort) || echo "")
            echo "API_UNUSED<<EOF" >> $GITHUB_ENV
            echo "$UNUSED" >> $GITHUB_ENV
            echo "EOF" >> $GITHUB_ENV
            cd ..
          fi

      - name: Post comment on PR if unused dependencies are found
        if: env.ROOT_UNUSED != '' || env.CLIENT_UNUSED != '' || env.API_UNUSED != ''
        run: |
          PR_NUMBER=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")

          ROOT_LIST=$(echo "$ROOT_UNUSED" | awk '{print "- `" $0 "`"}')
          CLIENT_LIST=$(echo "$CLIENT_UNUSED" | awk '{print "- `" $0 "`"}')
          API_LIST=$(echo "$API_UNUSED" | awk '{print "- `" $0 "`"}')

          COMMENT_BODY=$(cat <<EOF
          ### 🚨 Unused NPM Packages Detected

          The following **unused dependencies** were found:

          $(if [[ ! -z "$ROOT_UNUSED" ]]; then echo "#### 📂 Root \`package.json\`"; echo ""; echo "$ROOT_LIST"; echo ""; fi)

          $(if [[ ! -z "$CLIENT_UNUSED" ]]; then echo "#### 📂 Client \`client/package.json\`"; echo ""; echo "$CLIENT_LIST"; echo ""; fi)

          $(if [[ ! -z "$API_UNUSED" ]]; then echo "#### 📂 API \`api/package.json\`"; echo ""; echo "$API_LIST"; echo ""; fi)

          ⚠️ **Please remove these unused dependencies to keep your project clean.**
          EOF
          )

          gh api "repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" \
            -f body="$COMMENT_BODY" \
            -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Fail workflow if unused dependencies found
        if: env.ROOT_UNUSED != '' || env.CLIENT_UNUSED != '' || env.API_UNUSED != ''
        run: exit 1
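
The `comm -23` calls in the depcheck steps above compute a set difference on sorted input. A small self-contained example of the idiom (package names are made up):

```bash
# comm expects sorted input; column 1 = only in A, 2 = only in B, 3 = both.
# -23 suppresses columns 2 and 3, leaving "in A but not in B":
# here, packages depcheck flagged minus packages referenced elsewhere.
comm -23 <(printf 'axios\nlodash\nmoment\n' | sort) \
         <(printf 'axios\n' | sort)
# Output:
#   lodash
#   moment
```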
.gitignore (3 changes, vendored)

@@ -105,5 +105,4 @@ auth.json
uploads/

# owner
release/
!/client/src/@types/i18next.d.ts
release/
CHANGELOG.md (16 changes, file deleted)

@@ -1,16 +0,0 @@
# Changelog

All notable changes to this project will be documented in this file.

## [Unreleased]

### ✨ New Features

- 🪄 feat: Agent Artifacts by **@danny-avila** in [#5804](https://github.com/danny-avila/LibreChat/pull/5804)

### ⚙️ Other Changes

- 🔄 chore: Enforce 18next Language Keys by **@rubentalstra** in [#5803](https://github.com/danny-avila/LibreChat/pull/5803)
- 🔃 refactor: Parent Message ID Handling on Error, Update Translations, Bump Agents by **@danny-avila** in [#5833](https://github.com/danny-avila/LibreChat/pull/5833)

---
[file name not captured in this view]

@@ -1,4 +1,4 @@
# v0.7.7
# v0.7.6

# Base node image
FROM node:20-alpine AS node

@@ -1,5 +1,5 @@
# Dockerfile.multi
# v0.7.7
# v0.7.6

# Base for all builds
FROM node:20-alpine AS base-min
@@ -11,7 +11,6 @@ RUN npm config set fetch-retry-maxtimeout 600000 && \
COPY package*.json ./
COPY packages/data-provider/package*.json ./packages/data-provider/
COPY packages/mcp/package*.json ./packages/mcp/
COPY packages/data-schemas/package*.json ./packages/data-schemas/
COPY client/package*.json ./client/
COPY api/package*.json ./api/

@@ -33,13 +32,6 @@ COPY packages/mcp ./
COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
RUN npm run build

# Build data-schemas
FROM base AS data-schemas-build
WORKDIR /app/packages/data-schemas
COPY packages/data-schemas ./
COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
RUN npm run build

# Client build
FROM base AS client-build
WORKDIR /app/client
@@ -57,9 +49,8 @@ COPY api ./api
COPY config ./config
COPY --from=data-provider-build /app/packages/data-provider/dist ./packages/data-provider/dist
COPY --from=mcp-build /app/packages/mcp/dist ./packages/mcp/dist
COPY --from=data-schemas-build /app/packages/data-schemas/dist ./packages/data-schemas/dist
COPY --from=client-build /app/client/dist ./client/dist
WORKDIR /app/api
EXPOSE 3080
ENV HOST=0.0.0.0
CMD ["node", "server/index.js"]
CMD ["node", "server/index.js"]

LICENSE (2 changes)

@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2025 LibreChat
Copyright (c) 2024 LibreChat

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

[file name not captured in this view]

@@ -81,7 +81,7 @@
- [Fork Messages & Conversations](https://www.librechat.ai/docs/features/fork) for Advanced Context control

- 💬 **Multimodal & File Interactions**:
  - Upload and analyze images with Claude 3, GPT-4.5, GPT-4o, o1, Llama-Vision, and Gemini 📸
  - Upload and analyze images with Claude 3, GPT-4o, o1, Llama-Vision, and Gemini 📸
  - Chat with Files using Custom Endpoints, OpenAI, Azure, Anthropic, AWS Bedrock, & Google 🗃️

- 🌎 **Multilingual UI**:
@@ -197,6 +197,6 @@ We thank [Locize](https://locize.com) for their translation management tools tha
<p align="center">
  <a href="https://locize.com" target="_blank" rel="noopener noreferrer">
    <img src="https://github.com/user-attachments/assets/d6b70894-6064-475e-bb65-92a9e23e0077" alt="Locize Logo" height="50">
    <img src="https://locize.com/img/locize_color.svg" alt="Locize Logo" height="50">
  </a>
</p>

[file name not captured in this view]

@@ -7,7 +7,7 @@ const {
  getResponseSender,
  validateVisionModel,
} = require('librechat-data-provider');
const { SplitStreamHandler: _Handler, GraphEvents } = require('@librechat/agents');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const {
  truncateText,
  formatMessage,
@@ -16,31 +16,16 @@ const {
  parseParamFromPrompt,
  createContextHandlers,
} = require('./prompts');
const {
  getClaudeHeaders,
  configureReasoning,
  checkPromptCacheSupport,
} = require('~/server/services/Endpoints/anthropic/helpers');
const { getModelMaxTokens, getModelMaxOutputTokens, matchModelName } = require('~/utils');
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const Tokenizer = require('~/server/services/Tokenizer');
const { logger, sendEvent } = require('~/config');
const { sleep } = require('~/server/utils');
const BaseClient = require('./BaseClient');
const { logger } = require('~/config');

const HUMAN_PROMPT = '\n\nHuman:';
const AI_PROMPT = '\n\nAssistant:';

class SplitStreamHandler extends _Handler {
  getDeltaContent(chunk) {
    return (chunk?.delta?.text ?? chunk?.completion) || '';
  }
  getReasoningDelta(chunk) {
    return chunk?.delta?.thinking || '';
  }
}

/** Helper function to introduce a delay before retrying */
function delayBeforeRetry(attempts, baseDelay = 1000) {
  return new Promise((resolve) => setTimeout(resolve, baseDelay * attempts));
@@ -83,8 +68,6 @@ class AnthropicClient extends BaseClient {
    /** The key for the usage object's output tokens
     * @type {string} */
    this.outputTokensKey = 'output_tokens';
    /** @type {SplitStreamHandler | undefined} */
    this.streamHandler;
  }

  setOptions(options) {
@@ -114,10 +97,9 @@ class AnthropicClient extends BaseClient {

    const modelMatch = matchModelName(this.modelOptions.model, EModelEndpoint.anthropic);
    this.isClaude3 = modelMatch.includes('claude-3');
    this.isLegacyOutput = !(
      /claude-3[-.]5-sonnet/.test(modelMatch) || /claude-3[-.]7/.test(modelMatch)
    );
    this.supportsCacheControl = this.options.promptCache && checkPromptCacheSupport(modelMatch);
    this.isLegacyOutput = !modelMatch.includes('claude-3-5-sonnet');
    this.supportsCacheControl =
      this.options.promptCache && this.checkPromptCacheSupport(modelMatch);

    if (
      this.isLegacyOutput &&
@@ -143,7 +125,7 @@ class AnthropicClient extends BaseClient {
        this.options.endpointType ?? this.options.endpoint,
        this.options.endpointTokenConfig,
      ) ??
      anthropicSettings.maxOutputTokens.reset(this.modelOptions.model);
      1500;
    this.maxPromptTokens =
      this.options.maxPromptTokens || this.maxContextTokens - this.maxResponseTokens;

@@ -189,9 +171,18 @@ class AnthropicClient extends BaseClient {
      options.baseURL = this.options.reverseProxyUrl;
    }

    const headers = getClaudeHeaders(requestOptions?.model, this.supportsCacheControl);
    if (headers) {
      options.defaultHeaders = headers;
    if (
      this.supportsCacheControl &&
      requestOptions?.model &&
      requestOptions.model.includes('claude-3-5-sonnet')
    ) {
      options.defaultHeaders = {
        'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15,prompt-caching-2024-07-31',
      };
    } else if (this.supportsCacheControl) {
      options.defaultHeaders = {
        'anthropic-beta': 'prompt-caching-2024-07-31',
      };
    }

    return new Anthropic(options);
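
The branch above only toggles the `anthropic-beta` request header. For reference, a hypothetical raw request carrying the same header values the diff sets (the endpoint and payload shape are assumptions about the Anthropic Messages API, not taken from this diff):

```bash
# Sketch: what the client's defaultHeaders amount to on the wire for a
# claude-3-5-sonnet model with prompt caching enabled.
curl -s https://api.anthropic.com/v1/messages \
  -H "x-api-key: $ANTHROPIC_API_KEY" \
  -H "anthropic-version: 2023-06-01" \
  -H "anthropic-beta: max-tokens-3-5-sonnet-2024-07-15,prompt-caching-2024-07-31" \
  -H "content-type: application/json" \
  -d '{
    "model": "claude-3-5-sonnet-20240620",
    "max_tokens": 1024,
    "messages": [{"role": "user", "content": "Hello"}]
  }'
```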
@@ -677,38 +668,29 @@ class AnthropicClient extends BaseClient {
|
||||
* @returns {Promise<Anthropic.default.Message | Anthropic.default.Completion>} The response from the Anthropic client.
|
||||
*/
|
||||
async createResponse(client, options, useMessages) {
|
||||
return (useMessages ?? this.useMessages)
|
||||
return useMessages ?? this.useMessages
|
||||
? await client.messages.create(options)
|
||||
: await client.completions.create(options);
|
||||
}
|
||||
|
||||
getMessageMapMethod() {
|
||||
/**
|
||||
* @param {TMessage} msg
|
||||
*/
|
||||
return (msg) => {
|
||||
if (msg.text != null && msg.text && msg.text.startsWith(':::thinking')) {
|
||||
msg.text = msg.text.replace(/:::thinking.*?:::/gs, '').trim();
|
||||
}
|
||||
|
||||
return msg;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string[]} [intermediateReply]
|
||||
* @returns {string}
|
||||
* @param {string} modelName
|
||||
* @returns {boolean}
|
||||
*/
|
||||
getStreamText(intermediateReply) {
|
||||
if (!this.streamHandler) {
|
||||
return intermediateReply?.join('') ?? '';
|
||||
checkPromptCacheSupport(modelName) {
|
||||
const modelMatch = matchModelName(modelName, EModelEndpoint.anthropic);
|
||||
if (modelMatch.includes('claude-3-5-sonnet-latest')) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const reasoningText = this.streamHandler.reasoningTokens.join('');
|
||||
|
||||
const reasoningBlock = reasoningText.length > 0 ? `:::thinking\n${reasoningText}\n:::\n` : '';
|
||||
|
||||
return `${reasoningBlock}${this.streamHandler.tokens.join('')}`;
|
||||
if (
|
||||
modelMatch === 'claude-3-5-sonnet' ||
|
||||
modelMatch === 'claude-3-5-haiku' ||
|
||||
modelMatch === 'claude-3-haiku' ||
|
||||
modelMatch === 'claude-3-opus'
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async sendCompletion(payload, { onProgress, abortController }) {
|
||||
@@ -728,6 +710,7 @@ class AnthropicClient extends BaseClient {
|
||||
user_id: this.user,
|
||||
};
|
||||
|
||||
let text = '';
|
||||
const {
|
||||
stream,
|
||||
model,
|
||||
@@ -738,34 +721,22 @@ class AnthropicClient extends BaseClient {
|
||||
topK: top_k,
|
||||
} = this.modelOptions;
|
||||
|
||||
let requestOptions = {
|
||||
const requestOptions = {
|
||||
model,
|
||||
stream: stream || true,
|
||||
stop_sequences,
|
||||
temperature,
|
||||
metadata,
|
||||
top_p,
|
||||
top_k,
|
||||
};
|
||||
|
||||
if (this.useMessages) {
|
||||
requestOptions.messages = payload;
|
||||
requestOptions.max_tokens =
|
||||
maxOutputTokens || anthropicSettings.maxOutputTokens.reset(requestOptions.model);
|
||||
requestOptions.max_tokens = maxOutputTokens || legacy.maxOutputTokens.default;
|
||||
} else {
|
||||
requestOptions.prompt = payload;
|
||||
requestOptions.max_tokens_to_sample = maxOutputTokens || legacy.maxOutputTokens.default;
|
||||
}
|
||||
|
||||
requestOptions = configureReasoning(requestOptions, {
|
||||
thinking: this.options.thinking,
|
||||
thinkingBudget: this.options.thinkingBudget,
|
||||
});
|
||||
|
||||
if (!/claude-3[-.]7/.test(model)) {
|
||||
requestOptions.top_p = top_p;
|
||||
requestOptions.top_k = top_k;
|
||||
} else if (requestOptions.thinking == null) {
|
||||
requestOptions.topP = top_p;
|
||||
requestOptions.topK = top_k;
|
||||
requestOptions.max_tokens_to_sample = maxOutputTokens || 1500;
|
||||
}

    if (this.systemMessage && this.supportsCacheControl === true) {
@@ -785,17 +756,13 @@ class AnthropicClient extends BaseClient {
    }

    logger.debug('[AnthropicClient]', { ...requestOptions });
    this.streamHandler = new SplitStreamHandler({
      accumulate: true,
      runId: this.responseMessageId,
      handlers: {
        [GraphEvents.ON_RUN_STEP]: (event) => sendEvent(this.options.res, event),
        [GraphEvents.ON_MESSAGE_DELTA]: (event) => sendEvent(this.options.res, event),
        [GraphEvents.ON_REASONING_DELTA]: (event) => sendEvent(this.options.res, event),
      },
    });

    let intermediateReply = this.streamHandler.tokens;
    const handleChunk = (currentChunk) => {
      if (currentChunk) {
        text += currentChunk;
        onProgress(currentChunk);
      }
    };

    const maxRetries = 3;
    const streamRate = this.options.streamRate ?? Constants.DEFAULT_STREAM_RATE;
@@ -816,15 +783,22 @@ class AnthropicClient extends BaseClient {
        });

        for await (const completion of response) {
          // Handle each completion as before
          const type = completion?.type ?? '';
          if (tokenEventTypes.has(type)) {
            logger.debug(`[AnthropicClient] ${type}`, completion);
            this[type] = completion;
          }
          this.streamHandler.handle(completion);
          if (completion?.delta?.text) {
            handleChunk(completion.delta.text);
          } else if (completion.completion) {
            handleChunk(completion.completion);
          }

          await sleep(streamRate);
        }

        // Successful processing, exit loop
        break;
      } catch (error) {
        attempts += 1;
@@ -834,10 +808,6 @@ class AnthropicClient extends BaseClient {

        if (attempts < maxRetries) {
          await delayBeforeRetry(attempts, 350);
        } else if (this.streamHandler && this.streamHandler.reasoningTokens.length) {
          return this.getStreamText();
        } else if (intermediateReply.length > 0) {
          return this.getStreamText(intermediateReply);
        } else {
          throw new Error(`Operation failed after ${maxRetries} attempts: ${error.message}`);
        }
@@ -853,7 +823,8 @@ class AnthropicClient extends BaseClient {
    }

    await processResponse.bind(this)();
    return this.getStreamText(intermediateReply);

    return text.trim();
  }
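
The loop above caps attempts, backs off between tries, and falls back to whatever partial stream text survived. A minimal standalone version of that pattern (delayBeforeRetry's linear backoff is an assumption based on its call site above):

// Illustrative sketch: retry an async streaming call with capped attempts,
// falling back to any partial output collected before the failure.
async function withRetries(run, { maxRetries = 3, baseDelayMs = 350 } = {}) {
  const partial = [];
  let attempts = 0;
  while (attempts < maxRetries) {
    try {
      await run(partial); // `run` pushes streamed chunks into `partial`
      return partial.join('');
    } catch (error) {
      attempts += 1;
      if (attempts >= maxRetries) {
        if (partial.length > 0) {
          return partial.join(''); // best-effort partial result
        }
        throw new Error(`Operation failed after ${maxRetries} attempts: ${error.message}`);
      }
      await new Promise((r) => setTimeout(r, baseDelayMs * attempts));
    }
  }
  return partial.join('');
}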

  getSaveOptions() {
@@ -863,8 +834,6 @@ class AnthropicClient extends BaseClient {
      promptPrefix: this.options.promptPrefix,
      modelLabel: this.options.modelLabel,
      promptCache: this.options.promptCache,
      thinking: this.options.thinking,
      thinkingBudget: this.options.thinkingBudget,
      resendFiles: this.options.resendFiles,
      iconURL: this.options.iconURL,
      greeting: this.options.greeting,

@@ -5,12 +5,10 @@ const {
  isAgentsEndpoint,
  isParamEndpoint,
  EModelEndpoint,
  ContentTypes,
  excludedKeys,
  ErrorTypes,
  Constants,
} = require('librechat-data-provider');
const { getMessages, saveMessage, updateMessage, saveConvo, getConvo } = require('~/models');
const { getMessages, saveMessage, updateMessage, saveConvo } = require('~/models');
const { addSpaceIfNeeded, isEnabled } = require('~/server/utils');
const { truncateToolCallOutputs } = require('./prompts');
const checkBalance = require('~/models/checkBalance');
@@ -57,10 +55,6 @@ class BaseClient {
     * Flag to determine if the client re-submitted the latest assistant message.
     * @type {boolean | undefined} */
    this.continued;
    /**
     * Flag to determine if the client has already fetched the conversation while saving new messages.
     * @type {boolean | undefined} */
    this.fetchedConvo;
    /** @type {TMessage[]} */
    this.currentMessages = [];
    /** @type {import('librechat-data-provider').VisionModes | undefined} */
@@ -366,14 +360,17 @@ class BaseClient {
   * context: TMessage[],
   * remainingContextTokens: number,
   * messagesToRefine: TMessage[],
   * }>} An object with three properties: `context`, `remainingContextTokens`, and `messagesToRefine`.
   * summaryIndex: number,
   * }>} An object with four properties: `context`, `summaryIndex`, `remainingContextTokens`, and `messagesToRefine`.
   * `context` is an array of messages that fit within the token limit.
   * `summaryIndex` is the index of the first message in the `messagesToRefine` array.
   * `remainingContextTokens` is the number of tokens remaining within the limit after adding the messages to the context.
   * `messagesToRefine` is an array of messages that were not added to the context because they would have exceeded the token limit.
   */
  async getMessagesWithinTokenLimit({ messages: _messages, maxContextTokens, instructions }) {
    // Every reply is primed with <|start|>assistant<|message|>, so we
    // start with 3 tokens for the label after all messages have been counted.
    let summaryIndex = -1;
    let currentTokenCount = 3;
    const instructionsTokenCount = instructions?.tokenCount ?? 0;
    let remainingContextTokens =
@@ -406,12 +403,14 @@ class BaseClient {
    }

    const prunedMemory = messages;
    summaryIndex = prunedMemory.length - 1;
    remainingContextTokens -= currentTokenCount;

    return {
      context: context.reverse(),
      remainingContextTokens,
      messagesToRefine: prunedMemory,
      summaryIndex,
    };
  }
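
The hunk above packs messages newest-first under a token budget and reports the cutoff as summaryIndex. A minimal standalone sketch of that pruning (the 3-token reply priming comes from the comment above; messages are assumed newest-last with a tokenCount field):

// Illustrative sketch: newest-first context packing under a token budget.
function packContext(messages, maxContextTokens) {
  const context = [];
  let currentTokenCount = 3; // reply is primed with <|start|>assistant<|message|>
  let remainingContextTokens = maxContextTokens;
  const pool = [...messages];
  // Walk from newest to oldest; stop at the first message that overflows.
  while (pool.length > 0) {
    const message = pool[pool.length - 1];
    if (currentTokenCount + message.tokenCount > maxContextTokens) {
      break;
    }
    currentTokenCount += message.tokenCount;
    context.push(pool.pop());
  }
  remainingContextTokens -= currentTokenCount;
  return {
    context: context.reverse(),
    remainingContextTokens,
    messagesToRefine: pool, // oldest messages that did not fit
    summaryIndex: pool.length - 1,
  };
}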

@@ -454,7 +453,7 @@ class BaseClient {

    let orderedWithInstructions = this.addInstructions(orderedMessages, instructions);

    let { context, remainingContextTokens, messagesToRefine } =
    let { context, remainingContextTokens, messagesToRefine, summaryIndex } =
      await this.getMessagesWithinTokenLimit({
        messages: orderedWithInstructions,
        instructions,
@@ -524,7 +523,7 @@ class BaseClient {
    }

    // Make sure to only continue summarization logic if the summary message was generated
    shouldSummarize = summaryMessage != null && shouldSummarize === true;
    shouldSummarize = summaryMessage && shouldSummarize;

    logger.debug('[BaseClient] Context Count (2/2)', {
      remainingContextTokens,
@@ -534,18 +533,17 @@ class BaseClient {
    /** @type {Record<string, number> | undefined} */
    let tokenCountMap;
    if (buildTokenMap) {
      const currentPayload = shouldSummarize ? orderedWithInstructions : context;
      tokenCountMap = currentPayload.reduce((map, message, index) => {
      tokenCountMap = orderedWithInstructions.reduce((map, message, index) => {
        const { messageId } = message;
        if (!messageId) {
          return map;
        }

        if (shouldSummarize && index === messagesToRefine.length - 1 && !usePrevSummary) {
        if (shouldSummarize && index === summaryIndex && !usePrevSummary) {
          map.summaryMessage = { ...summaryMessage, messageId, tokenCount: summaryTokenCount };
        }

        map[messageId] = currentPayload[index].tokenCount;
        map[messageId] = orderedWithInstructions[index].tokenCount;
        return map;
      }, {});
    }
@@ -865,39 +863,16 @@ class BaseClient {
      return { message: savedMessage };
    }

    const fieldsToKeep = {
      conversationId: message.conversationId,
      endpoint: this.options.endpoint,
      endpointType: this.options.endpointType,
      ...endpointOptions,
    };

    const existingConvo =
      this.fetchedConvo === true
        ? null
        : await getConvo(this.options.req?.user?.id, message.conversationId);

    const unsetFields = {};
    if (existingConvo != null) {
      this.fetchedConvo = true;
      for (const key in existingConvo) {
        if (!key) {
          continue;
        }
        if (excludedKeys.has(key)) {
          continue;
        }

        if (endpointOptions?.[key] === undefined) {
          unsetFields[key] = 1;
        }
      }
    }

    const conversation = await saveConvo(this.options.req, fieldsToKeep, {
      context: 'api/app/clients/BaseClient.js - saveMessageToDatabase #saveConvo',
      unsetFields,
    });
    const conversation = await saveConvo(
      this.options.req,
      {
        conversationId: message.conversationId,
        endpoint: this.options.endpoint,
        endpointType: this.options.endpointType,
        ...endpointOptions,
      },
      { context: 'api/app/clients/BaseClient.js - saveMessageToDatabase #saveConvo' },
    );

    return { message: savedMessage, conversation };
  }
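
The removed branch above fetches the stored conversation once, then marks any stored field that the incoming options no longer provide for unsetting. A standalone sketch of that diffing step (excludedKeys being a Set follows the import above; the rest is illustrative):

// Illustrative sketch: compute which stored fields should be unset because
// the incoming payload no longer provides them.
function computeUnsetFields(existingConvo, endpointOptions, excludedKeys) {
  const unsetFields = {};
  if (existingConvo == null) {
    return unsetFields;
  }
  for (const key in existingConvo) {
    if (!key || excludedKeys.has(key)) {
      continue;
    }
    if (endpointOptions?.[key] === undefined) {
      unsetFields[key] = 1; // MongoDB-style $unset flag
    }
  }
  return unsetFields;
}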

@@ -1018,17 +993,11 @@ class BaseClient {
    const processValue = (value) => {
      if (Array.isArray(value)) {
        for (let item of value) {
          if (
            !item ||
            !item.type ||
            item.type === ContentTypes.THINK ||
            item.type === ContentTypes.ERROR ||
            item.type === ContentTypes.IMAGE_URL
          ) {
          if (!item || !item.type || item.type === 'image_url') {
            continue;
          }

          if (item.type === ContentTypes.TOOL_CALL && item.tool_call != null) {
          if (item.type === 'tool_call' && item.tool_call != null) {
            const toolName = item.tool_call?.name || '';
            if (toolName != null && toolName && typeof toolName === 'string') {
              numTokens += this.getTokenCount(toolName);
@@ -1124,13 +1093,9 @@ class BaseClient {
      return message;
    }

    const files = await getFiles(
      {
        file_id: { $in: fileIds },
      },
      {},
      {},
    );
    const files = await getFiles({
      file_id: { $in: fileIds },
    });

    await this.addImageURLs(message, files, this.visionMode);

@@ -51,7 +51,7 @@ class GoogleClient extends BaseClient {

    const serviceKey = creds[AuthKeys.GOOGLE_SERVICE_KEY] ?? {};
    this.serviceKey =
      serviceKey && typeof serviceKey === 'string' ? JSON.parse(serviceKey) : (serviceKey ?? {});
      serviceKey && typeof serviceKey === 'string' ? JSON.parse(serviceKey) : serviceKey ?? {};
    /** @type {string | null | undefined} */
    this.project_id = this.serviceKey.project_id;
    this.client_email = this.serviceKey.client_email;
@@ -73,8 +73,6 @@ class GoogleClient extends BaseClient {
     * @type {string} */
    this.outputTokensKey = 'output_tokens';
    this.visionMode = VisionModes.generative;
    /** @type {string} */
    this.systemMessage;
    if (options.skipSetOptions) {
      return;
    }
@@ -186,7 +184,7 @@ class GoogleClient extends BaseClient {
    if (typeof this.options.artifactsPrompt === 'string' && this.options.artifactsPrompt) {
      promptPrefix = `${promptPrefix ?? ''}\n${this.options.artifactsPrompt}`.trim();
    }
    this.systemMessage = promptPrefix;
    this.options.promptPrefix = promptPrefix;
    this.initializeClient();
    return this;
  }
@@ -316,7 +314,7 @@ class GoogleClient extends BaseClient {
      }

      this.augmentedPrompt = await this.contextHandlers.createContext();
      this.systemMessage = this.augmentedPrompt + this.systemMessage;
      this.options.promptPrefix = this.augmentedPrompt + this.options.promptPrefix;
    }
  }

@@ -363,8 +361,8 @@ class GoogleClient extends BaseClient {
      throw new Error('[GoogleClient] PaLM 2 and Codey models are no longer supported.');
    }

    if (this.systemMessage) {
      const instructionsTokenCount = this.getTokenCount(this.systemMessage);
    if (this.options.promptPrefix) {
      const instructionsTokenCount = this.getTokenCount(this.options.promptPrefix);

      this.maxContextTokens = this.maxContextTokens - instructionsTokenCount;
      if (this.maxContextTokens < 0) {
@@ -419,8 +417,8 @@ class GoogleClient extends BaseClient {
      ],
    };

    if (this.systemMessage) {
      payload.instances[0].context = this.systemMessage;
    if (this.options.promptPrefix) {
      payload.instances[0].context = this.options.promptPrefix;
    }

    logger.debug('[GoogleClient] buildMessages', payload);
@@ -466,7 +464,7 @@ class GoogleClient extends BaseClient {
      identityPrefix = `${identityPrefix}\nYou are ${this.options.modelLabel}`;
    }

    let promptPrefix = (this.systemMessage ?? '').trim();
    let promptPrefix = (this.options.promptPrefix ?? '').trim();

    if (identityPrefix) {
      promptPrefix = `${identityPrefix}${promptPrefix}`;
@@ -641,7 +639,7 @@ class GoogleClient extends BaseClient {
    let error;
    try {
      if (!EXCLUDED_GENAI_MODELS.test(modelName) && !this.project_id) {
        /** @type {GenerativeModel} */
        /** @type {GenAI} */
        const client = this.client;
        /** @type {GenerateContentRequest} */
        const requestOptions = {
@@ -650,7 +648,7 @@ class GoogleClient extends BaseClient {
          generationConfig: googleGenConfigSchema.parse(this.modelOptions),
        };

        const promptPrefix = (this.systemMessage ?? '').trim();
        const promptPrefix = (this.options.promptPrefix ?? '').trim();
        if (promptPrefix.length) {
          requestOptions.systemInstruction = {
            parts: [
@@ -665,17 +663,7 @@ class GoogleClient extends BaseClient {
        /** @type {GenAIUsageMetadata} */
        let usageMetadata;

        abortController.signal.addEventListener(
          'abort',
          () => {
            logger.warn('[GoogleClient] Request was aborted', abortController.signal.reason);
          },
          { once: true },
        );

        const result = await client.generateContentStream(requestOptions, {
          signal: abortController.signal,
        });
        const result = await client.generateContentStream(requestOptions);
        for await (const chunk of result.stream) {
          usageMetadata = !usageMetadata
            ? chunk?.usageMetadata
@@ -827,8 +815,7 @@ class GoogleClient extends BaseClient {
    let reply = '';
    const { abortController } = options;

    const model =
      this.options.titleModel ?? this.modelOptions.modelName ?? this.modelOptions.model ?? '';
    const model = this.modelOptions.modelName ?? this.modelOptions.model ?? '';
    const safetySettings = getSafetySettings(model);
    if (!EXCLUDED_GENAI_MODELS.test(model) && !this.project_id) {
      logger.debug('Identified titling model as GenAI version');

@@ -2,7 +2,7 @@ const { z } = require('zod');
const axios = require('axios');
const { Ollama } = require('ollama');
const { Constants } = require('librechat-data-provider');
const { deriveBaseURL, logAxiosError } = require('~/utils');
const { deriveBaseURL } = require('~/utils');
const { sleep } = require('~/server/utils');
const { logger } = require('~/config');

@@ -68,7 +68,7 @@ class OllamaClient {
    } catch (error) {
      const logMessage =
        'Failed to fetch models from Ollama API. If you are not using Ollama directly, and instead, through some aggregator or reverse proxy that handles fetching via OpenAI spec, ensure the name of the endpoint doesn\'t start with `ollama` (case-insensitive).';
      logAxiosError({ message: logMessage, error });
      logger.error(logMessage, error);
      return [];
    }
  }

@@ -7,7 +7,6 @@ const {
  ImageDetail,
  EModelEndpoint,
  resolveHeaders,
  KnownEndpoints,
  openAISettings,
  ImageDetailCost,
  CohereConstants,
@@ -109,14 +108,18 @@ class OpenAIClient extends BaseClient {
    const omniPattern = /\b(o1|o3)\b/i;
    this.isOmni = omniPattern.test(this.modelOptions.model);

    const { OPENAI_FORCE_PROMPT } = process.env ?? {};
    const { OPENROUTER_API_KEY, OPENAI_FORCE_PROMPT } = process.env ?? {};
    if (OPENROUTER_API_KEY && !this.azure) {
      this.apiKey = OPENROUTER_API_KEY;
      this.useOpenRouter = true;
    }

    const { reverseProxyUrl: reverseProxy } = this.options;

    if (
      !this.useOpenRouter &&
      ((reverseProxy && reverseProxy.includes(KnownEndpoints.openrouter)) ||
        (this.options.endpoint &&
          this.options.endpoint.toLowerCase().includes(KnownEndpoints.openrouter)))
      reverseProxy &&
      reverseProxy.includes('https://openrouter.ai/api/v1')
    ) {
      this.useOpenRouter = true;
    }
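
Both variants above flip useOpenRouter from configuration; the newer one also matches the endpoint name, not just the proxy URL. A compact sketch of that detection (KnownEndpoints.openrouter resolving to the string 'openrouter' is an assumption):

// Illustrative sketch: detect an OpenRouter-backed configuration from the
// reverse proxy URL or the endpoint name.
function detectOpenRouter({ reverseProxy, endpoint }, alreadyEnabled = false) {
  if (alreadyEnabled) {
    return true;
  }
  const marker = 'openrouter'; // assumed value of KnownEndpoints.openrouter
  return Boolean(
    (reverseProxy && reverseProxy.includes(marker)) ||
      (endpoint && endpoint.toLowerCase().includes(marker)),
  );
}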
@@ -303,9 +306,7 @@ class OpenAIClient extends BaseClient {
  }

  getEncoding() {
    return this.modelOptions?.model && /gpt-4[^-\s]/.test(this.modelOptions.model)
      ? 'o200k_base'
      : 'cl100k_base';
    return this.model?.includes('gpt-4o') ? 'o200k_base' : 'cl100k_base';
  }
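
Both versions above pick a tokenizer encoding from the model name; the regex form is the broader of the two. A standalone equivalent, assuming only what the diff shows:

// Illustrative sketch: choose the tiktoken encoding from the model name.
// /gpt-4[^-\s]/ matches 'gpt-4o' and 'gpt-4.5' but not 'gpt-4' or 'gpt-4-0613'.
function getEncodingForModel(model) {
  return model && /gpt-4[^-\s]/.test(model) ? 'o200k_base' : 'cl100k_base';
}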

  /**
@@ -505,8 +506,9 @@ class OpenAIClient extends BaseClient {
    if (promptPrefix && this.isOmni === true) {
      const lastUserMessageIndex = payload.findLastIndex((message) => message.role === 'user');
      if (lastUserMessageIndex !== -1) {
        payload[lastUserMessageIndex].content =
          `${promptPrefix}\n${payload[lastUserMessageIndex].content}`;
        payload[
          lastUserMessageIndex
        ].content = `${promptPrefix}\n${payload[lastUserMessageIndex].content}`;
      }
    }

@@ -612,7 +614,7 @@ class OpenAIClient extends BaseClient {
  }

  initializeLLM({
    model = openAISettings.model.default,
    model = 'gpt-4o-mini',
    modelName,
    temperature = 0.2,
    max_tokens,
@@ -713,7 +715,7 @@ class OpenAIClient extends BaseClient {

    const { OPENAI_TITLE_MODEL } = process.env ?? {};

    let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? openAISettings.model.default;
    let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-4o-mini';
    if (model === Constants.CURRENT_MODEL) {
      model = this.modelOptions.model;
    }
@@ -906,7 +908,7 @@ ${convo}
    let prompt;

    // TODO: remove the gpt fallback and make it specific to endpoint
    const { OPENAI_SUMMARY_MODEL = openAISettings.model.default } = process.env ?? {};
    const { OPENAI_SUMMARY_MODEL = 'gpt-4o-mini' } = process.env ?? {};
    let model = this.options.summaryModel ?? OPENAI_SUMMARY_MODEL;
    if (model === Constants.CURRENT_MODEL) {
      model = this.modelOptions.model;
@@ -1065,36 +1067,14 @@ ${convo}
    });
  }

  /**
   *
   * @param {string[]} [intermediateReply]
   * @returns {string}
   */
  getStreamText(intermediateReply) {
  getStreamText() {
    if (!this.streamHandler) {
      return intermediateReply?.join('') ?? '';
    }

    let thinkMatch;
    let remainingText;
    let reasoningText = '';

    if (this.streamHandler.reasoningTokens.length > 0) {
      reasoningText = this.streamHandler.reasoningTokens.join('');
      thinkMatch = reasoningText.match(/<think>([\s\S]*?)<\/think>/)?.[1]?.trim();
      if (thinkMatch != null && thinkMatch) {
        const reasoningTokens = `:::thinking\n${thinkMatch}\n:::\n`;
        remainingText = reasoningText.split(/<\/think>/)?.[1]?.trim() || '';
        return `${reasoningTokens}${remainingText}${this.streamHandler.tokens.join('')}`;
      } else if (thinkMatch === '') {
        remainingText = reasoningText.split(/<\/think>/)?.[1]?.trim() || '';
        return `${remainingText}${this.streamHandler.tokens.join('')}`;
      }
      return '';
    }

    const reasoningTokens =
      reasoningText.length > 0
        ? `:::thinking\n${reasoningText.replace('<think>', '').replace('</think>', '').trim()}\n:::\n`
      this.streamHandler.reasoningTokens.length > 0
        ? `:::thinking\n${this.streamHandler.reasoningTokens.join('')}\n:::\n`
        : '';

    return `${reasoningTokens}${this.streamHandler.tokens.join('')}`;
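
The removed branch above converts a <think>…</think> span in the reasoning stream into the :::thinking block the UI renders. A standalone sketch of that extraction, using only the regexes from the diff:

// Illustrative sketch: lift a <think>...</think> span out of streamed text
// and re-emit it as a ':::thinking' block followed by the visible answer.
function formatThinking(raw) {
  const thinkMatch = raw.match(/<think>([\s\S]*?)<\/think>/)?.[1]?.trim();
  const remainingText = raw.split(/<\/think>/)?.[1]?.trim() || '';
  if (thinkMatch) {
    return `:::thinking\n${thinkMatch}\n:::\n${remainingText}`;
  }
  // Empty reasoning span: keep only the visible portion; no span: pass through.
  return thinkMatch === '' ? remainingText : raw;
}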
@@ -1272,29 +1252,6 @@ ${convo}
      });
    }

    /** Note: OpenAI Web Search models do not support any known parameters besides `max_tokens` */
    if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) {
      const searchExcludeParams = [
        'frequency_penalty',
        'presence_penalty',
        'temperature',
        'top_p',
        'top_k',
        'stop',
        'logit_bias',
        'seed',
        'response_format',
        'n',
        'logprobs',
        'user',
      ];

      this.options.dropParams = this.options.dropParams || [];
      this.options.dropParams = [
        ...new Set([...this.options.dropParams, ...searchExcludeParams]),
      ];
    }
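
The removed block merges the search-model exclusions into dropParams, which the next hunk deletes from the request. A compact sketch of that merge-and-drop pattern:

// Illustrative sketch: merge excluded parameter names, then strip them
// from the outgoing request options.
function dropUnsupportedParams(modelOptions, dropParams = [], excluded = []) {
  const toDrop = [...new Set([...dropParams, ...excluded])];
  for (const param of toDrop) {
    delete modelOptions[param];
  }
  return modelOptions;
}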

    if (this.options.dropParams && Array.isArray(this.options.dropParams)) {
      this.options.dropParams.forEach((param) => {
        delete modelOptions[param];
@@ -1330,12 +1287,8 @@ ${convo}
    ) {
      delete modelOptions.stream;
      delete modelOptions.stop;
    } else if (
      (!this.isOmni || /^o1-(mini|preview)/i.test(modelOptions.model)) &&
      modelOptions.reasoning_effort != null
    ) {
    } else if (!this.isOmni && modelOptions.reasoning_effort != null) {
      delete modelOptions.reasoning_effort;
      delete modelOptions.temperature;
    }

    let reasoningKey = 'reasoning_content';
@@ -1343,12 +1296,6 @@ ${convo}
      modelOptions.include_reasoning = true;
      reasoningKey = 'reasoning';
    }
    if (this.useOpenRouter && modelOptions.reasoning_effort != null) {
      modelOptions.reasoning = {
        effort: modelOptions.reasoning_effort,
      };
      delete modelOptions.reasoning_effort;
    }

    this.streamHandler = new SplitStreamHandler({
      reasoningKey,
@@ -1367,19 +1314,11 @@ ${convo}
    streamPromise = new Promise((resolve) => {
      streamResolve = resolve;
    });
    /** @type {OpenAI.OpenAI.CompletionCreateParamsStreaming} */
    const params = {
      ...modelOptions,
      stream: true,
    };
    if (
      this.options.endpoint === EModelEndpoint.openAI ||
      this.options.endpoint === EModelEndpoint.azureOpenAI
    ) {
      params.stream_options = { include_usage: true };
    }
    const stream = await openai.beta.chat.completions
      .stream(params)
      .stream({
        ...modelOptions,
        stream: true,
      })
      .on('abort', () => {
        /* Do nothing here */
      })
@@ -1510,7 +1449,7 @@ ${convo}
      this.options.context !== 'title' &&
      message.content.startsWith('<think>')
    ) {
      return this.getStreamText();
      return message.content.replace('<think>', ':::thinking').replace('</think>', ':::');
    }

    return message.content;
@@ -1519,7 +1458,7 @@ ${convo}
      err?.message?.includes('abort') ||
      (err instanceof OpenAI.APIError && err?.message?.includes('abort'))
    ) {
      return this.getStreamText(intermediateReply);
      return intermediateReply.join('');
    }
    if (
      err?.message?.includes(
@@ -1534,18 +1473,14 @@ ${convo}
      (err instanceof OpenAI.OpenAIError && err?.message?.includes('missing finish_reason'))
    ) {
      logger.error('[OpenAIClient] Known OpenAI error:', err);
      if (this.streamHandler && this.streamHandler.reasoningTokens.length) {
        return this.getStreamText();
      } else if (intermediateReply.length > 0) {
        return this.getStreamText(intermediateReply);
      if (intermediateReply.length > 0) {
        return intermediateReply.join('');
      } else {
        throw err;
      }
    } else if (err instanceof OpenAI.APIError) {
      if (this.streamHandler && this.streamHandler.reasoningTokens.length) {
        return this.getStreamText();
      } else if (intermediateReply.length > 0) {
        return this.getStreamText(intermediateReply);
      if (intermediateReply.length > 0) {
        return intermediateReply.join('');
      } else {
        throw err;
      }

@@ -1,7 +1,7 @@
/**
 * Anthropic API: Adds cache control to the appropriate user messages in the payload.
 * @param {Array<AnthropicMessage | BaseMessage>} messages - The array of message objects.
 * @returns {Array<AnthropicMessage | BaseMessage>} - The updated array of message objects with cache control added.
 * @param {Array<AnthropicMessage>} messages - The array of message objects.
 * @returns {Array<AnthropicMessage>} - The updated array of message objects with cache control added.
 */
function addCacheControl(messages) {
  if (!Array.isArray(messages) || messages.length < 2) {
@@ -13,9 +13,7 @@ function addCacheControl(messages) {

  for (let i = updatedMessages.length - 1; i >= 0 && userMessagesModified < 2; i--) {
    const message = updatedMessages[i];
    if (message.getType != null && message.getType() !== 'human') {
      continue;
    } else if (message.getType == null && message.role !== 'user') {
    if (message.role !== 'user') {
      continue;
    }
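
addCacheControl walks backwards and tags the two most recent user messages so Anthropic's prompt caching can reuse the earlier prefix. A hedged sketch of the tagging step (the cache_control shape follows Anthropic's ephemeral cache type; the content rewrite is an assumption for illustration):

// Illustrative sketch: mark the last two user messages for prompt caching.
function addCacheControlSketch(messages) {
  if (!Array.isArray(messages) || messages.length < 2) {
    return messages;
  }
  const updated = [...messages];
  let userMessagesModified = 0;
  for (let i = updated.length - 1; i >= 0 && userMessagesModified < 2; i--) {
    const message = updated[i];
    if (message.role !== 'user') {
      continue;
    }
    if (typeof message.content === 'string') {
      // Wrap plain text so the cache marker can attach to the content part.
      message.content = [
        { type: 'text', text: message.content, cache_control: { type: 'ephemeral' } },
      ];
    }
    userMessagesModified += 1;
  }
  return updated;
}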

@@ -282,80 +282,4 @@ describe('formatAgentMessages', () => {
    // Additional check to ensure the consecutive assistant messages were combined
    expect(result[1].content).toHaveLength(2);
  });

  it('should skip THINK type content parts', () => {
    const payload = [
      {
        role: 'assistant',
        content: [
          { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Initial response' },
          { type: ContentTypes.THINK, [ContentTypes.THINK]: 'Reasoning about the problem...' },
          { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Final answer' },
        ],
      },
    ];

    const result = formatAgentMessages(payload);

    expect(result).toHaveLength(1);
    expect(result[0]).toBeInstanceOf(AIMessage);
    expect(result[0].content).toEqual('Initial response\nFinal answer');
  });

  it('should join TEXT content as string when THINK content type is present', () => {
    const payload = [
      {
        role: 'assistant',
        content: [
          { type: ContentTypes.THINK, [ContentTypes.THINK]: 'Analyzing the problem...' },
          { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'First part of response' },
          { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Second part of response' },
          { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Final part of response' },
        ],
      },
    ];

    const result = formatAgentMessages(payload);

    expect(result).toHaveLength(1);
    expect(result[0]).toBeInstanceOf(AIMessage);
    expect(typeof result[0].content).toBe('string');
    expect(result[0].content).toBe(
      'First part of response\nSecond part of response\nFinal part of response',
    );
    expect(result[0].content).not.toContain('Analyzing the problem...');
  });

  it('should exclude ERROR type content parts', () => {
    const payload = [
      {
        role: 'assistant',
        content: [
          { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Hello there' },
          {
            type: ContentTypes.ERROR,
            [ContentTypes.ERROR]:
              'An error occurred while processing the request: Something went wrong',
          },
          { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Final answer' },
        ],
      },
    ];

    const result = formatAgentMessages(payload);

    expect(result).toHaveLength(1);
    expect(result[0]).toBeInstanceOf(AIMessage);
    expect(result[0].content).toEqual([
      { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Hello there' },
      { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Final answer' },
    ]);

    // Make sure no error content exists in the result
    const hasErrorContent = result[0].content.some(
      (item) =>
        item.type === ContentTypes.ERROR || JSON.stringify(item).includes('An error occurred'),
    );
    expect(hasErrorContent).toBe(false);
  });
});

@@ -153,7 +153,6 @@ const formatAgentMessages = (payload) => {
    let currentContent = [];
    let lastAIMessage = null;

    let hasReasoning = false;
    for (const part of message.content) {
      if (part.type === ContentTypes.TEXT && part.tool_call_ids) {
        /*
@@ -208,27 +207,11 @@ const formatAgentMessages = (payload) => {
            content: output || '',
          }),
        );
      } else if (part.type === ContentTypes.THINK) {
        hasReasoning = true;
        continue;
      } else if (part.type === ContentTypes.ERROR || part.type === ContentTypes.AGENT_UPDATE) {
        continue;
      } else {
        currentContent.push(part);
      }
    }

    if (hasReasoning) {
      currentContent = currentContent
        .reduce((acc, curr) => {
          if (curr.type === ContentTypes.TEXT) {
            return `${acc}${curr[ContentTypes.TEXT]}\n`;
          }
          return acc;
        }, '')
        .trim();
    }

    if (currentContent.length > 0) {
      messages.push(new AIMessage({ content: currentContent }));
    }
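
When a reasoning part was seen, the removed block collapses the surviving TEXT parts into a single newline-joined string before building the AIMessage. A standalone sketch ('think', 'error', and 'text' stand in for the ContentTypes constants):

// Illustrative sketch: drop THINK/ERROR parts and, when reasoning was
// present, flatten the remaining TEXT parts into one string.
function flattenAssistantContent(parts) {
  const hasReasoning = parts.some((part) => part.type === 'think');
  const kept = parts.filter((part) => part.type !== 'think' && part.type !== 'error');
  if (!hasReasoning) {
    return kept; // keep the structured content parts as-is
  }
  return kept
    .reduce((acc, curr) => (curr.type === 'text' ? `${acc}${curr.text}\n` : acc), '')
    .trim();
}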

@@ -1,4 +1,3 @@
const { SplitStreamHandler } = require('@librechat/agents');
const { anthropicSettings } = require('librechat-data-provider');
const AnthropicClient = require('~/app/clients/AnthropicClient');

@@ -406,327 +405,4 @@ describe('AnthropicClient', () => {
      expect(Number.isNaN(result)).toBe(false);
    });
  });

  describe('maxOutputTokens handling for different models', () => {
    it('should not cap maxOutputTokens for Claude 3.5 Sonnet models', () => {
      const client = new AnthropicClient('test-api-key');
      const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 10;

      client.setOptions({
        modelOptions: {
          model: 'claude-3-5-sonnet',
          maxOutputTokens: highTokenValue,
        },
      });

      expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);

      // Test with decimal notation
      client.setOptions({
        modelOptions: {
          model: 'claude-3.5-sonnet',
          maxOutputTokens: highTokenValue,
        },
      });

      expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
    });

    it('should not cap maxOutputTokens for Claude 3.7 models', () => {
      const client = new AnthropicClient('test-api-key');
      const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 2;

      client.setOptions({
        modelOptions: {
          model: 'claude-3-7-sonnet',
          maxOutputTokens: highTokenValue,
        },
      });

      expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);

      // Test with decimal notation
      client.setOptions({
        modelOptions: {
          model: 'claude-3.7-sonnet',
          maxOutputTokens: highTokenValue,
        },
      });

      expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
    });

    it('should cap maxOutputTokens for Claude 3.5 Haiku models', () => {
      const client = new AnthropicClient('test-api-key');
      const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 2;

      client.setOptions({
        modelOptions: {
          model: 'claude-3-5-haiku',
          maxOutputTokens: highTokenValue,
        },
      });

      expect(client.modelOptions.maxOutputTokens).toBe(
        anthropicSettings.legacy.maxOutputTokens.default,
      );

      // Test with decimal notation
      client.setOptions({
        modelOptions: {
          model: 'claude-3.5-haiku',
          maxOutputTokens: highTokenValue,
        },
      });

      expect(client.modelOptions.maxOutputTokens).toBe(
        anthropicSettings.legacy.maxOutputTokens.default,
      );
    });

    it('should cap maxOutputTokens for Claude 3 Haiku and Opus models', () => {
      const client = new AnthropicClient('test-api-key');
      const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 2;

      // Test haiku
      client.setOptions({
        modelOptions: {
          model: 'claude-3-haiku',
          maxOutputTokens: highTokenValue,
        },
      });

      expect(client.modelOptions.maxOutputTokens).toBe(
        anthropicSettings.legacy.maxOutputTokens.default,
      );

      // Test opus
      client.setOptions({
        modelOptions: {
          model: 'claude-3-opus',
          maxOutputTokens: highTokenValue,
        },
      });

      expect(client.modelOptions.maxOutputTokens).toBe(
        anthropicSettings.legacy.maxOutputTokens.default,
      );
    });
  });

  describe('topK/topP parameters for different models', () => {
    beforeEach(() => {
      // Mock the SplitStreamHandler
      jest.spyOn(SplitStreamHandler.prototype, 'handle').mockImplementation(() => {});
    });

    afterEach(() => {
      jest.restoreAllMocks();
    });

    it('should include top_k and top_p parameters for non-claude-3.7 models', async () => {
      const client = new AnthropicClient('test-api-key');

      // Create a mock async generator function
      async function* mockAsyncGenerator() {
        yield { type: 'message_start', message: { usage: {} } };
        yield { delta: { text: 'Test response' } };
        yield { type: 'message_delta', usage: {} };
      }

      // Mock createResponse to return the async generator
      jest.spyOn(client, 'createResponse').mockImplementation(() => {
        return mockAsyncGenerator();
      });

      client.setOptions({
        modelOptions: {
          model: 'claude-3-opus',
          temperature: 0.7,
          topK: 10,
          topP: 0.9,
        },
      });

      // Mock getClient to capture the request options
      let capturedOptions = null;
      jest.spyOn(client, 'getClient').mockImplementation((options) => {
        capturedOptions = options;
        return {};
      });

      const payload = [{ role: 'user', content: 'Test message' }];
      await client.sendCompletion(payload, {});

      // Check the options passed to getClient
      expect(capturedOptions).toHaveProperty('top_k', 10);
      expect(capturedOptions).toHaveProperty('top_p', 0.9);
    });

    it('should include top_k and top_p parameters for claude-3-5-sonnet models', async () => {
      const client = new AnthropicClient('test-api-key');

      // Create a mock async generator function
      async function* mockAsyncGenerator() {
        yield { type: 'message_start', message: { usage: {} } };
        yield { delta: { text: 'Test response' } };
        yield { type: 'message_delta', usage: {} };
      }

      // Mock createResponse to return the async generator
      jest.spyOn(client, 'createResponse').mockImplementation(() => {
        return mockAsyncGenerator();
      });

      client.setOptions({
        modelOptions: {
          model: 'claude-3-5-sonnet',
          temperature: 0.7,
          topK: 10,
          topP: 0.9,
        },
      });

      // Mock getClient to capture the request options
      let capturedOptions = null;
      jest.spyOn(client, 'getClient').mockImplementation((options) => {
        capturedOptions = options;
        return {};
      });

      const payload = [{ role: 'user', content: 'Test message' }];
      await client.sendCompletion(payload, {});

      // Check the options passed to getClient
      expect(capturedOptions).toHaveProperty('top_k', 10);
      expect(capturedOptions).toHaveProperty('top_p', 0.9);
    });

    it('should not include top_k and top_p parameters for claude-3-7-sonnet models', async () => {
      const client = new AnthropicClient('test-api-key');

      // Create a mock async generator function
      async function* mockAsyncGenerator() {
        yield { type: 'message_start', message: { usage: {} } };
        yield { delta: { text: 'Test response' } };
        yield { type: 'message_delta', usage: {} };
      }

      // Mock createResponse to return the async generator
      jest.spyOn(client, 'createResponse').mockImplementation(() => {
        return mockAsyncGenerator();
      });

      client.setOptions({
        modelOptions: {
          model: 'claude-3-7-sonnet',
          temperature: 0.7,
          topK: 10,
          topP: 0.9,
        },
      });

      // Mock getClient to capture the request options
      let capturedOptions = null;
      jest.spyOn(client, 'getClient').mockImplementation((options) => {
        capturedOptions = options;
        return {};
      });

      const payload = [{ role: 'user', content: 'Test message' }];
      await client.sendCompletion(payload, {});

      // Check the options passed to getClient
      expect(capturedOptions).not.toHaveProperty('top_k');
      expect(capturedOptions).not.toHaveProperty('top_p');
    });

    it('should not include top_k and top_p parameters for models with decimal notation (claude-3.7)', async () => {
      const client = new AnthropicClient('test-api-key');

      // Create a mock async generator function
      async function* mockAsyncGenerator() {
        yield { type: 'message_start', message: { usage: {} } };
        yield { delta: { text: 'Test response' } };
        yield { type: 'message_delta', usage: {} };
      }

      // Mock createResponse to return the async generator
      jest.spyOn(client, 'createResponse').mockImplementation(() => {
        return mockAsyncGenerator();
      });

      client.setOptions({
        modelOptions: {
          model: 'claude-3.7-sonnet',
          temperature: 0.7,
          topK: 10,
          topP: 0.9,
        },
      });

      // Mock getClient to capture the request options
      let capturedOptions = null;
      jest.spyOn(client, 'getClient').mockImplementation((options) => {
        capturedOptions = options;
        return {};
      });

      const payload = [{ role: 'user', content: 'Test message' }];
      await client.sendCompletion(payload, {});

      // Check the options passed to getClient
      expect(capturedOptions).not.toHaveProperty('top_k');
      expect(capturedOptions).not.toHaveProperty('top_p');
    });
  });

  it('should include top_k and top_p parameters for Claude-3.7 models when thinking is explicitly disabled', async () => {
    const client = new AnthropicClient('test-api-key', {
      modelOptions: {
        model: 'claude-3-7-sonnet',
        temperature: 0.7,
        topK: 10,
        topP: 0.9,
      },
      thinking: false,
    });

    async function* mockAsyncGenerator() {
      yield { type: 'message_start', message: { usage: {} } };
      yield { delta: { text: 'Test response' } };
      yield { type: 'message_delta', usage: {} };
    }

    jest.spyOn(client, 'createResponse').mockImplementation(() => {
      return mockAsyncGenerator();
    });

    let capturedOptions = null;
    jest.spyOn(client, 'getClient').mockImplementation((options) => {
      capturedOptions = options;
      return {};
    });

    const payload = [{ role: 'user', content: 'Test message' }];
    await client.sendCompletion(payload, {});

    expect(capturedOptions).toHaveProperty('topK', 10);
    expect(capturedOptions).toHaveProperty('topP', 0.9);

    client.setOptions({
      modelOptions: {
        model: 'claude-3.7-sonnet',
        temperature: 0.7,
        topK: 10,
        topP: 0.9,
      },
      thinking: false,
    });

    await client.sendCompletion(payload, {});

    expect(capturedOptions).toHaveProperty('topK', 10);
    expect(capturedOptions).toHaveProperty('topP', 0.9);
  });
});

@@ -30,8 +30,6 @@ jest.mock('~/models', () => ({
  updateFileUsage: jest.fn(),
}));

const { getConvo, saveConvo } = require('~/models');

jest.mock('@langchain/openai', () => {
  return {
    ChatOpenAI: jest.fn().mockImplementation(() => {
@@ -164,7 +162,7 @@ describe('BaseClient', () => {
    const result = await TestClient.getMessagesWithinTokenLimit({ messages });

    expect(result.context).toEqual(expectedContext);
    expect(result.messagesToRefine.length - 1).toEqual(expectedIndex);
    expect(result.summaryIndex).toEqual(expectedIndex);
    expect(result.remainingContextTokens).toBe(expectedRemainingContextTokens);
    expect(result.messagesToRefine).toEqual(expectedMessagesToRefine);
  });
@@ -200,7 +198,7 @@ describe('BaseClient', () => {
    const result = await TestClient.getMessagesWithinTokenLimit({ messages });

    expect(result.context).toEqual(expectedContext);
    expect(result.messagesToRefine.length - 1).toEqual(expectedIndex);
    expect(result.summaryIndex).toEqual(expectedIndex);
    expect(result.remainingContextTokens).toBe(expectedRemainingContextTokens);
    expect(result.messagesToRefine).toEqual(expectedMessagesToRefine);
  });
@@ -542,11 +540,10 @@ describe('BaseClient', () => {

  test('saveMessageToDatabase is called with the correct arguments', async () => {
    const saveOptions = TestClient.getSaveOptions();
    const user = {};
    const user = {}; // Mock user
    const opts = { user };
    const saveSpy = jest.spyOn(TestClient, 'saveMessageToDatabase');
    await TestClient.sendMessage('Hello, world!', opts);
    expect(saveSpy).toHaveBeenCalledWith(
    expect(TestClient.saveMessageToDatabase).toHaveBeenCalledWith(
      expect.objectContaining({
        sender: expect.any(String),
        text: expect.any(String),
@@ -560,157 +557,6 @@ describe('BaseClient', () => {
    );
  });

  test('should handle existing conversation when getConvo retrieves one', async () => {
    const existingConvo = {
      conversationId: 'existing-convo-id',
      endpoint: 'openai',
      endpointType: 'openai',
      model: 'gpt-3.5-turbo',
      messages: [
        { role: 'user', content: 'Existing message 1' },
        { role: 'assistant', content: 'Existing response 1' },
      ],
      temperature: 1,
    };

    const { temperature: _temp, ...newConvo } = existingConvo;

    const user = {
      id: 'user-id',
    };

    getConvo.mockResolvedValue(existingConvo);
    saveConvo.mockResolvedValue(newConvo);

    TestClient = initializeFakeClient(
      apiKey,
      {
        ...options,
        req: {
          user,
        },
      },
      [],
    );

    const saveSpy = jest.spyOn(TestClient, 'saveMessageToDatabase');

    const newMessage = 'New message in existing conversation';
    const response = await TestClient.sendMessage(newMessage, {
      user,
      conversationId: existingConvo.conversationId,
    });

    expect(getConvo).toHaveBeenCalledWith(user.id, existingConvo.conversationId);
    expect(TestClient.conversationId).toBe(existingConvo.conversationId);
    expect(response.conversationId).toBe(existingConvo.conversationId);
    expect(TestClient.fetchedConvo).toBe(true);

    expect(saveSpy).toHaveBeenCalledWith(
      expect.objectContaining({
        conversationId: existingConvo.conversationId,
        text: newMessage,
      }),
      expect.any(Object),
      expect.any(Object),
    );

    expect(saveConvo).toHaveBeenCalledTimes(2);
    expect(saveConvo).toHaveBeenCalledWith(
      expect.any(Object),
      expect.objectContaining({
        conversationId: existingConvo.conversationId,
      }),
      expect.objectContaining({
        context: 'api/app/clients/BaseClient.js - saveMessageToDatabase #saveConvo',
        unsetFields: {
          temperature: 1,
        },
      }),
    );

    await TestClient.sendMessage('Another message', {
      conversationId: existingConvo.conversationId,
    });
    expect(getConvo).toHaveBeenCalledTimes(1);
  });

  test('should correctly handle existing conversation and unset fields appropriately', async () => {
    const existingConvo = {
      conversationId: 'existing-convo-id',
      endpoint: 'openai',
      endpointType: 'openai',
      model: 'gpt-3.5-turbo',
      messages: [
        { role: 'user', content: 'Existing message 1' },
        { role: 'assistant', content: 'Existing response 1' },
      ],
      title: 'Existing Conversation',
      someExistingField: 'existingValue',
      anotherExistingField: 'anotherValue',
      temperature: 0.7,
      modelLabel: 'GPT-3.5',
    };

    getConvo.mockResolvedValue(existingConvo);
    saveConvo.mockResolvedValue(existingConvo);

    TestClient = initializeFakeClient(
      apiKey,
      {
        ...options,
        modelOptions: {
          model: 'gpt-4',
          temperature: 0.5,
        },
      },
      [],
    );

    const newMessage = 'New message in existing conversation';
    await TestClient.sendMessage(newMessage, {
      conversationId: existingConvo.conversationId,
    });

    expect(saveConvo).toHaveBeenCalledTimes(2);

    const saveConvoCall = saveConvo.mock.calls[0];
    const [, savedFields, saveOptions] = saveConvoCall;

    // Instead of checking all excludedKeys, we'll just check specific fields
    // that we know should be excluded
    expect(savedFields).not.toHaveProperty('messages');
    expect(savedFields).not.toHaveProperty('title');

    // Only check that someExistingField is in unsetFields
    expect(saveOptions.unsetFields).toHaveProperty('someExistingField', 1);

    // Mock saveConvo to return the expected fields
    saveConvo.mockImplementation((req, fields) => {
      return Promise.resolve({
        ...fields,
        endpoint: 'openai',
        endpointType: 'openai',
        model: 'gpt-4',
        temperature: 0.5,
      });
    });

    // Only check the conversationId since that's the only field we can be sure about
    expect(savedFields).toHaveProperty('conversationId', 'existing-convo-id');

    expect(TestClient.fetchedConvo).toBe(true);

    await TestClient.sendMessage('Another message', {
      conversationId: existingConvo.conversationId,
    });

    expect(getConvo).toHaveBeenCalledTimes(1);

    const secondSaveConvoCall = saveConvo.mock.calls[1];
    expect(secondSaveConvoCall[2]).toHaveProperty('unsetFields', {});
  });

  test('sendCompletion is called with the correct arguments', async () => {
    const payload = {}; // Mock payload
    TestClient.buildMessages.mockReturnValue({ prompt: payload, tokenCountMap: null });

@@ -56,6 +56,7 @@ const initializeFakeClient = (apiKey, options, fakeMessages) => {
  let TestClient = new FakeClient(apiKey);
  TestClient.options = options;
  TestClient.abortController = { abort: jest.fn() };
  TestClient.saveMessageToDatabase = jest.fn();
  TestClient.loadHistory = jest
    .fn()
    .mockImplementation((conversationId, parentMessageId = null) => {
@@ -85,6 +86,7 @@ const initializeFakeClient = (apiKey, options, fakeMessages) => {
      return 'Mock response text';
    });

  // eslint-disable-next-line no-unused-vars
  TestClient.getCompletion = jest.fn().mockImplementation(async (..._args) => {
    return {
      choices: [

@@ -202,6 +202,14 @@ describe('OpenAIClient', () => {
    expect(client.modelOptions.temperature).toBe(0.7);
  });

  it('should set apiKey and useOpenRouter if OPENROUTER_API_KEY is present', () => {
    process.env.OPENROUTER_API_KEY = 'openrouter-key';
    client.setOptions({});
    expect(client.apiKey).toBe('openrouter-key');
    expect(client.useOpenRouter).toBe(true);
    delete process.env.OPENROUTER_API_KEY; // Cleanup
  });

  it('should set FORCE_PROMPT based on OPENAI_FORCE_PROMPT or reverseProxyUrl', () => {
    process.env.OPENAI_FORCE_PROMPT = 'true';
    client.setOptions({});
@@ -526,6 +534,7 @@ describe('OpenAIClient', () => {
  afterEach(() => {
    delete process.env.AZURE_OPENAI_DEFAULT_MODEL;
    delete process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME;
    delete process.env.OPENROUTER_API_KEY;
  });

  it('should call getCompletion and fetchEventSource when using a text/instruct model', async () => {

@@ -2,10 +2,9 @@ const availableTools = require('./manifest.json');

// Structured Tools
const DALLE3 = require('./structured/DALLE3');
const FluxAPI = require('./structured/FluxAPI');
const OpenWeather = require('./structured/OpenWeather');
const StructuredWolfram = require('./structured/Wolfram');
const createYouTubeTools = require('./structured/YouTube');
const StructuredWolfram = require('./structured/Wolfram');
const StructuredACS = require('./structured/AzureAISearch');
const StructuredSD = require('./structured/StableDiffusion');
const GoogleSearchAPI = require('./structured/GoogleSearch');
@@ -31,7 +30,6 @@ module.exports = {
  manifestToolMap,
  // Structured Tools
  DALLE3,
  FluxAPI,
  OpenWeather,
  StructuredSD,
  StructuredACS,

@@ -164,19 +164,5 @@
      "description": "Sign up at <a href=\"https://home.openweathermap.org/users/sign_up\" target=\"_blank\">OpenWeather</a>, then get your key at <a href=\"https://home.openweathermap.org/api_keys\" target=\"_blank\">API keys</a>."
    }
  ]
},
{
  "name": "Flux",
  "pluginKey": "flux",
  "description": "Generate images using text with the Flux API.",
  "icon": "https://blackforestlabs.ai/wp-content/uploads/2024/07/bfl_logo_retraced_blk.png",
  "isAuthRequired": "true",
  "authConfig": [
    {
      "authField": "FLUX_API_KEY",
      "label": "Your Flux API Key",
      "description": "Provide your Flux API key from your user profile."
    }
  ]
}
]

@@ -1,17 +1,14 @@
const { z } = require('zod');
const path = require('path');
const OpenAI = require('openai');
const fetch = require('node-fetch');
const { v4: uuidv4 } = require('uuid');
const { Tool } = require('@langchain/core/tools');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { FileContext, ContentTypes } = require('librechat-data-provider');
const { FileContext } = require('librechat-data-provider');
const { getImageBasename } = require('~/server/services/Files/images');
const extractBaseURL = require('~/utils/extractBaseURL');
const { logger } = require('~/config');

const displayMessage =
  'DALL-E displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';
class DALLE3 extends Tool {
  constructor(fields = {}) {
    super();
@@ -117,7 +114,10 @@ class DALLE3 extends Tool {
    if (this.isAgent === true && typeof value === 'string') {
      return [value, {}];
    } else if (this.isAgent === true && typeof value === 'object') {
      return [displayMessage, value];
      return [
        'DALL-E displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.',
        value,
      ];
    }

    return value;
@@ -160,32 +160,6 @@ Error Message: ${error.message}`);
    );
  }

    if (this.isAgent) {
      let fetchOptions = {};
      if (process.env.PROXY) {
        fetchOptions.agent = new HttpsProxyAgent(process.env.PROXY);
      }
      const imageResponse = await fetch(theImageUrl, fetchOptions);
      const arrayBuffer = await imageResponse.arrayBuffer();
      const base64 = Buffer.from(arrayBuffer).toString('base64');
      const content = [
        {
          type: ContentTypes.IMAGE_URL,
          image_url: {
            url: `data:image/png;base64,${base64}`,
          },
        },
      ];

      const response = [
        {
          type: ContentTypes.TEXT,
          text: displayMessage,
        },
      ];
      return [response, { content }];
    }
||||
|
||||
const imageBasename = getImageBasename(theImageUrl);
|
||||
const imageExt = path.extname(imageBasename);
|
||||
|
||||
|
||||
@@ -1,554 +0,0 @@
const { z } = require('zod');
const axios = require('axios');
const fetch = require('node-fetch');
const { v4: uuidv4 } = require('uuid');
const { Tool } = require('@langchain/core/tools');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { FileContext, ContentTypes } = require('librechat-data-provider');
const { logger } = require('~/config');

const displayMessage =
  'Flux displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';

/**
 * FluxAPI - A tool for generating high-quality images from text prompts using the Flux API.
 * Each call generates one image. If multiple images are needed, make multiple consecutive calls with the same or varied prompts.
 */
class FluxAPI extends Tool {
  // Pricing constants in USD per image
  static PRICING = {
    FLUX_PRO_1_1_ULTRA: -0.06, // /v1/flux-pro-1.1-ultra
    FLUX_PRO_1_1: -0.04, // /v1/flux-pro-1.1
    FLUX_PRO: -0.05, // /v1/flux-pro
    FLUX_DEV: -0.025, // /v1/flux-dev
    FLUX_PRO_FINETUNED: -0.06, // /v1/flux-pro-finetuned
    FLUX_PRO_1_1_ULTRA_FINETUNED: -0.07, // /v1/flux-pro-1.1-ultra-finetuned
  };

  constructor(fields = {}) {
    super();

    /** @type {boolean} Used to initialize the Tool without necessary variables. */
    this.override = fields.override ?? false;

    this.userId = fields.userId;
    this.fileStrategy = fields.fileStrategy;

    /** @type {boolean} **/
    this.isAgent = fields.isAgent;
    this.returnMetadata = fields.returnMetadata ?? false;

    if (fields.processFileURL) {
      /** @type {processFileURL} Necessary for output to contain all image metadata. */
      this.processFileURL = fields.processFileURL.bind(this);
    }

    this.apiKey = fields.FLUX_API_KEY || this.getApiKey();

    this.name = 'flux';
    this.description =
      'Use Flux to generate images from text descriptions. This tool can generate images and list available finetunes. Each generate call creates one image. For multiple images, make multiple consecutive calls.';

    this.description_for_model = `// Transform any image description into a detailed, high-quality prompt. Never submit a prompt under 3 sentences. Follow these core rules:
// 1. ALWAYS enhance basic prompts into 5-10 detailed sentences (e.g., "a cat" becomes: "A close-up photo of a sleek Siamese cat with piercing blue eyes. The cat sits elegantly on a vintage leather armchair, its tail curled gracefully around its paws. Warm afternoon sunlight streams through a nearby window, casting gentle shadows across its face and highlighting the subtle variations in its cream and chocolate-point fur. The background is softly blurred, creating a shallow depth of field that draws attention to the cat's expressive features. The overall composition has a peaceful, contemplative mood with a professional photography style.")
// 2. Each prompt MUST be 3-6 descriptive sentences minimum, focusing on visual elements: lighting, composition, mood, and style
// Use action: 'list_finetunes' to see available custom models. When using finetunes, use endpoint: '/v1/flux-pro-finetuned' (default) or '/v1/flux-pro-1.1-ultra-finetuned' for higher quality and aspect ratio.`;

    // Add base URL from environment variable with fallback
    this.baseUrl = process.env.FLUX_API_BASE_URL || 'https://api.us1.bfl.ai';

    // Define the schema for structured input
    this.schema = z.object({
      action: z
        .enum(['generate', 'list_finetunes', 'generate_finetuned'])
        .default('generate')
        .describe(
          'Action to perform: "generate" for image generation, "generate_finetuned" for finetuned model generation, "list_finetunes" to get available custom models',
        ),
      prompt: z
        .string()
        .optional()
        .describe(
          'Text prompt for image generation. Required when action is "generate". Not used for list_finetunes.',
        ),
      width: z
        .number()
        .optional()
        .describe(
          'Width of the generated image in pixels. Must be a multiple of 32. Default is 1024.',
        ),
      height: z
        .number()
        .optional()
        .describe(
          'Height of the generated image in pixels. Must be a multiple of 32. Default is 768.',
        ),
      prompt_upsampling: z
        .boolean()
        .optional()
        .default(false)
        .describe('Whether to perform upsampling on the prompt.'),
      steps: z
        .number()
        .int()
        .optional()
        .describe('Number of steps to run the model for, a number from 1 to 50. Default is 40.'),
      seed: z.number().optional().describe('Optional seed for reproducibility.'),
      safety_tolerance: z
        .number()
        .optional()
        .default(6)
        .describe(
          'Tolerance level for input and output moderation. Between 0 and 6, 0 being most strict, 6 being least strict.',
        ),
      endpoint: z
        .enum([
          '/v1/flux-pro-1.1',
          '/v1/flux-pro',
          '/v1/flux-dev',
          '/v1/flux-pro-1.1-ultra',
          '/v1/flux-pro-finetuned',
          '/v1/flux-pro-1.1-ultra-finetuned',
        ])
        .optional()
        .default('/v1/flux-pro-1.1')
        .describe('Endpoint to use for image generation.'),
      raw: z
        .boolean()
        .optional()
        .default(false)
        .describe(
          'Generate less processed, more natural-looking images. Only works for /v1/flux-pro-1.1-ultra.',
        ),
      finetune_id: z.string().optional().describe('ID of the finetuned model to use'),
      finetune_strength: z
        .number()
        .optional()
        .default(1.1)
        .describe('Strength of the finetuning effect (typically between 0.1 and 1.2)'),
      guidance: z.number().optional().default(2.5).describe('Guidance scale for finetuned models'),
      aspect_ratio: z
        .string()
        .optional()
        .default('16:9')
        .describe('Aspect ratio for ultra models (e.g., "16:9")'),
    });
  }

  getAxiosConfig() {
    const config = {};
    if (process.env.PROXY) {
      config.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
    }
    return config;
  }

  /** @param {Object|string} value */
  getDetails(value) {
    if (typeof value === 'string') {
      return value;
    }
    return JSON.stringify(value, null, 2);
  }

  getApiKey() {
    const apiKey = process.env.FLUX_API_KEY || '';
    if (!apiKey && !this.override) {
      throw new Error('Missing FLUX_API_KEY environment variable.');
    }
    return apiKey;
  }

  wrapInMarkdown(imageUrl) {
    const serverDomain = process.env.DOMAIN_SERVER || 'http://localhost:3080';
    return `![generated image](${serverDomain}${imageUrl})`;
  }

  returnValue(value) {
    if (this.isAgent === true && typeof value === 'string') {
      return [value, {}];
    } else if (this.isAgent === true && typeof value === 'object') {
      if (Array.isArray(value)) {
        return value;
      }
      return [displayMessage, value];
    }
    return value;
  }

  async _call(data) {
    const { action = 'generate', ...imageData } = data;

    // Use provided API key for this request if available, otherwise use default
    const requestApiKey = this.apiKey || this.getApiKey();

    // Handle list_finetunes action
    if (action === 'list_finetunes') {
      return this.getMyFinetunes(requestApiKey);
    }

    // Handle finetuned generation
    if (action === 'generate_finetuned') {
      return this.generateFinetunedImage(imageData, requestApiKey);
    }

    // For generate action, ensure prompt is provided
    if (!imageData.prompt) {
      throw new Error('Missing required field: prompt');
    }

    let payload = {
      prompt: imageData.prompt,
      prompt_upsampling: imageData.prompt_upsampling || false,
      safety_tolerance: imageData.safety_tolerance || 6,
      output_format: imageData.output_format || 'png',
    };

    // Add optional parameters if provided
    if (imageData.width) {
      payload.width = imageData.width;
    }
    if (imageData.height) {
      payload.height = imageData.height;
    }
    if (imageData.steps) {
      payload.steps = imageData.steps;
    }
    if (imageData.seed !== undefined) {
      payload.seed = imageData.seed;
    }
    if (imageData.raw) {
      payload.raw = imageData.raw;
    }

    const generateUrl = `${this.baseUrl}${imageData.endpoint || '/v1/flux-pro'}`;
    const resultUrl = `${this.baseUrl}/v1/get_result`;

    logger.debug('[FluxAPI] Generating image with payload:', payload);
    logger.debug('[FluxAPI] Using endpoint:', generateUrl);

    let taskResponse;
    try {
      taskResponse = await axios.post(generateUrl, payload, {
        headers: {
          'x-key': requestApiKey,
          'Content-Type': 'application/json',
          Accept: 'application/json',
        },
        ...this.getAxiosConfig(),
      });
    } catch (error) {
      const details = this.getDetails(error?.response?.data || error.message);
      logger.error('[FluxAPI] Error while submitting task:', details);

      return this.returnValue(
        `Something went wrong when trying to generate the image. The Flux API may be unavailable:
Error Message: ${details}`,
      );
    }

    const taskId = taskResponse.data.id;

    // Polling for the result
    let status = 'Pending';
    let resultData = null;
    while (status !== 'Ready' && status !== 'Error') {
      try {
        // Wait 2 seconds between polls
        await new Promise((resolve) => setTimeout(resolve, 2000));
        const resultResponse = await axios.get(resultUrl, {
          headers: {
            'x-key': requestApiKey,
            Accept: 'application/json',
          },
          params: { id: taskId },
          ...this.getAxiosConfig(),
        });
        status = resultResponse.data.status;

        if (status === 'Ready') {
          resultData = resultResponse.data.result;
          break;
        } else if (status === 'Error') {
          logger.error('[FluxAPI] Error in task:', resultResponse.data);
          return this.returnValue('An error occurred during image generation.');
        }
      } catch (error) {
        const details = this.getDetails(error?.response?.data || error.message);
        logger.error('[FluxAPI] Error while getting result:', details);
        return this.returnValue('An error occurred while retrieving the image.');
      }
    }

    // If no result data
    if (!resultData || !resultData.sample) {
      logger.error('[FluxAPI] No image data received from API. Response:', resultData);
      return this.returnValue('No image data received from Flux API.');
    }

    // Try saving the image locally
    const imageUrl = resultData.sample;
    const imageName = `img-${uuidv4()}.png`;

    if (this.isAgent) {
      try {
        // Fetch the image and convert to base64
        const fetchOptions = {};
        if (process.env.PROXY) {
          fetchOptions.agent = new HttpsProxyAgent(process.env.PROXY);
        }
        const imageResponse = await fetch(imageUrl, fetchOptions);
        const arrayBuffer = await imageResponse.arrayBuffer();
        const base64 = Buffer.from(arrayBuffer).toString('base64');
        const content = [
          {
            type: ContentTypes.IMAGE_URL,
            image_url: {
              url: `data:image/png;base64,${base64}`,
            },
          },
        ];

        const response = [
          {
            type: ContentTypes.TEXT,
            text: displayMessage,
          },
        ];
        return [response, { content }];
      } catch (error) {
        logger.error('Error processing image for agent:', error);
        return this.returnValue(`Failed to process the image. ${error.message}`);
      }
    }

    try {
      logger.debug('[FluxAPI] Saving image:', imageUrl);
      const result = await this.processFileURL({
        fileStrategy: this.fileStrategy,
        userId: this.userId,
        URL: imageUrl,
        fileName: imageName,
        basePath: 'images',
        context: FileContext.image_generation,
      });

      logger.debug('[FluxAPI] Image saved to path:', result.filepath);

      // Calculate cost based on endpoint
      /**
       * TODO: Cost handling
      const endpoint = imageData.endpoint || '/v1/flux-pro';
      const endpointKey = Object.entries(FluxAPI.PRICING).find(([key, _]) =>
        endpoint.includes(key.toLowerCase().replace(/_/g, '-')),
      )?.[0];
      const cost = FluxAPI.PRICING[endpointKey] || 0;
      */
      this.result = this.returnMetadata ? result : this.wrapInMarkdown(result.filepath);
      return this.returnValue(this.result);
    } catch (error) {
      const details = this.getDetails(error?.message ?? 'No additional error details.');
      logger.error('Error while saving the image:', details);
      return this.returnValue(`Failed to save the image locally. ${details}`);
    }
  }

  async getMyFinetunes(apiKey = null) {
    const finetunesUrl = `${this.baseUrl}/v1/my_finetunes`;
    const detailsUrl = `${this.baseUrl}/v1/finetune_details`;

    try {
      const headers = {
        'x-key': apiKey || this.getApiKey(),
        'Content-Type': 'application/json',
        Accept: 'application/json',
      };

      // Get list of finetunes
      const response = await axios.get(finetunesUrl, {
        headers,
        ...this.getAxiosConfig(),
      });
      const finetunes = response.data.finetunes;

      // Fetch details for each finetune
      const finetuneDetails = await Promise.all(
        finetunes.map(async (finetuneId) => {
          try {
            const detailResponse = await axios.get(`${detailsUrl}?finetune_id=${finetuneId}`, {
              headers,
              ...this.getAxiosConfig(),
            });
            return {
              id: finetuneId,
              ...detailResponse.data,
            };
          } catch (error) {
            logger.error(`[FluxAPI] Error fetching details for finetune ${finetuneId}:`, error);
            return {
              id: finetuneId,
              error: 'Failed to fetch details',
            };
          }
        }),
      );

      if (this.isAgent) {
        const formattedDetails = JSON.stringify(finetuneDetails, null, 2);
        return [`Here are the available finetunes:\n${formattedDetails}`, null];
      }
      return JSON.stringify(finetuneDetails);
    } catch (error) {
      const details = this.getDetails(error?.response?.data || error.message);
      logger.error('[FluxAPI] Error while getting finetunes:', details);
      const errorMsg = `Failed to get finetunes: ${details}`;
      return this.isAgent ? this.returnValue([errorMsg, {}]) : new Error(errorMsg);
    }
  }

  async generateFinetunedImage(imageData, requestApiKey) {
    if (!imageData.prompt) {
      throw new Error('Missing required field: prompt');
    }

    if (!imageData.finetune_id) {
      throw new Error(
        'Missing required field: finetune_id for finetuned generation. Please supply a finetune_id!',
      );
    }

    // Validate endpoint is appropriate for finetuned generation
    const validFinetunedEndpoints = ['/v1/flux-pro-finetuned', '/v1/flux-pro-1.1-ultra-finetuned'];
    const endpoint = imageData.endpoint || '/v1/flux-pro-finetuned';

    if (!validFinetunedEndpoints.includes(endpoint)) {
      throw new Error(
        `Invalid endpoint for finetuned generation. Must be one of: ${validFinetunedEndpoints.join(', ')}`,
      );
    }

    let payload = {
      prompt: imageData.prompt,
      prompt_upsampling: imageData.prompt_upsampling || false,
      safety_tolerance: imageData.safety_tolerance || 6,
      output_format: imageData.output_format || 'png',
      finetune_id: imageData.finetune_id,
      finetune_strength: imageData.finetune_strength || 1.0,
      guidance: imageData.guidance || 2.5,
    };

    // Add optional parameters if provided
    if (imageData.width) {
      payload.width = imageData.width;
    }
    if (imageData.height) {
      payload.height = imageData.height;
    }
    if (imageData.steps) {
      payload.steps = imageData.steps;
    }
    if (imageData.seed !== undefined) {
      payload.seed = imageData.seed;
    }
    if (imageData.raw) {
      payload.raw = imageData.raw;
    }

    const generateUrl = `${this.baseUrl}${endpoint}`;
    const resultUrl = `${this.baseUrl}/v1/get_result`;

    logger.debug('[FluxAPI] Generating finetuned image with payload:', payload);
    logger.debug('[FluxAPI] Using endpoint:', generateUrl);

    let taskResponse;
    try {
      taskResponse = await axios.post(generateUrl, payload, {
        headers: {
          'x-key': requestApiKey,
          'Content-Type': 'application/json',
          Accept: 'application/json',
        },
        ...this.getAxiosConfig(),
      });
    } catch (error) {
      const details = this.getDetails(error?.response?.data || error.message);
      logger.error('[FluxAPI] Error while submitting finetuned task:', details);
      return this.returnValue(
        `Something went wrong when trying to generate the finetuned image. The Flux API may be unavailable:
Error Message: ${details}`,
      );
    }

    const taskId = taskResponse.data.id;

    // Polling for the result
    let status = 'Pending';
    let resultData = null;
    while (status !== 'Ready' && status !== 'Error') {
      try {
        // Wait 2 seconds between polls
        await new Promise((resolve) => setTimeout(resolve, 2000));
        const resultResponse = await axios.get(resultUrl, {
          headers: {
            'x-key': requestApiKey,
            Accept: 'application/json',
          },
          params: { id: taskId },
          ...this.getAxiosConfig(),
        });
        status = resultResponse.data.status;

        if (status === 'Ready') {
          resultData = resultResponse.data.result;
          break;
        } else if (status === 'Error') {
          logger.error('[FluxAPI] Error in finetuned task:', resultResponse.data);
          return this.returnValue('An error occurred during finetuned image generation.');
        }
      } catch (error) {
        const details = this.getDetails(error?.response?.data || error.message);
        logger.error('[FluxAPI] Error while getting finetuned result:', details);
        return this.returnValue('An error occurred while retrieving the finetuned image.');
      }
    }

    // If no result data
    if (!resultData || !resultData.sample) {
      logger.error('[FluxAPI] No image data received from API. Response:', resultData);
      return this.returnValue('No image data received from Flux API.');
    }

    // Try saving the image locally
    const imageUrl = resultData.sample;
    const imageName = `img-${uuidv4()}.png`;

    try {
      logger.debug('[FluxAPI] Saving finetuned image:', imageUrl);
      const result = await this.processFileURL({
        fileStrategy: this.fileStrategy,
        userId: this.userId,
        URL: imageUrl,
        fileName: imageName,
        basePath: 'images',
        context: FileContext.image_generation,
      });

      logger.debug('[FluxAPI] Finetuned image saved to path:', result.filepath);

      // Calculate cost based on endpoint
      const endpointKey = endpoint.includes('ultra')
        ? 'FLUX_PRO_1_1_ULTRA_FINETUNED'
        : 'FLUX_PRO_FINETUNED';
      const cost = FluxAPI.PRICING[endpointKey] || 0;
      // Return the result based on returnMetadata flag
      this.result = this.returnMetadata ? result : this.wrapInMarkdown(result.filepath);
      return this.returnValue(this.result);
    } catch (error) {
      const details = this.getDetails(error?.message ?? 'No additional error details.');
      logger.error('Error while saving the finetuned image:', details);
      return this.returnValue(`Failed to save the finetuned image locally. ${details}`);
    }
  }
}

module.exports = FluxAPI;
@@ -6,13 +6,10 @@ const axios = require('axios');
const sharp = require('sharp');
const { v4: uuidv4 } = require('uuid');
const { Tool } = require('@langchain/core/tools');
const { FileContext, ContentTypes } = require('librechat-data-provider');
const { FileContext } = require('librechat-data-provider');
const paths = require('~/config/paths');
const { logger } = require('~/config');

const displayMessage =
  'Stable Diffusion displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';

class StableDiffusionAPI extends Tool {
  constructor(fields) {
    super();
@@ -24,8 +21,6 @@ class StableDiffusionAPI extends Tool {
    this.override = fields.override ?? false;
    /** @type {boolean} Necessary for output to contain all image metadata. */
    this.returnMetadata = fields.returnMetadata ?? false;
    /** @type {boolean} */
    this.isAgent = fields.isAgent;
    if (fields.uploadImageBuffer) {
      /** @type {uploadImageBuffer} Necessary for output to contain all image metadata. */
      this.uploadImageBuffer = fields.uploadImageBuffer.bind(this);
@@ -71,16 +66,6 @@ class StableDiffusionAPI extends Tool {
    return `![generated image](${serverDomain}${imageUrl})`;
  }

  returnValue(value) {
    if (this.isAgent === true && typeof value === 'string') {
      return [value, {}];
    } else if (this.isAgent === true && typeof value === 'object') {
      return [displayMessage, value];
    }

    return value;
  }

  getServerURL() {
    const url = process.env.SD_WEBUI_URL || '';
    if (!url && !this.override) {
@@ -128,25 +113,6 @@ class StableDiffusionAPI extends Tool {
    }

    try {
      if (this.isAgent) {
        const content = [
          {
            type: ContentTypes.IMAGE_URL,
            image_url: {
              url: `data:image/png;base64,${image}`,
            },
          },
        ];

        const response = [
          {
            type: ContentTypes.TEXT,
            text: displayMessage,
          },
        ];
        return [response, { content }];
      }

      const buffer = Buffer.from(image.split(',', 1)[0], 'base64');
      if (this.returnMetadata && this.uploadImageBuffer && this.req) {
        const file = await this.uploadImageBuffer({
@@ -188,7 +154,7 @@ class StableDiffusionAPI extends Tool {
      logger.error('[StableDiffusion] Error while saving the image:', error);
    }

    return this.returnValue(this.result);
    return this.result;
  }
}

@@ -106,21 +106,18 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {

  const formattedResults = validResults
    .flatMap((result) =>
      result.data.map(([docInfo, distance]) => ({
      result.data.map(([docInfo, relevanceScore]) => ({
        filename: docInfo.metadata.source.split('/').pop(),
        content: docInfo.page_content,
        distance,
        relevanceScore,
      })),
    )
    // TODO: results should be sorted by relevance, not distance
    .sort((a, b) => a.distance - b.distance)
    // TODO: make this configurable
    .slice(0, 10);
    .sort((a, b) => b.relevanceScore - a.relevanceScore);

  const formattedString = formattedResults
    .map(
      (result) =>
        `File: ${result.filename}\nRelevance: ${1.0 - result.distance.toFixed(4)}\nContent: ${
        `File: ${result.filename}\nRelevance: ${result.relevanceScore.toFixed(4)}\nContent: ${
          result.content
        }\n`,
    )

@@ -10,7 +10,6 @@ const {
  GoogleSearchAPI,
  // Structured Tools
  DALLE3,
  FluxAPI,
  OpenWeather,
  StructuredSD,
  StructuredACS,
@@ -21,7 +20,6 @@ const {
} = require('../');
const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { createMCPTool } = require('~/server/services/MCP');
const { loadSpecs } = require('./loadSpecs');
const { logger } = require('~/config');
@@ -91,6 +89,45 @@ const validateTools = async (user, tools = []) => {
  }
};

const loadAuthValues = async ({ userId, authFields, throwError = true }) => {
  let authValues = {};

  /**
   * Finds the first non-empty value for the given authentication field, supporting alternate fields.
   * @param {string[]} fields Array of strings representing the authentication fields. Supports alternate fields delimited by "||".
   * @returns {Promise<{ authField: string, authValue: string} | null>} An object containing the authentication field and value, or null if not found.
   */
  const findAuthValue = async (fields) => {
    for (const field of fields) {
      let value = process.env[field];
      if (value) {
        return { authField: field, authValue: value };
      }
      try {
        value = await getUserPluginAuthValue(userId, field, throwError);
      } catch (err) {
        if (field === fields[fields.length - 1] && !value) {
          throw err;
        }
      }
      if (value) {
        return { authField: field, authValue: value };
      }
    }
    return null;
  };

  for (let authField of authFields) {
    const fields = authField.split('||');
    const result = await findAuthValue(fields);
    if (result) {
      authValues[result.authField] = result.authValue;
    }
  }

  return authValues;
};

/** @typedef {typeof import('@langchain/core/tools').Tool} ToolConstructor */
/** @typedef {import('@langchain/core/tools').Tool} Tool */

@@ -145,7 +182,6 @@ const loadTools = async ({
  returnMap = false,
}) => {
  const toolConstructors = {
    flux: FluxAPI,
    calculator: Calculator,
    google: GoogleSearchAPI,
    open_weather: OpenWeather,
@@ -194,10 +230,9 @@ const loadTools = async ({
  };

  const toolOptions = {
    flux: imageGenOptions,
    serpapi: { location: 'Austin,Texas,United States', hl: 'en', gl: 'us' },
    dalle: imageGenOptions,
    'stable-diffusion': imageGenOptions,
    serpapi: { location: 'Austin,Texas,United States', hl: 'en', gl: 'us' },
  };

  const toolContextMap = {};
@@ -310,6 +345,7 @@ const loadTools = async ({

module.exports = {
  loadToolWithAuth,
  loadAuthValues,
  validateTools,
  loadTools,
};

@@ -1,8 +1,9 @@
const { validateTools, loadTools } = require('./handleTools');
const { validateTools, loadTools, loadAuthValues } = require('./handleTools');
const handleOpenAIErrors = require('./handleOpenAIErrors');

module.exports = {
  handleOpenAIErrors,
  loadAuthValues,
  validateTools,
  loadTools,
};

api/cache/getLogStores.js (vendored)
@@ -37,10 +37,6 @@ const messages = isRedisEnabled
  ? new Keyv({ store: keyvRedis, ttl: Time.ONE_MINUTE })
  : new Keyv({ namespace: CacheKeys.MESSAGES, ttl: Time.ONE_MINUTE });

const flows = isRedisEnabled
  ? new Keyv({ store: keyvRedis, ttl: Time.TWO_MINUTES })
  : new Keyv({ namespace: CacheKeys.FLOWS, ttl: Time.ONE_MINUTE * 3 });

const tokenConfig = isRedisEnabled
  ? new Keyv({ store: keyvRedis, ttl: Time.THIRTY_MINUTES })
  : new Keyv({ namespace: CacheKeys.TOKEN_CONFIG, ttl: Time.THIRTY_MINUTES });
@@ -92,7 +88,6 @@ const namespaces = {
  [CacheKeys.MODEL_QUERIES]: modelQueries,
  [CacheKeys.AUDIO_RUNS]: audioRuns,
  [CacheKeys.MESSAGES]: messages,
  [CacheKeys.FLOWS]: flows,
};

/**

api/cache/keyvRedis.js (vendored)
@@ -1,81 +1,15 @@
const fs = require('fs');
const ioredis = require('ioredis');
const KeyvRedis = require('@keyv/redis');
const { logger } = require('~/config');
const { isEnabled } = require('~/server/utils');
const logger = require('~/config/winston');

const { REDIS_URI, USE_REDIS, USE_REDIS_CLUSTER, REDIS_CA, REDIS_KEY_PREFIX, REDIS_MAX_LISTENERS } =
  process.env;
const { REDIS_URI, USE_REDIS } = process.env;

let keyvRedis;
const redis_prefix = REDIS_KEY_PREFIX || '';
const redis_max_listeners = Number(REDIS_MAX_LISTENERS) || 10;

function mapURI(uri) {
  const regex =
    /^(?:(?<scheme>\w+):\/\/)?(?:(?<user>[^:@]+)(?::(?<password>[^@]+))?@)?(?<host>[\w.-]+)(?::(?<port>\d{1,5}))?$/;
  const match = uri.match(regex);

  if (match) {
    const { scheme, user, password, host, port } = match.groups;

    return {
      scheme: scheme || 'none',
      user: user || null,
      password: password || null,
      host: host || null,
      port: port || null,
    };
  } else {
    const parts = uri.split(':');
    if (parts.length === 2) {
      return {
        scheme: 'none',
        user: null,
        password: null,
        host: parts[0],
        port: parts[1],
      };
    }

    return {
      scheme: 'none',
      user: null,
      password: null,
      host: uri,
      port: null,
    };
  }
}

if (REDIS_URI && isEnabled(USE_REDIS)) {
  let redisOptions = null;
  let keyvOpts = {
    useRedisSets: false,
    keyPrefix: redis_prefix,
  };

  if (REDIS_CA) {
    const ca = fs.readFileSync(REDIS_CA);
    redisOptions = { tls: { ca } };
  }

  if (isEnabled(USE_REDIS_CLUSTER)) {
    const hosts = REDIS_URI.split(',').map((item) => {
      var value = mapURI(item);

      return {
        host: value.host,
        port: value.port,
      };
    });
    const cluster = new ioredis.Cluster(hosts, { redisOptions });
    keyvRedis = new KeyvRedis(cluster, keyvOpts);
  } else {
    keyvRedis = new KeyvRedis(REDIS_URI, keyvOpts);
  }
  keyvRedis = new KeyvRedis(REDIS_URI, { useRedisSets: false });
  keyvRedis.on('error', (err) => logger.error('KeyvRedis connection error:', err));
  keyvRedis.setMaxListeners(redis_max_listeners);
  keyvRedis.setMaxListeners(20);
  logger.info(
    '[Optional] Redis initialized. Note: Redis support is experimental. If you have issues, disable it. Cache needs to be flushed for values to refresh.',
  );

@@ -1,12 +1,9 @@
const axios = require('axios');
const { EventSource } = require('eventsource');
const { Time, CacheKeys } = require('librechat-data-provider');
const logger = require('./winston');

global.EventSource = EventSource;

let mcpManager = null;
let flowManager = null;

/**
 * @returns {Promise<MCPManager>}
@@ -19,21 +16,6 @@ async function getMCPManager() {
  return mcpManager;
}

/**
 * @param {(key: string) => Keyv} getLogStores
 * @returns {Promise<FlowStateManager>}
 */
async function getFlowStateManager(getLogStores) {
  if (!flowManager) {
    const { FlowStateManager } = await import('librechat-mcp');
    flowManager = new FlowStateManager(getLogStores(CacheKeys.FLOWS), {
      ttl: Time.ONE_MINUTE * 3,
      logger,
    });
  }
  return flowManager;
}

/**
 * Sends message data in Server Sent Events format.
 * @param {ServerResponse} res - The server response.
@@ -48,46 +30,8 @@ const sendEvent = (res, event) => {
  res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`);
};

/**
 * Creates and configures an Axios instance with optional proxy settings.
 *
 * @typedef {import('axios').AxiosInstance} AxiosInstance
 * @typedef {import('axios').AxiosProxyConfig} AxiosProxyConfig
 *
 * @returns {AxiosInstance} A configured Axios instance
 * @throws {Error} If there's an issue creating the Axios instance or parsing the proxy URL
 */
function createAxiosInstance() {
  const instance = axios.create();

  if (process.env.proxy) {
    try {
      const url = new URL(process.env.proxy);

      /** @type {AxiosProxyConfig} */
      const proxyConfig = {
        host: url.hostname.replace(/^\[|\]$/g, ''),
        protocol: url.protocol.replace(':', ''),
      };

      if (url.port) {
        proxyConfig.port = parseInt(url.port, 10);
      }

      instance.defaults.proxy = proxyConfig;
    } catch (error) {
      console.error('Error parsing proxy URL:', error);
      throw new Error(`Invalid proxy URL: ${process.env.proxy}`);
    }
  }

  return instance;
}

module.exports = {
  logger,
  sendEvent,
  getMCPManager,
  createAxiosInstance,
  getFlowStateManager,
};

@@ -1,126 +0,0 @@
const axios = require('axios');
const { createAxiosInstance } = require('./index');

// Mock axios
jest.mock('axios', () => ({
  interceptors: {
    request: { use: jest.fn(), eject: jest.fn() },
    response: { use: jest.fn(), eject: jest.fn() },
  },
  create: jest.fn().mockReturnValue({
    defaults: {
      proxy: null,
    },
    get: jest.fn().mockResolvedValue({ data: {} }),
    post: jest.fn().mockResolvedValue({ data: {} }),
    put: jest.fn().mockResolvedValue({ data: {} }),
    delete: jest.fn().mockResolvedValue({ data: {} }),
  }),
  get: jest.fn().mockResolvedValue({ data: {} }),
  post: jest.fn().mockResolvedValue({ data: {} }),
  put: jest.fn().mockResolvedValue({ data: {} }),
  delete: jest.fn().mockResolvedValue({ data: {} }),
  reset: jest.fn().mockImplementation(function () {
    this.get.mockClear();
    this.post.mockClear();
    this.put.mockClear();
    this.delete.mockClear();
    this.create.mockClear();
  }),
}));

describe('createAxiosInstance', () => {
  const originalEnv = process.env;

  beforeEach(() => {
    // Reset mocks
    jest.clearAllMocks();
    // Create a clean copy of process.env
    process.env = { ...originalEnv };
    // Default: no proxy
    delete process.env.proxy;
  });

  afterAll(() => {
    // Restore original process.env
    process.env = originalEnv;
  });

  test('creates an axios instance without proxy when no proxy env is set', () => {
    const instance = createAxiosInstance();

    expect(axios.create).toHaveBeenCalledTimes(1);
    expect(instance.defaults.proxy).toBeNull();
  });

  test('configures proxy correctly with hostname and protocol', () => {
    process.env.proxy = 'http://example.com';

    const instance = createAxiosInstance();

    expect(axios.create).toHaveBeenCalledTimes(1);
    expect(instance.defaults.proxy).toEqual({
      host: 'example.com',
      protocol: 'http',
    });
  });

  test('configures proxy correctly with hostname, protocol and port', () => {
    process.env.proxy = 'https://proxy.example.com:8080';

    const instance = createAxiosInstance();

    expect(axios.create).toHaveBeenCalledTimes(1);
    expect(instance.defaults.proxy).toEqual({
      host: 'proxy.example.com',
      protocol: 'https',
      port: 8080,
    });
  });

  test('handles proxy URLs with authentication', () => {
    process.env.proxy = 'http://user:pass@proxy.example.com:3128';

    const instance = createAxiosInstance();

    expect(axios.create).toHaveBeenCalledTimes(1);
    expect(instance.defaults.proxy).toEqual({
      host: 'proxy.example.com',
      protocol: 'http',
      port: 3128,
      // Note: The current implementation doesn't handle auth - if needed, add this functionality
    });
  });

  test('throws error when proxy URL is invalid', () => {
    process.env.proxy = 'invalid-url';

    expect(() => createAxiosInstance()).toThrow('Invalid proxy URL');
    expect(axios.create).toHaveBeenCalledTimes(1);
  });

  // If you want to test the actual URL parsing more thoroughly
  test('handles edge case proxy URLs correctly', () => {
    // IPv6 address
    process.env.proxy = 'http://[::1]:8080';

    let instance = createAxiosInstance();

    expect(instance.defaults.proxy).toEqual({
      host: '::1',
      protocol: 'http',
      port: 8080,
    });

    // URL with path (which should be ignored for proxy config)
    process.env.proxy = 'http://proxy.example.com:8080/some/path';

    instance = createAxiosInstance();

    expect(instance.defaults.proxy).toEqual({
      host: 'proxy.example.com',
      protocol: 'http',
      port: 8080,
    });
  });
});
@@ -1,11 +1,9 @@
const { MeiliSearch } = require('meilisearch');
const { Conversation } = require('~/models/Conversation');
const { Message } = require('~/models/Message');
const { isEnabled } = require('~/server/utils');
const Conversation = require('~/models/schema/convoSchema');
const Message = require('~/models/schema/messageSchema');
const { logger } = require('~/config');

const searchEnabled = isEnabled(process.env.SEARCH);
const indexingDisabled = isEnabled(process.env.MEILI_NO_SYNC);
const searchEnabled = process.env?.SEARCH?.toLowerCase() === 'true';
let currentTimeout = null;

class MeiliSearchClient {
@@ -25,7 +23,8 @@ class MeiliSearchClient {
  }
}

async function indexSync() {
// eslint-disable-next-line no-unused-vars
async function indexSync(req, res, next) {
  if (!searchEnabled) {
    return;
  }
@@ -34,15 +33,10 @@ async function indexSync() {
  const client = MeiliSearchClient.getInstance();

  const { status } = await client.health();
  if (status !== 'available') {
  if (status !== 'available' || !process.env.SEARCH) {
    throw new Error('Meilisearch not available');
  }

  if (indexingDisabled === true) {
    logger.info('[indexSync] Indexing is disabled, skipping...');
    return;
  }

  const messageCount = await Message.countDocuments();
  const convoCount = await Conversation.countDocuments();
  const messages = await client.index('messages').getStats();
@@ -77,6 +71,7 @@ async function indexSync() {
      logger.info('[indexSync] Meilisearch not configured, search will be disabled.');
    } else {
      logger.error('[indexSync] error', err);
      // res.status(500).json({ error: 'Server error' });
    }
  }
}

@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
const { actionSchema } = require('@librechat/data-schemas');
const actionSchema = require('./schema/action');

const Action = mongoose.model('action', actionSchema);


@@ -9,7 +9,7 @@ const {
  removeAgentFromAllProjects,
} = require('./Project');
const getLogStores = require('~/cache/getLogStores');
const { agentSchema } = require('@librechat/data-schemas');
const agentSchema = require('./schema/agent');

const Agent = mongoose.model('agent', agentSchema);

@@ -97,22 +97,11 @@ const updateAgent = async (searchParameter, updateData) => {
const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
  const searchParameter = { id: agent_id };

  // build the update to push or create the file ids set
  const fileIdsPath = `tool_resources.${tool_resource}.file_ids`;

  await Agent.updateOne(
    {
      id: agent_id,
      [`${fileIdsPath}`]: { $exists: false },
    },
    {
      $set: {
        [`${fileIdsPath}`]: [],
      },
    },
  );

  const updateData = { $addToSet: { [fileIdsPath]: file_id } };

  // return the updated agent or throw if no agent matches
  const updatedAgent = await updateAgent(searchParameter, updateData);
  if (updatedAgent) {
    return updatedAgent;
@@ -301,7 +290,6 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
};

module.exports = {
  Agent,
  getAgent,
  loadAgent,
  createAgent,

@@ -1,160 +0,0 @@
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { Agent, addAgentResourceFile, removeAgentResourceFiles } = require('./Agent');

describe('Agent Resource File Operations', () => {
  let mongoServer;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    await Agent.deleteMany({});
  });

  const createBasicAgent = async () => {
    const agentId = `agent_${uuidv4()}`;
    const agent = await Agent.create({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: new mongoose.Types.ObjectId(),
    });
    return agent;
  };

  test('should handle concurrent file additions', async () => {
    const agent = await createBasicAgent();
    const fileIds = Array.from({ length: 10 }, () => uuidv4());

    // Concurrent additions
    const additionPromises = fileIds.map((fileId) =>
      addAgentResourceFile({
        agent_id: agent.id,
        tool_resource: 'test_tool',
        file_id: fileId,
      }),
    );

    await Promise.all(additionPromises);

    const updatedAgent = await Agent.findOne({ id: agent.id });
    expect(updatedAgent.tool_resources.test_tool.file_ids).toBeDefined();
    expect(updatedAgent.tool_resources.test_tool.file_ids).toHaveLength(10);
    expect(new Set(updatedAgent.tool_resources.test_tool.file_ids).size).toBe(10);
  });

  test('should handle concurrent additions and removals', async () => {
    const agent = await createBasicAgent();
    const initialFileIds = Array.from({ length: 5 }, () => uuidv4());

    await Promise.all(
      initialFileIds.map((fileId) =>
        addAgentResourceFile({
          agent_id: agent.id,
          tool_resource: 'test_tool',
          file_id: fileId,
        }),
      ),
    );

    const newFileIds = Array.from({ length: 5 }, () => uuidv4());
    const operations = [
      ...newFileIds.map((fileId) =>
        addAgentResourceFile({
          agent_id: agent.id,
          tool_resource: 'test_tool',
          file_id: fileId,
        }),
      ),
      ...initialFileIds.map((fileId) =>
        removeAgentResourceFiles({
          agent_id: agent.id,
          files: [{ tool_resource: 'test_tool', file_id: fileId }],
        }),
      ),
    ];

    await Promise.all(operations);

    const updatedAgent = await Agent.findOne({ id: agent.id });
    expect(updatedAgent.tool_resources.test_tool.file_ids).toBeDefined();
    expect(updatedAgent.tool_resources.test_tool.file_ids).toHaveLength(5);
  });

  test('should initialize array when adding to non-existent tool resource', async () => {
    const agent = await createBasicAgent();
    const fileId = uuidv4();

    const updatedAgent = await addAgentResourceFile({
      agent_id: agent.id,
      tool_resource: 'new_tool',
      file_id: fileId,
    });

    expect(updatedAgent.tool_resources.new_tool.file_ids).toBeDefined();
    expect(updatedAgent.tool_resources.new_tool.file_ids).toHaveLength(1);
    expect(updatedAgent.tool_resources.new_tool.file_ids[0]).toBe(fileId);
  });

  test('should handle rapid sequential modifications to same tool resource', async () => {
    const agent = await createBasicAgent();
    const fileId = uuidv4();

    for (let i = 0; i < 10; i++) {
      await addAgentResourceFile({
        agent_id: agent.id,
        tool_resource: 'test_tool',
        file_id: `${fileId}_${i}`,
      });

      if (i % 2 === 0) {
        await removeAgentResourceFiles({
          agent_id: agent.id,
          files: [{ tool_resource: 'test_tool', file_id: `${fileId}_${i}` }],
        });
      }
    }

    const updatedAgent = await Agent.findOne({ id: agent.id });
    expect(updatedAgent.tool_resources.test_tool.file_ids).toBeDefined();
    expect(Array.isArray(updatedAgent.tool_resources.test_tool.file_ids)).toBe(true);
  });

  test('should handle multiple tool resources concurrently', async () => {
    const agent = await createBasicAgent();
    const toolResources = ['tool1', 'tool2', 'tool3'];
    const operations = [];

    toolResources.forEach((tool) => {
      const fileIds = Array.from({ length: 5 }, () => uuidv4());
      fileIds.forEach((fileId) => {
        operations.push(
          addAgentResourceFile({
            agent_id: agent.id,
            tool_resource: tool,
            file_id: fileId,
          }),
        );
      });
    });

    await Promise.all(operations);

    const updatedAgent = await Agent.findOne({ id: agent.id });
    toolResources.forEach((tool) => {
      expect(updatedAgent.tool_resources[tool].file_ids).toBeDefined();
      expect(updatedAgent.tool_resources[tool].file_ids).toHaveLength(5);
    });
  });
});
@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
const { assistantSchema } = require('@librechat/data-schemas');
const assistantSchema = require('./schema/assistant');

const Assistant = mongoose.model('assistant', assistantSchema);


@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
const { balanceSchema } = require('@librechat/data-schemas');
const balanceSchema = require('./schema/balance');
const { getMultiplier } = require('./tx');
const { logger } = require('~/config');


@@ -1,9 +1,5 @@
const mongoose = require('mongoose');
const Banner = require('./schema/banner');
const logger = require('~/config/winston');
const { bannerSchema } = require('@librechat/data-schemas');

const Banner = mongoose.model('Banner', bannerSchema);

/**
 * Retrieves the current active banner.
 * @returns {Promise<Object|null>} The active banner object or null if no active banner is found.
@@ -28,4 +24,4 @@ const getBanner = async (user) => {
  }
};

module.exports = { Banner, getBanner };
module.exports = { getBanner };

@@ -1,40 +1,40 @@
const { logger } = require('~/config');

// const { Categories } = require('./schema/categories');
const options = [
  {
    label: 'com_ui_idea',
    label: 'idea',
    value: 'idea',
  },
  {
    label: 'com_ui_travel',
    label: 'travel',
    value: 'travel',
  },
  {
    label: 'com_ui_teach_or_explain',
    label: 'teach_or_explain',
    value: 'teach_or_explain',
  },
  {
    label: 'com_ui_write',
    label: 'write',
    value: 'write',
  },
  {
    label: 'com_ui_shop',
    label: 'shop',
    value: 'shop',
  },
  {
    label: 'com_ui_code',
    label: 'code',
    value: 'code',
  },
  {
    label: 'com_ui_misc',
    label: 'misc',
    value: 'misc',
  },
  {
    label: 'com_ui_roleplay',
    label: 'roleplay',
    value: 'roleplay',
  },
  {
    label: 'com_ui_finance',
    label: 'finance',
    value: 'finance',
  },
];

@@ -15,6 +15,19 @@ const searchConversation = async (conversationId) => {
    throw new Error('Error searching conversation');
  }
};
/**
 * Searches for a conversation by conversationId and returns associated file ids.
 * @param {string} conversationId - The conversation's ID.
 * @returns {Promise<string[] | null>}
 */
const getConvoFiles = async (conversationId) => {
  try {
    return (await Conversation.findOne({ conversationId }, 'files').lean())?.files ?? [];
  } catch (error) {
    logger.error('[getConvoFiles] Error getting conversation files', error);
    throw new Error('Error getting conversation files');
  }
};

/**
 * Retrieves a single conversation for a given user and conversation ID.
@@ -60,20 +73,6 @@ const deleteNullOrEmptyConversations = async () => {
  }
};

/**
 * Searches for a conversation by conversationId and returns associated file ids.
 * @param {string} conversationId - The conversation's ID.
 * @returns {Promise<string[] | null>}
 */
const getConvoFiles = async (conversationId) => {
  try {
    return (await Conversation.findOne({ conversationId }, 'files').lean())?.files ?? [];
  } catch (error) {
    logger.error('[getConvoFiles] Error getting conversation files', error);
    throw new Error('Error getting conversation files');
  }
};

module.exports = {
  Conversation,
  getConvoFiles,
@@ -105,16 +104,10 @@ module.exports = {
      update.expiredAt = null;
    }

    /** @type {{ $set: Partial<TConversation>; $unset?: Record<keyof TConversation, number> }} */
    const updateOperation = { $set: update };
    if (metadata && metadata.unsetFields && Object.keys(metadata.unsetFields).length > 0) {
      updateOperation.$unset = metadata.unsetFields;
    }

    /** Note: the resulting Model object is necessary for Meilisearch operations */
    const conversation = await Conversation.findOneAndUpdate(
      { conversationId, user: req.user.id },
      updateOperation,
      update,
      {
        new: true,
        upsert: true,

@@ -1,11 +1,7 @@
const mongoose = require('mongoose');
const ConversationTag = require('./schema/conversationTagSchema');
const Conversation = require('./schema/convoSchema');
const logger = require('~/config/winston');

const { conversationTagSchema } = require('@librechat/data-schemas');

const ConversationTag = mongoose.model('ConversationTag', conversationTagSchema);

/**
 * Retrieves all conversation tags for a user.
 * @param {string} user - The user ID.

@@ -1,6 +1,5 @@
const mongoose = require('mongoose');
const { fileSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');
const fileSchema = require('./schema/fileSchema');

const File = mongoose.model('File', fileSchema);

@@ -8,7 +7,7 @@ const File = mongoose.model('File', fileSchema);
 * Finds a file by its file_id with additional query options.
 * @param {string} file_id - The unique identifier of the file.
 * @param {object} options - Query options for filtering, projection, etc.
 * @returns {Promise<IMongoFile>} A promise that resolves to the file document or null.
 * @returns {Promise<MongoFile>} A promise that resolves to the file document or null.
 */
const findFileById = async (file_id, options = {}) => {
  return await File.findOne({ file_id, ...options }).lean();
@@ -18,46 +17,18 @@ const findFileById = async (file_id, options = {}) => {
 * Retrieves files matching a given filter, sorted by the most recently updated.
 * @param {Object} filter - The filter criteria to apply.
 * @param {Object} [_sortOptions] - Optional sort parameters.
 * @param {Object|String} [selectFields={ text: 0 }] - Fields to include/exclude in the query results.
 *                                                     Default excludes the 'text' field.
 * @returns {Promise<Array<IMongoFile>>} A promise that resolves to an array of file documents.
 * @returns {Promise<Array<MongoFile>>} A promise that resolves to an array of file documents.
 */
const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => {
const getFiles = async (filter, _sortOptions) => {
  const sortOptions = { updatedAt: -1, ..._sortOptions };
  return await File.find(filter).select(selectFields).sort(sortOptions).lean();
};

/**
 * Retrieves tool files (files that are embedded or have a fileIdentifier) from an array of file IDs
 * @param {string[]} fileIds - Array of file_id strings to search for
 * @returns {Promise<Array<IMongoFile>>} Files that match the criteria
 */
const getToolFilesByIds = async (fileIds) => {
  if (!fileIds || !fileIds.length) {
    return [];
  }

  try {
    const filter = {
      file_id: { $in: fileIds },
      $or: [{ embedded: true }, { 'metadata.fileIdentifier': { $exists: true } }],
    };

    const selectFields = { text: 0 };
    const sortOptions = { updatedAt: -1 };

    return await getFiles(filter, sortOptions, selectFields);
  } catch (error) {
    logger.error('[getToolFilesByIds] Error retrieving tool files:', error);
    throw new Error('Error retrieving tool files');
  }
  return await File.find(filter).sort(sortOptions).lean();
};

/**
 * Creates a new file with a TTL of 1 hour.
 * @param {IMongoFile} data - The file data to be created, must contain file_id.
 * @param {MongoFile} data - The file data to be created, must contain file_id.
 * @param {boolean} disableTTL - Whether to disable the TTL.
 * @returns {Promise<IMongoFile>} A promise that resolves to the created file document.
 * @returns {Promise<MongoFile>} A promise that resolves to the created file document.
 */
const createFile = async (data, disableTTL) => {
  const fileData = {
@@ -77,8 +48,8 @@ const createFile = async (data, disableTTL) => {

/**
 * Updates a file identified by file_id with new data and removes the TTL.
 * @param {IMongoFile} data - The data to update, must contain file_id.
 * @returns {Promise<IMongoFile>} A promise that resolves to the updated file document.
 * @param {MongoFile} data - The data to update, must contain file_id.
 * @returns {Promise<MongoFile>} A promise that resolves to the updated file document.
 */
const updateFile = async (data) => {
  const { file_id, ...update } = data;
@@ -91,8 +62,8 @@ const updateFile = async (data) => {

/**
 * Increments the usage of a file identified by file_id.
 * @param {IMongoFile} data - The data to update, must contain file_id and the increment value for usage.
 * @returns {Promise<IMongoFile>} A promise that resolves to the updated file document.
 * @param {MongoFile} data - The data to update, must contain file_id and the increment value for usage.
 * @returns {Promise<MongoFile>} A promise that resolves to the updated file document.
 */
const updateFileUsage = async (data) => {
  const { file_id, inc = 1 } = data;
@@ -106,7 +77,7 @@ const updateFileUsage = async (data) => {
/**
 * Deletes a file identified by file_id.
 * @param {string} file_id - The unique identifier of the file to delete.
 * @returns {Promise<IMongoFile>} A promise that resolves to the deleted file document or null.
 * @returns {Promise<MongoFile>} A promise that resolves to the deleted file document or null.
 */
const deleteFile = async (file_id) => {
  return await File.findOneAndDelete({ file_id }).lean();
@@ -115,7 +86,7 @@ const deleteFile = async (file_id) => {
/**
 * Deletes a file identified by a filter.
 * @param {object} filter - The filter criteria to apply.
 * @returns {Promise<IMongoFile>} A promise that resolves to the deleted file document or null.
 * @returns {Promise<MongoFile>} A promise that resolves to the deleted file document or null.
 */
const deleteFileByFilter = async (filter) => {
  return await File.findOneAndDelete(filter).lean();
@@ -138,7 +109,6 @@ module.exports = {
  File,
  findFileById,
  getFiles,
  getToolFilesByIds,
  createFile,
  updateFile,
  updateFileUsage,

@@ -1,6 +0,0 @@
const mongoose = require('mongoose');
const { groupSchema } = require('@librechat/data-schemas');

const Group = mongoose.model('Group', groupSchema);

module.exports = Group;
@@ -1,4 +1,4 @@
const mongoose = require('mongoose');
const { keySchema } = require('@librechat/data-schemas');
const keySchema = require('./schema/key');

module.exports = mongoose.model('Key', keySchema);

@@ -71,42 +71,7 @@ async function saveMessage(req, params, metadata) {
  } catch (err) {
    logger.error('Error saving message:', err);
    logger.info(`---\`saveMessage\` context: ${metadata?.context}`);

    // Check if this is a duplicate key error (MongoDB error code 11000)
    if (err.code === 11000 && err.message.includes('duplicate key error')) {
      // Log the duplicate key error but don't crash the application
      logger.warn(`Duplicate messageId detected: ${params.messageId}. Continuing execution.`);

      try {
        // Try to find the existing message with this ID
        const existingMessage = await Message.findOne({
          messageId: params.messageId,
          user: req.user.id,
        });

        // If we found it, return it
        if (existingMessage) {
          return existingMessage.toObject();
        }

        // If we can't find it (unlikely but possible in race conditions)
        return {
          ...params,
          messageId: params.messageId,
          user: req.user.id,
        };
      } catch (findError) {
        // If the findOne also fails, log it but don't crash
        logger.warn(`Could not retrieve existing message with ID ${params.messageId}: ${findError.message}`);
        return {
          ...params,
          messageId: params.messageId,
          user: req.user.id,
        };
      }
    }

    throw err; // Re-throw other errors
    throw err;
  }
}
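For context on the branch above: MongoDB surfaces a unique-index violation as error code 11000, which is why the guard checks both `err.code` and the message text. A sketch of the error shape the catch block would see (the index name and key value are illustrative, not from this diff):

// Hypothetical sketch of a duplicate-key error from the MongoDB driver;
// the index name below is illustrative only.
const exampleDuplicateKeyError = {
  name: 'MongoServerError',
  code: 11000,
  message: 'E11000 duplicate key error collection: test.messages index: messageId_1 dup key: { messageId: "..." }',
};
// The guard above matches on both properties:
// exampleDuplicateKeyError.code === 11000 &&
//   exampleDuplicateKeyError.message.includes('duplicate key error')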
@@ -1,6 +1,6 @@
const { model } = require('mongoose');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
const { projectSchema } = require('@librechat/data-schemas');
const projectSchema = require('~/models/schema/projectSchema');

const Project = model('Project', projectSchema);
@@ -9,7 +9,7 @@ const Project = model('Project', projectSchema);
 *
 * @param {string} projectId - The ID of the project to find and return as a plain object.
 * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
 * @returns {Promise<IMongoProject>} A plain object representing the project document, or `null` if no project is found.
 * @returns {Promise<MongoProject>} A plain object representing the project document, or `null` if no project is found.
 */
const getProjectById = async function (projectId, fieldsToSelect = null) {
  const query = Project.findById(projectId);
@@ -27,7 +27,7 @@ const getProjectById = async function (projectId, fieldsToSelect = null) {
 *
 * @param {string} projectName - The name of the project to find or create.
 * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
 * @returns {Promise<IMongoProject>} A plain object representing the project document.
 * @returns {Promise<MongoProject>} A plain object representing the project document.
 */
const getProjectByName = async function (projectName, fieldsToSelect = null) {
  const query = { name: projectName };
@@ -47,7 +47,7 @@ const getProjectByName = async function (projectName, fieldsToSelect = null) {
 *
 * @param {string} projectId - The ID of the project to update.
 * @param {string[]} promptGroupIds - The array of prompt group IDs to add to the project.
 * @returns {Promise<IMongoProject>} The updated project document.
 * @returns {Promise<MongoProject>} The updated project document.
 */
const addGroupIdsToProject = async function (projectId, promptGroupIds) {
  return await Project.findByIdAndUpdate(
@@ -62,7 +62,7 @@ const addGroupIdsToProject = async function (projectId, promptGroupIds) {
 *
 * @param {string} projectId - The ID of the project to update.
 * @param {string[]} promptGroupIds - The array of prompt group IDs to remove from the project.
 * @returns {Promise<IMongoProject>} The updated project document.
 * @returns {Promise<MongoProject>} The updated project document.
 */
const removeGroupIdsFromProject = async function (projectId, promptGroupIds) {
  return await Project.findByIdAndUpdate(
@@ -87,7 +87,7 @@ const removeGroupFromAllProjects = async (promptGroupId) => {
 *
 * @param {string} projectId - The ID of the project to update.
 * @param {string[]} agentIds - The array of agent IDs to add to the project.
 * @returns {Promise<IMongoProject>} The updated project document.
 * @returns {Promise<MongoProject>} The updated project document.
 */
const addAgentIdsToProject = async function (projectId, agentIds) {
  return await Project.findByIdAndUpdate(
@@ -102,7 +102,7 @@ const addAgentIdsToProject = async function (projectId, agentIds) {
 *
 * @param {string} projectId - The ID of the project to update.
 * @param {string[]} agentIds - The array of agent IDs to remove from the project.
 * @returns {Promise<IMongoProject>} The updated project document.
 * @returns {Promise<MongoProject>} The updated project document.
 */
const removeAgentIdsFromProject = async function (projectId, agentIds) {
  return await Project.findByIdAndUpdate(
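The hunk bodies above elide the actual update documents; a sketch of the usual shape of such an operation, assuming MongoDB's `$addToSet` with `$each` (an assumption, not shown in this diff):

// Hypothetical sketch: add prompt group IDs to a project without duplicates.
// `$addToSet` with `$each` appends only the IDs not already in the array.
const addGroupIdsToProjectSketch = async function (projectId, promptGroupIds) {
  return await Project.findByIdAndUpdate(
    projectId,
    { $addToSet: { promptGroupIds: { $each: promptGroupIds } } },
    { new: true },
  ).lean();
};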
@@ -1,4 +1,3 @@
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { SystemRoles, SystemCategories, Constants } = require('librechat-data-provider');
const {
@@ -7,13 +6,10 @@ const {
  removeGroupIdsFromProject,
  removeGroupFromAllProjects,
} = require('./Project');
const { promptGroupSchema, promptSchema } = require('@librechat/data-schemas');
const { Prompt, PromptGroup } = require('./schema/promptSchema');
const { escapeRegExp } = require('~/server/utils');
const { logger } = require('~/config');

const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);
const Prompt = mongoose.model('Prompt', promptSchema);

/**
 * Create a pipeline for the aggregation to get prompt groups
 * @param {Object} query
@@ -1,4 +1,3 @@
const mongoose = require('mongoose');
const {
  CacheKeys,
  SystemRoles,
@@ -7,17 +6,13 @@ const {
  removeNullishValues,
  agentPermissionsSchema,
  promptPermissionsSchema,
  runCodePermissionsSchema,
  bookmarkPermissionsSchema,
  multiConvoPermissionsSchema,
  temporaryChatPermissionsSchema,
} = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const { roleSchema } = require('@librechat/data-schemas');
const Role = require('~/models/schema/roleSchema');
const { logger } = require('~/config');

const Role = mongoose.model('Role', roleSchema);

/**
 * Retrieve a role by name and convert the found role document to a plain object.
 * If the role with the given name doesn't exist and the name is a system defined role, create it and return the lean version.
@@ -82,8 +77,6 @@ const permissionSchemas = {
  [PermissionTypes.PROMPTS]: promptPermissionsSchema,
  [PermissionTypes.BOOKMARKS]: bookmarkPermissionsSchema,
  [PermissionTypes.MULTI_CONVO]: multiConvoPermissionsSchema,
  [PermissionTypes.TEMPORARY_CHAT]: temporaryChatPermissionsSchema,
  [PermissionTypes.RUN_CODE]: runCodePermissionsSchema,
};

/**
@@ -171,7 +164,6 @@ const initializeRoles = async function () {
  }
};
module.exports = {
  Role,
  getRoleByName,
  initializeRoles,
  updateRoleByName,
@@ -8,7 +8,7 @@ const {
} = require('librechat-data-provider');
const { updateAccessPermissions, initializeRoles } = require('~/models/Role');
const getLogStores = require('~/cache/getLogStores');
const { Role } = require('~/models/Role');
const Role = require('~/models/schema/roleSchema');

// Mock the cache
jest.mock('~/cache/getLogStores', () => {
@@ -1,7 +1,7 @@
const mongoose = require('mongoose');
const signPayload = require('~/server/services/signPayload');
const { hashToken } = require('~/server/utils/crypto');
const { sessionSchema } = require('@librechat/data-schemas');
const sessionSchema = require('./schema/session');
const { logger } = require('~/config');

const Session = mongoose.model('Session', sessionSchema);
@@ -1,9 +1,7 @@
const mongoose = require('mongoose');
const { nanoid } = require('nanoid');
const { Constants } = require('librechat-data-provider');
const { Conversation } = require('~/models/Conversation');
const { shareSchema } = require('@librechat/data-schemas');
const SharedLink = mongoose.model('SharedLink', shareSchema);
const SharedLink = require('./schema/shareSchema');
const { getMessages } = require('./Message');
const logger = require('~/config/winston');
@@ -1,6 +1,5 @@
const tokenSchema = require('./schema/tokenSchema');
const mongoose = require('mongoose');
const { encryptV2 } = require('~/server/utils/crypto');
const { tokenSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');

/**
@@ -8,39 +7,6 @@ const { logger } = require('~/config');
 * @type {mongoose.Model}
 */
const Token = mongoose.model('Token', tokenSchema);
/**
 * Fixes the indexes for the Token collection from legacy TTL indexes to the new expiresAt index.
 */
async function fixIndexes() {
  try {
    if (
      process.env.NODE_ENV === 'CI' ||
      process.env.NODE_ENV === 'development' ||
      process.env.NODE_ENV === 'test'
    ) {
      return;
    }
    const indexes = await Token.collection.indexes();
    logger.debug('Existing Token Indexes:', JSON.stringify(indexes, null, 2));
    const unwantedTTLIndexes = indexes.filter(
      (index) => index.key.createdAt === 1 && index.expireAfterSeconds !== undefined,
    );
    if (unwantedTTLIndexes.length === 0) {
      logger.debug('No unwanted Token indexes found.');
      return;
    }
    for (const index of unwantedTTLIndexes) {
      logger.debug(`Dropping unwanted Token index: ${index.name}`);
      await Token.collection.dropIndex(index.name);
      logger.debug(`Dropped Token index: ${index.name}`);
    }
    logger.debug('Token index cleanup completed successfully.');
  } catch (error) {
    logger.error('An error occurred while fixing Token indexes:', error);
  }
}

fixIndexes();

/**
 * Creates a new Token instance.
@@ -63,7 +29,8 @@ async function createToken(tokenData) {
    expiresAt,
  };

  return await Token.create(newTokenData);
  const newToken = new Token(newTokenData);
  return await newToken.save();
} catch (error) {
  logger.debug('An error occurred while creating token:', error);
  throw error;
@@ -75,8 +42,7 @@ async function createToken(tokenData) {
 * @param {Object} query - The query to match against.
 * @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
 * @param {String} query.token - The token value.
 * @param {String} [query.email] - The email of the user.
 * @param {String} [query.identifier] - Unique, alternative identifier for the token.
 * @param {String} query.email - The email of the user.
 * @returns {Promise<Object|null>} The matched Token document, or null if not found.
 * @throws Will throw an error if the find operation fails.
 */
@@ -93,9 +59,6 @@ async function findToken(query) {
  if (query.email) {
    conditions.push({ email: query.email });
  }
  if (query.identifier) {
    conditions.push({ identifier: query.identifier });
  }

  const token = await Token.findOne({
    $and: conditions,
@@ -113,8 +76,6 @@ async function findToken(query) {
 * @param {Object} query - The query to match against.
 * @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
 * @param {String} query.token - The token value.
 * @param {String} [query.email] - The email of the user.
 * @param {String} [query.identifier] - Unique, alternative identifier for the token.
 * @param {Object} updateData - The data to update the Token with.
 * @returns {Promise<mongoose.Document|null>} The updated Token document, or null if not found.
 * @throws Will throw an error if the update operation fails.
@@ -133,20 +94,14 @@ async function updateToken(query, updateData) {
 * @param {Object} query - The query to match against.
 * @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
 * @param {String} query.token - The token value.
 * @param {String} [query.email] - The email of the user.
 * @param {String} [query.identifier] - Unique, alternative identifier for the token.
 * @param {String} query.email - The email of the user.
 * @returns {Promise<Object>} The result of the delete operation.
 * @throws Will throw an error if the delete operation fails.
 */
async function deleteTokens(query) {
  try {
    return await Token.deleteMany({
      $or: [
        { userId: query.userId },
        { token: query.token },
        { email: query.email },
        { identifier: query.identifier },
      ],
      $or: [{ userId: query.userId }, { token: query.token }, { email: query.email }],
    });
  } catch (error) {
    logger.debug('An error occurred while deleting tokens:', error);
@@ -154,46 +109,9 @@ async function deleteTokens(query) {
  }
}

/**
 * Handles the OAuth token by creating or updating the token.
 * @param {object} fields
 * @param {string} fields.userId - The user's ID.
 * @param {string} fields.token - The full token to store.
 * @param {string} fields.identifier - Unique, alternative identifier for the token.
 * @param {number} fields.expiresIn - The number of seconds until the token expires.
 * @param {object} fields.metadata - Additional metadata to store with the token.
 * @param {string} [fields.type="oauth"] - The type of token. Default is 'oauth'.
 */
async function handleOAuthToken({
  token,
  userId,
  identifier,
  expiresIn,
  metadata,
  type = 'oauth',
}) {
  const encrypedToken = await encryptV2(token);
  const tokenData = {
    type,
    userId,
    metadata,
    identifier,
    token: encrypedToken,
    expiresIn: parseInt(expiresIn, 10) || 3600,
  };

  const existingToken = await findToken({ userId, identifier });
  if (existingToken) {
    return await updateToken({ identifier }, tokenData);
  } else {
    return await createToken(tokenData);
  }
}

module.exports = {
  findToken,
  createToken,
  findToken,
  updateToken,
  deleteTokens,
  handleOAuthToken,
};
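A hedged usage sketch of `handleOAuthToken` as exported above; the identifier naming, provider, and expiry value are illustrative, not taken from this diff:

// Hypothetical usage: persist a provider token for a user. The helper
// encrypts the raw token via encryptV2, then updates the record matching
// this identifier or creates a new one.
const { handleOAuthToken } = require('~/models/Token');

async function storeProviderToken(userId, rawAccessToken) {
  return handleOAuthToken({
    userId,
    identifier: 'github-oauth', // assumed naming convention, not from this diff
    token: rawAccessToken,
    expiresIn: 3600, // seconds; handleOAuthToken falls back to 3600 when not parseable
    metadata: { provider: 'github' },
  });
}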
@@ -1,11 +1,9 @@
const mongoose = require('mongoose');
const { toolCallSchema } = require('@librechat/data-schemas');
const ToolCall = mongoose.model('ToolCall', toolCallSchema);
const ToolCall = require('./schema/toolCallSchema');

/**
 * Create a new tool call
 * @param {IToolCallData} toolCallData - The tool call data
 * @returns {Promise<IToolCallData>} The created tool call document
 * @param {ToolCallData} toolCallData - The tool call data
 * @returns {Promise<ToolCallData>} The created tool call document
 */
async function createToolCall(toolCallData) {
  try {
@@ -18,7 +16,7 @@ async function createToolCall(toolCallData) {
/**
 * Get a tool call by ID
 * @param {string} id - The tool call document ID
 * @returns {Promise<IToolCallData|null>} The tool call document or null if not found
 * @returns {Promise<ToolCallData|null>} The tool call document or null if not found
 */
async function getToolCallById(id) {
  try {
@@ -46,7 +44,7 @@ async function getToolCallsByMessage(messageId, userId) {
 * Get tool calls by conversation ID and user
 * @param {string} conversationId - The conversation ID
 * @param {string} userId - The user's ObjectId
 * @returns {Promise<IToolCallData[]>} Array of tool call documents
 * @returns {Promise<ToolCallData[]>} Array of tool call documents
 */
async function getToolCallsByConvo(conversationId, userId) {
  try {
@@ -59,8 +57,8 @@ async function getToolCallsByConvo(conversationId, userId) {
/**
 * Update a tool call
 * @param {string} id - The tool call document ID
 * @param {Partial<IToolCallData>} updateData - The data to update
 * @returns {Promise<IToolCallData|null>} The updated tool call document or null if not found
 * @param {Partial<ToolCallData>} updateData - The data to update
 * @returns {Promise<ToolCallData|null>} The updated tool call document or null if not found
 */
async function updateToolCall(id, updateData) {
  try {
@@ -1,6 +1,6 @@
const mongoose = require('mongoose');
const { isEnabled } = require('~/server/utils/handleText');
const { transactionSchema } = require('@librechat/data-schemas');
const transactionSchema = require('./schema/transaction');
const { getMultiplier, getCacheMultiplier } = require('./tx');
const { logger } = require('~/config');
const Balance = require('./Balance');
@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
const { userSchema } = require('@librechat/data-schemas');
const userSchema = require('~/models/schema/userSchema');

const User = mongoose.model('User', userSchema);
@@ -1,127 +0,0 @@
const User = require('./User');
const Group = require('~/models/Group');

/**
 * Retrieve a group by ID and convert the found group document to a plain object.
 *
 * @param {string} groupId - The ID of the group to find and return as a plain object.
 * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
 * @returns {Promise<Object|null>} A plain object representing the group document, or `null` if no group is found.
 */
const getGroupById = (groupId, fieldsToSelect = null) => {
  const query = Group.findById(groupId);
  if (fieldsToSelect) {
    query.select(fieldsToSelect);
  }
  return query.lean();
};

/**
 * Search for a single group or multiple groups based on partial data and return them as plain objects.
 *
 * @param {Partial<Object>} searchCriteria - The partial data to use for searching groups.
 * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned documents.
 * @returns {Promise<Object[]>} An array of plain objects representing the group documents.
 */
const findGroup = (searchCriteria, fieldsToSelect = null) => {
  const query = Group.find(searchCriteria);
  if (fieldsToSelect) {
    query.select(fieldsToSelect);
  }
  return query.lean();
};

/**
 * Update a group with new data without overwriting existing properties.
 *
 * @param {string} groupId - The ID of the group to update.
 * @param {Object} updateData - An object containing the properties to update.
 * @returns {Promise<Object|null>} The updated group document as a plain object, or `null` if no group is found.
 */
const updateGroup = (groupId, updateData) => {
  return Group.findByIdAndUpdate(
    groupId,
    { $set: updateData },
    { new: true, runValidators: true },
  ).lean();
};

/**
 * Create a new group.
 *
 * @param {Object} data - The group data to be created.
 * @returns {Promise<Object>} The created group document.
 */
const createGroup = async (data) => {
  return await Group.create(data);
};

/**
 * Count the number of group documents in the collection based on the provided filter.
 *
 * @param {Object} [filter={}] - The filter to apply when counting the documents.
 * @returns {Promise<number>} The count of documents that match the filter.
 */
const countGroups = (filter = {}) => {
  return Group.countDocuments(filter);
};

/**
 * Delete a group by its unique ID only if no user is assigned to it.
 *
 * @param {string} groupId - The ID of the group to delete.
 * @returns {Promise<{ deletedCount: number, message: string }>} An object indicating the number of deleted documents.
 */
const deleteGroupById = async (groupId) => {
  // Check if any users reference the group
  const userCount = await User.countDocuments({ groups: groupId });
  if (userCount > 0) {
    return { deletedCount: 0, message: `Cannot delete group; it is assigned to ${userCount} user(s).` };
  }

  try {
    const result = await Group.deleteOne({ _id: groupId });
    if (result.deletedCount === 0) {
      return { deletedCount: 0, message: 'No group found with that ID.' };
    }
    return { deletedCount: result.deletedCount, message: 'Group was deleted successfully.' };
  } catch (error) {
    throw new Error('Error deleting group: ' + error.message);
  }
};

/**
 * Override deletion of a group by its unique ID.
 * This function first removes the group ObjectId from all users' groups arrays,
 * then proceeds to delete the group document.
 *
 * @param {string} groupId - The ID of the group to delete.
 * @returns {Promise<{ deletedCount: number, message: string }>} An object indicating the deletion result.
 */
const overrideDeleteGroupById = async (groupId) => {
  // Remove group references from all users
  await User.updateMany(
    { groups: groupId },
    { $pull: { groups: groupId } },
  );

  try {
    const result = await Group.deleteOne({ _id: groupId });
    if (result.deletedCount === 0) {
      return { deletedCount: 0, message: 'No group found with that ID.' };
    }
    return { deletedCount: result.deletedCount, message: 'Group was deleted successfully (override).' };
  } catch (error) {
    throw new Error('Error deleting group: ' + error.message);
  }
};

module.exports = {
  getGroupById,
  findGroup,
  updateGroup,
  createGroup,
  countGroups,
  deleteGroupById,
  overrideDeleteGroupById,
};
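A usage sketch contrasting the two deletion helpers from the removed file above (the require path is assumed, since the deleted file's name is not shown in this hunk):

// Hypothetical usage: `deleteGroupById` refuses to delete a group that users
// still reference, while `overrideDeleteGroupById` first pulls the group
// from every user's `groups` array and then deletes it.
const { deleteGroupById, overrideDeleteGroupById } = require('~/models/userGroupMethods'); // path assumed

async function removeGroup(groupId) {
  const result = await deleteGroupById(groupId);
  if (result.deletedCount === 0 && result.message.startsWith('Cannot delete group')) {
    // Force the deletion by detaching the group from all users first.
    return overrideDeleteGroupById(groupId);
  }
  return result;
}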
@@ -40,7 +40,6 @@ const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset')
const { createToken, findToken, updateToken, deleteTokens } = require('./Token');
const Balance = require('./Balance');
const User = require('./User');
const Group = require('./Group');
const Key = require('./Key');

module.exports = {
@@ -93,7 +92,6 @@ module.exports = {
  countActiveSessions,

  User,
  Group,
  Key,
  Balance,
};
@@ -4,28 +4,9 @@ const { MeiliSearch } = require('meilisearch');
const { cleanUpPrimaryKeyValue } = require('~/lib/utils/misc');
const logger = require('~/config/meiliLogger');

// Environment flags
/**
 * Flag to indicate if search is enabled based on environment variables.
 * @type {boolean}
 */
const searchEnabled = process.env.SEARCH && process.env.SEARCH.toLowerCase() === 'true';

/**
 * Flag to indicate if MeiliSearch is enabled based on required environment variables.
 * @type {boolean}
 */
const meiliEnabled = process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY && searchEnabled;

/**
 * Validates the required options for configuring the mongoMeili plugin.
 *
 * @param {Object} options - The configuration options.
 * @param {string} options.host - The MeiliSearch host.
 * @param {string} options.apiKey - The MeiliSearch API key.
 * @param {string} options.indexName - The name of the index.
 * @throws {Error} Throws an error if any required option is missing.
 */
const validateOptions = function (options) {
  const requiredKeys = ['host', 'apiKey', 'indexName'];
  requiredKeys.forEach((key) => {
@@ -35,64 +16,53 @@ const validateOptions = function (options) {
  });
};

/**
 * Factory function to create a MeiliMongooseModel class which extends a Mongoose model.
 * This class contains static and instance methods to synchronize and manage the MeiliSearch index
 * corresponding to the MongoDB collection.
 *
 * @param {Object} config - Configuration object.
 * @param {Object} config.index - The MeiliSearch index object.
 * @param {Array<string>} config.attributesToIndex - List of attributes to index.
 * @returns {Function} A class definition that will be loaded into the Mongoose schema.
 */
// const createMeiliMongooseModel = function ({ index, indexName, client, attributesToIndex }) {
const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
  // The primary key is assumed to be the first attribute in the attributesToIndex array.
  const primaryKey = attributesToIndex[0];

  // MeiliMongooseModel is of type Mongoose.Model
  class MeiliMongooseModel {
    /**
     * Synchronizes the data between the MongoDB collection and the MeiliSearch index.
     * `syncWithMeili`: synchronizes the data between a MongoDB collection and a MeiliSearch index,
     * only triggered if there's ever a discrepancy determined by `api\lib\db\indexSync.js`.
     *
     * The synchronization process involves:
     * 1. Fetching all documents from the MongoDB collection and MeiliSearch index.
     * 2. Comparing documents from both sources.
     * 3. Deleting documents from MeiliSearch that no longer exist in MongoDB.
     * 4. Adding documents to MeiliSearch that exist in MongoDB but not in the index.
     * 5. Updating documents in MeiliSearch if key fields (such as `text` or `title`) differ.
     * 6. Updating the `_meiliIndex` field in MongoDB to indicate the indexing status.
     * 1. Fetches all documents from the MongoDB collection and the MeiliSearch index.
     * 2. Compares the documents from both sources.
     * 3. If a document exists in MeiliSearch but not in MongoDB, it's deleted from MeiliSearch.
     * 4. If a document exists in MongoDB but not in MeiliSearch, it's added to MeiliSearch.
     * 5. If a document exists in both but has different `text` or `title` fields (depending on the `primaryKey`), it's updated in MeiliSearch.
     * 6. After all operations, it updates the `_meiliIndex` field in MongoDB to indicate whether the document is indexed in MeiliSearch.
     *
     * Note: The function processes documents in batches because MeiliSearch's
     * `index.getDocuments` requires an exact limit and `index.addDocuments` does not handle
     * partial failures in a batch.
     * Note: This strategy does not use batch operations for Meilisearch as the `index.addDocuments` will discard
     * the entire batch if there's an error with one document, and will not throw an error if there's an issue.
     * Also, `index.getDocuments` needs an exact limit on the amount of documents to return, so we build the map in batches.
     *
     * @returns {Promise<void>} Resolves when the synchronization is complete.
     * @returns {Promise} A promise that resolves when the synchronization is complete.
     *
     * @throws {Error} Throws an error if there's an issue with adding a document to MeiliSearch.
     */
    static async syncWithMeili() {
      try {
        let moreDocuments = true;
        // Retrieve all MongoDB documents from the collection as plain JavaScript objects.
        const mongoDocuments = await this.find().lean();
        const format = (doc) => _.pick(doc, attributesToIndex);

        // Helper function to format a document by selecting only the attributes to index
        // and omitting keys starting with '$'.
        const format = (doc) =>
          _.omitBy(_.pick(doc, attributesToIndex), (v, k) => k.startsWith('$'));

        // Build a map of MongoDB documents for quick lookup based on the primary key.
        // Prepare for comparison
        const mongoMap = new Map(mongoDocuments.map((doc) => [doc[primaryKey], format(doc)]));
        const indexMap = new Map();
        let offset = 0;
        const batchSize = 1000;

        // Fetch documents from the MeiliSearch index in batches.
        while (moreDocuments) {
          const batch = await index.getDocuments({ limit: batchSize, offset });

          if (batch.results.length === 0) {
            moreDocuments = false;
          }

          for (const doc of batch.results) {
            indexMap.set(doc[primaryKey], format(doc));
          }

          offset += batchSize;
        }

@@ -100,12 +70,13 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {

        const updateOps = [];

        // Process documents present in the MeiliSearch index.
        // Iterate over Meili index documents
        for (const [id, doc] of indexMap) {
          const update = {};
          update[primaryKey] = id;
          if (mongoMap.has(id)) {
            // If document exists in MongoDB, check for discrepancies in key fields.
            // Case: Update
            // If document also exists in MongoDB, would be update case
            if (
              (doc.text && doc.text !== mongoMap.get(id).text) ||
              (doc.title && doc.title !== mongoMap.get(id).title)
@@ -121,7 +92,8 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
              await index.addDocuments([doc]);
            }
          } else {
            // If the document does not exist in MongoDB, delete it from MeiliSearch.
            // Case: Delete
            // If document does not exist in MongoDB, its a delete case from meili index
            await index.deleteDocument(id);
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: false } } },
@@ -129,25 +101,24 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
          }
        }

        // Process documents present in MongoDB.
        // Iterate over MongoDB documents
        for (const [id, doc] of mongoMap) {
          const update = {};
          update[primaryKey] = id;
          // If the document is missing in the Meili index, add it.
          // Case: Insert
          // If document does not exist in Meili Index, Its an insert case
          if (!indexMap.has(id)) {
            await index.addDocuments([doc]);
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
            });
          } else if (doc._meiliIndex === false) {
            // If the document exists but is marked as not indexed, update the flag.
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
            });
          }
        }

        // Execute bulk update operations in MongoDB to update the _meiliIndex flags.
        if (updateOps.length > 0) {
          await this.collection.bulkWrite(updateOps);
          logger.debug(
@@ -161,47 +132,34 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
      }
    }

    /**
     * Updates settings for the MeiliSearch index.
     *
     * @param {Object} settings - The settings to update on the MeiliSearch index.
     * @returns {Promise<Object>} Promise resolving to the update result.
     */
    // Set one or more settings of the meili index
    static async setMeiliIndexSettings(settings) {
      return await index.updateSettings(settings);
    }

    /**
     * Searches the MeiliSearch index and optionally populates the results with data from MongoDB.
     *
     * @param {string} q - The search query.
     * @param {Object} params - Additional search parameters for MeiliSearch.
     * @param {boolean} populate - Whether to populate search hits with full MongoDB documents.
     * @returns {Promise<Object>} The search results with populated hits if requested.
     */
    // Search the index
    static async meiliSearch(q, params, populate) {
      const data = await index.search(q, params);

      // Populate hits with content from mongodb
      if (populate) {
        // Build a query using the primary key values from the search hits.
        // Find objects into mongodb matching `objectID` from Meili search
        const query = {};
        // query[primaryKey] = { $in: _.map(data.hits, primaryKey) };
        query[primaryKey] = _.map(data.hits, (hit) => cleanUpPrimaryKeyValue(hit[primaryKey]));
        // logger.debug('query', query);
        const hitsFromMongoose = await this.find(
          query,
          _.reduce(
            this.schema.obj,
            function (results, value, key) {
              return { ...results, [key]: 1 };
            },
            { _id: 1, __v: 1 },
          ),
        ).lean();

        // Build a projection object, including only keys that do not start with '$'.
        const projection = Object.keys(this.schema.obj).reduce(
          (results, key) => {
            if (!key.startsWith('$')) {
              results[key] = 1;
            }
            return results;
          },
          { _id: 1, __v: 1 },
        );

        // Retrieve the full documents from MongoDB.
        const hitsFromMongoose = await this.find(query, projection).lean();

        // Merge the MongoDB documents with the search hits.
        // Add additional data from mongodb into Meili search hits
        const populatedHits = data.hits.map(function (hit) {
          const query = {};
          query[primaryKey] = hit[primaryKey];
@@ -218,21 +176,10 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
      return data;
    }

    /**
     * Preprocesses the current document for indexing.
     *
     * This method:
     * - Picks only the defined attributes to index.
     * - Omits any keys starting with '$'.
     * - Replaces pipe characters ('|') in `conversationId` with '--'.
     * - Extracts and concatenates text from an array of content items.
     *
     * @returns {Object} The preprocessed object ready for indexing.
     */
    preprocessObjectForIndex() {
      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
        k.startsWith('$'),
      );
      const object = _.pick(this.toJSON(), attributesToIndex);
      // NOTE: MeiliSearch does not allow | in primary key, so we replace it with - for Bing convoIds
      // object.conversationId = object.conversationId.replace(/\|/g, '-');
      if (object.conversationId && object.conversationId.includes('|')) {
        object.conversationId = object.conversationId.replace(/\|/g, '--');
      }
@@ -248,53 +195,32 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
      return object;
    }

    /**
     * Adds the current document to the MeiliSearch index.
     *
     * The method preprocesses the document, adds it to MeiliSearch, and then updates
     * the MongoDB document's `_meiliIndex` flag to true.
     *
     * @returns {Promise<void>}
     */
    // Push new document to Meili
    async addObjectToMeili() {
      const object = this.preprocessObjectForIndex();
      try {
        // logger.debug('Adding document to Meili', object);
        await index.addDocuments([object]);
      } catch (error) {
        // Error handling can be enhanced as needed.
        logger.error('[addObjectToMeili] Error adding document to Meili', error);
        // logger.debug('Error adding document to Meili');
        // logger.error(error);
      }

      await this.collection.updateMany({ _id: this._id }, { $set: { _meiliIndex: true } });
    }

    /**
     * Updates the current document in the MeiliSearch index.
     *
     * @returns {Promise<void>}
     */
    // Update an existing document in Meili
    async updateObjectToMeili() {
      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
        k.startsWith('$'),
      );
      const object = _.pick(this.toJSON(), attributesToIndex);
      await index.updateDocuments([object]);
    }

    /**
     * Deletes the current document from the MeiliSearch index.
     *
     * @returns {Promise<void>}
     */
    // Delete a document from Meili
    async deleteObjectFromMeili() {
      await index.deleteDocument(this._id);
    }

    /**
     * Post-save hook to synchronize the document with MeiliSearch.
     *
     * If the document is already indexed (i.e. `_meiliIndex` is true), it updates it;
     * otherwise, it adds the document to the index.
     */
    // * schema.post('save')
    postSaveHook() {
      if (this._meiliIndex) {
        this.updateObjectToMeili();
@@ -303,24 +229,14 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
      }
    }

    /**
     * Post-update hook to update the document in MeiliSearch.
     *
     * This hook is triggered after a document update, ensuring that changes are
     * propagated to the MeiliSearch index if the document is indexed.
     */
    // * schema.post('update')
    postUpdateHook() {
      if (this._meiliIndex) {
        this.updateObjectToMeili();
      }
    }

    /**
     * Post-remove hook to delete the document from MeiliSearch.
     *
     * This hook is triggered after a document is removed, ensuring that the document
     * is also removed from the MeiliSearch index if it was previously indexed.
     */
    // * schema.post('remove')
    postRemoveHook() {
      if (this._meiliIndex) {
        this.deleteObjectFromMeili();
@@ -331,27 +247,11 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
  return MeiliMongooseModel;
};

/**
 * Mongoose plugin to synchronize MongoDB collections with a MeiliSearch index.
 *
 * This plugin:
 * - Validates the provided options.
 * - Adds a `_meiliIndex` field to the schema to track indexing status.
 * - Sets up a MeiliSearch client and creates an index if it doesn't already exist.
 * - Loads class methods for syncing, searching, and managing documents in MeiliSearch.
 * - Registers Mongoose hooks (post-save, post-update, post-remove, etc.) to maintain index consistency.
 *
 * @param {mongoose.Schema} schema - The Mongoose schema to which the plugin is applied.
 * @param {Object} options - Configuration options.
 * @param {string} options.host - The MeiliSearch host.
 * @param {string} options.apiKey - The MeiliSearch API key.
 * @param {string} options.indexName - The name of the MeiliSearch index.
 * @param {string} options.primaryKey - The primary key field for indexing.
 */
module.exports = function mongoMeili(schema, options) {
  // Validate Options for mongoMeili
  validateOptions(options);

  // Add _meiliIndex field to the schema to track if a document has been indexed in MeiliSearch.
  // Add meiliIndex to schema
  schema.add({
    _meiliIndex: {
      type: Boolean,
@@ -363,77 +263,69 @@ module.exports = function mongoMeili(schema, options) {

  const { host, apiKey, indexName, primaryKey } = options;

  // Setup the MeiliSearch client.
  // Setup MeiliSearch Client
  const client = new MeiliSearch({ host, apiKey });

  // Create the index asynchronously if it doesn't exist.
  // Asynchronously create the index
  client.createIndex(indexName, { primaryKey });

  // Setup the MeiliSearch index for this schema.
  // Setup the index to search for this schema
  const index = client.index(indexName);

  // Collect attributes from the schema that should be indexed.
  const attributesToIndex = [
    ..._.reduce(
      schema.obj,
      function (results, value, key) {
        return value.meiliIndex ? [...results, key] : results;
        // }, []), '_id'];
      },
      [],
    ),
  ];

  // Load the class methods into the schema.
  schema.loadClass(createMeiliMongooseModel({ index, indexName, client, attributesToIndex }));

  // Register Mongoose hooks to synchronize with MeiliSearch.

  // Post-save: synchronize after a document is saved.
  // Register hooks
  schema.post('save', function (doc) {
    doc.postSaveHook();
  });

  // Post-update: synchronize after a document is updated.
  schema.post('update', function (doc) {
    doc.postUpdateHook();
  });

  // Post-remove: synchronize after a document is removed.
  schema.post('remove', function (doc) {
    doc.postRemoveHook();
  });

  // Pre-deleteMany hook: remove corresponding documents from MeiliSearch when multiple documents are deleted.
  schema.pre('deleteMany', async function (next) {
    if (!meiliEnabled) {
      return next();
      next();
    }

    try {
      // Check if the schema has a "messages" field to determine if it's a conversation schema.
      if (Object.prototype.hasOwnProperty.call(schema.obj, 'messages')) {
        const convoIndex = client.index('convos');
        const deletedConvos = await mongoose.model('Conversation').find(this._conditions).lean();
        const promises = deletedConvos.map((convo) =>
          convoIndex.deleteDocument(convo.conversationId),
        );
        let promises = [];
        for (const convo of deletedConvos) {
          promises.push(convoIndex.deleteDocument(convo.conversationId));
        }
        await Promise.all(promises);
      }

      // Check if the schema has a "messageId" field to determine if it's a message schema.
      if (Object.prototype.hasOwnProperty.call(schema.obj, 'messageId')) {
        const messageIndex = client.index('messages');
        const deletedMessages = await mongoose.model('Message').find(this._conditions).lean();
        const promises = deletedMessages.map((message) =>
          messageIndex.deleteDocument(message.messageId),
        );
        let promises = [];
        for (const message of deletedMessages) {
          promises.push(messageIndex.deleteDocument(message.messageId));
        }
        await Promise.all(promises);
      }
      return next();
    } catch (error) {
      if (meiliEnabled) {
        logger.error(
          '[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion. Next startup may be slow due to syncing.',
          '[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion, next startup may be slow due to syncing',
          error,
        );
      }
@@ -441,19 +333,17 @@ module.exports = function mongoMeili(schema, options) {
    }
  });

  // Post-findOneAndUpdate hook: update MeiliSearch index after a document is updated via findOneAndUpdate.
  schema.post('findOneAndUpdate', async function (doc) {
    if (!meiliEnabled) {
      return;
    }

    // If the document is unfinished, do not update the index.
    if (doc.unfinished) {
      return;
    }

    let meiliDoc;
    // For conversation documents, try to fetch the document from the "convos" index.
    // Doc is a Conversation
    if (doc.messages) {
      try {
        meiliDoc = await client.index('convos').getDocument(doc.conversationId);
@@ -466,12 +356,10 @@ module.exports = function mongoMeili(schema, options) {
      }
    }

    // If the MeiliSearch document exists and the title is unchanged, do nothing.
    if (meiliDoc && meiliDoc.title === doc.title) {
      return;
    }

    // Otherwise, trigger a post-save hook to synchronize the document.
    doc.postSaveHook();
  });
};
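A sketch of wiring the plugin to a schema, following the options documented in the plugin's JSDoc above; the schema fields, index name, and require path are illustrative assumptions:

// Hypothetical sketch: apply the mongoMeili plugin to a schema. Fields
// marked `meiliIndex: true` become the indexed attributes, and the first
// such attribute serves as the MeiliSearch primary key.
const mongoose = require('mongoose');
const mongoMeili = require('~/models/plugins/mongoMeili'); // path assumed

const exampleSchema = mongoose.Schema({
  exampleId: { type: String, meiliIndex: true },
  title: { type: String, meiliIndex: true },
});

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  exampleSchema.plugin(mongoMeili, {
    host: process.env.MEILI_HOST,
    apiKey: process.env.MEILI_MASTER_KEY,
    indexName: 'examples', // created automatically if missing
    primaryKey: 'exampleId',
  });
}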
60
api/models/schema/action.js
Normal file
@@ -0,0 +1,60 @@
const mongoose = require('mongoose');

const { Schema } = mongoose;

const AuthSchema = new Schema(
  {
    authorization_type: String,
    custom_auth_header: String,
    type: {
      type: String,
      enum: ['service_http', 'oauth', 'none'],
    },
    authorization_content_type: String,
    authorization_url: String,
    client_url: String,
    scope: String,
    token_exchange_method: {
      type: String,
      enum: ['default_post', 'basic_auth_header', null],
    },
  },
  { _id: false },
);

const actionSchema = new Schema({
  user: {
    type: mongoose.Schema.Types.ObjectId,
    ref: 'User',
    index: true,
    required: true,
  },
  action_id: {
    type: String,
    index: true,
    required: true,
  },
  type: {
    type: String,
    default: 'action_prototype',
  },
  settings: Schema.Types.Mixed,
  agent_id: String,
  assistant_id: String,
  metadata: {
    api_key: String, // private, encrypted
    auth: AuthSchema,
    domain: {
      type: String,
      required: true,
    },
    // json_schema: Schema.Types.Mixed,
    privacy_policy_url: String,
    raw_spec: String,
    oauth_client_id: String, // private, encrypted
    oauth_client_secret: String, // private, encrypted
  },
});
// }, { minimize: false }); // Prevent removal of empty objects

module.exports = actionSchema;
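The new file exports only the schema, so a model would be compiled wherever it is consumed; a minimal sketch (the model name `Action` is an assumption, not shown in this diff):

// Hypothetical sketch: compile a model from the exported schema, reusing an
// existing compiled model if one is already registered.
const mongoose = require('mongoose');
const actionSchema = require('~/models/schema/action');

const Action = mongoose.models.Action || mongoose.model('Action', actionSchema);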
@@ -1,34 +1,6 @@
import { Schema, Document, Types } from 'mongoose';
export interface IAgent extends Omit<Document, 'model'> {
  id: string;
  name?: string;
  description?: string;
  instructions?: string;
  avatar?: {
    filepath: string;
    source: string;
  };
  provider: string;
  model: string;
  model_parameters?: Record<string, unknown>;
  artifacts?: string;
  access_level?: number;
  recursion_limit?: number;
  tools?: string[];
  tool_kwargs?: Array<unknown>;
  actions?: string[];
  author: Types.ObjectId;
  authorName?: string;
  hide_sequential_outputs?: boolean;
  end_after_tools?: boolean;
  agent_ids?: string[];
  isCollaborative?: boolean;
  conversation_starters?: string[];
  tool_resources?: unknown;
  projectIds?: Types.ObjectId[];
}
const mongoose = require('mongoose');

const agentSchema = new Schema<IAgent>(
const agentSchema = mongoose.Schema(
  {
    id: {
      type: String,
@@ -46,7 +18,10 @@ const agentSchema = new Schema<IAgent>(
      type: String,
    },
    avatar: {
      type: Schema.Types.Mixed,
      type: {
        filepath: String,
        source: String,
      },
      default: undefined,
    },
    provider: {
@@ -60,28 +35,22 @@ const agentSchema = new Schema<IAgent>(
    model_parameters: {
      type: Object,
    },
    artifacts: {
      type: String,
    },
    access_level: {
      type: Number,
    },
    recursion_limit: {
      type: Number,
    },
    tools: {
      type: [String],
      default: undefined,
    },
    tool_kwargs: {
      type: [{ type: Schema.Types.Mixed }],
      type: [{ type: mongoose.Schema.Types.Mixed }],
    },
    actions: {
      type: [String],
      default: undefined,
    },
    author: {
      type: Schema.Types.ObjectId,
      type: mongoose.Schema.Types.ObjectId,
      ref: 'User',
      required: true,
    },
@@ -107,11 +76,11 @@ const agentSchema = new Schema<IAgent>(
      default: [],
    },
    tool_resources: {
      type: Schema.Types.Mixed,
      type: mongoose.Schema.Types.Mixed,
      default: {},
    },
    projectIds: {
      type: [Schema.Types.ObjectId],
      type: [mongoose.Schema.Types.ObjectId],
      ref: 'Project',
      index: true,
    },
@@ -121,4 +90,4 @@ const agentSchema = new Schema<IAgent>(
  },
);

export default agentSchema;
module.exports = agentSchema;
@@ -1,23 +1,9 @@
import { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');

export interface IAssistant extends Document {
  user: Types.ObjectId;
  assistant_id: string;
  avatar?: {
    filepath: string;
    source: string;
  };
  conversation_starters?: string[];
  access_level?: number;
  file_ids?: string[];
  actions?: string[];
  append_current_datetime?: boolean;
}

const assistantSchema = new Schema<IAssistant>(
const assistantSchema = mongoose.Schema(
  {
    user: {
      type: Schema.Types.ObjectId,
      type: mongoose.Schema.Types.ObjectId,
      ref: 'User',
      required: true,
    },
@@ -27,7 +13,10 @@ const assistantSchema = new Schema<IAssistant>(
      required: true,
    },
    avatar: {
      type: Schema.Types.Mixed,
      type: {
        filepath: String,
        source: String,
      },
      default: undefined,
    },
    conversation_starters: {
@@ -49,4 +38,4 @@ const assistantSchema = new Schema<IAssistant>(
  },
);

export default assistantSchema;
module.exports = assistantSchema;
17
api/models/schema/balance.js
Normal file
@@ -0,0 +1,17 @@
const mongoose = require('mongoose');

const balanceSchema = mongoose.Schema({
  user: {
    type: mongoose.Schema.Types.ObjectId,
    ref: 'User',
    index: true,
    required: true,
  },
  // 1000 tokenCredits = 1 mill ($0.001 USD)
  tokenCredits: {
    type: Number,
    default: 0,
  },
});

module.exports = balanceSchema;
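Following the comment above, 1000 tokenCredits equal one mill ($0.001 USD), so one million tokenCredits equal $1.00; a small helper illustrating the conversion (not part of this commit):

// Hypothetical helper derived from the comment above:
// 1000 tokenCredits = 1 mill = $0.001 USD, so 1,000,000 tokenCredits = $1.00.
const CREDITS_PER_USD = 1_000_000;

function tokenCreditsToUSD(tokenCredits) {
  return tokenCredits / CREDITS_PER_USD;
}

tokenCreditsToUSD(20_000); // 0.02, i.e. two cents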
@@ -1,15 +1,6 @@
import { Schema, Document } from 'mongoose';
const mongoose = require('mongoose');

export interface IBanner extends Document {
  bannerId: string;
  message: string;
  displayFrom: Date;
  displayTo?: Date;
  type: 'banner' | 'popup';
  isPublic: boolean;
}

const bannerSchema = new Schema<IBanner>(
const bannerSchema = mongoose.Schema(
  {
    bannerId: {
      type: String,
@@ -37,7 +28,9 @@ const bannerSchema = new Schema<IBanner>(
      default: false,
    },
  },

  { timestamps: true },
);

export default bannerSchema;
const Banner = mongoose.model('Banner', bannerSchema);
module.exports = Banner;
19
api/models/schema/categories.js
Normal file
@@ -0,0 +1,19 @@
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

const categoriesSchema = new Schema({
  label: {
    type: String,
    required: true,
    unique: true,
  },
  value: {
    type: String,
    required: true,
    unique: true,
  },
});

const categories = mongoose.model('categories', categoriesSchema);

module.exports = { Categories: categories };
32
api/models/schema/conversationTagSchema.js
Normal file
@@ -0,0 +1,32 @@
const mongoose = require('mongoose');

const conversationTagSchema = mongoose.Schema(
  {
    tag: {
      type: String,
      index: true,
    },
    user: {
      type: String,
      index: true,
    },
    description: {
      type: String,
      index: true,
    },
    count: {
      type: Number,
      default: 0,
    },
    position: {
      type: Number,
      default: 0,
      index: true,
    },
  },
  { timestamps: true },
);

conversationTagSchema.index({ tag: 1, user: 1 }, { unique: true });

module.exports = mongoose.model('ConversationTag', conversationTagSchema);
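The compound unique index above allows the same tag string under different users while rejecting duplicates per user; a brief illustrative sketch (values are hypothetical):

// Hypothetical demonstration of the { tag: 1, user: 1 } unique index:
const ConversationTag = require('~/models/schema/conversationTagSchema'); // exports the compiled model above

async function demo() {
  await ConversationTag.create({ tag: 'work', user: 'userA' }); // ok
  await ConversationTag.create({ tag: 'work', user: 'userB' }); // ok: different user
  await ConversationTag.create({ tag: 'work', user: 'userA' }); // throws E11000 duplicate key
}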
@@ -1,18 +1,63 @@
const mongoose = require('mongoose');
const mongoMeili = require('../plugins/mongoMeili');

const { convoSchema } = require('@librechat/data-schemas');
const { conversationPreset } = require('./defaults');
const convoSchema = mongoose.Schema(
  {
    conversationId: {
      type: String,
      unique: true,
      required: true,
      index: true,
      meiliIndex: true,
    },
    title: {
      type: String,
      default: 'New Chat',
      meiliIndex: true,
    },
    user: {
      type: String,
      index: true,
    },
    messages: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Message' }],
    // google only
    examples: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
    agentOptions: {
      type: mongoose.Schema.Types.Mixed,
    },
    ...conversationPreset,
    agent_id: {
      type: String,
    },
    tags: {
      type: [String],
      default: [],
      meiliIndex: true,
    },
    files: {
      type: [String],
    },
    expiredAt: {
      type: Date,
    },
  },
  { timestamps: true },
);

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  convoSchema.plugin(mongoMeili, {
    host: process.env.MEILI_HOST,
    apiKey: process.env.MEILI_MASTER_KEY,
    /** Note: Will get created automatically if it doesn't exist already */
    indexName: 'convos',
    indexName: 'convos', // Will get created automatically if it doesn't exist already
    primaryKey: 'conversationId',
  });
}

// Create TTL index
convoSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
convoSchema.index({ createdAt: 1, updatedAt: 1 });
convoSchema.index({ conversationId: 1, user: 1 }, { unique: true });

const Conversation = mongoose.models.Conversation || mongoose.model('Conversation', convoSchema);

module.exports = Conversation;
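With `expireAfterSeconds: 0`, MongoDB's TTL monitor deletes a conversation once its `expiredAt` timestamp has passed; a sketch of scheduling a conversation for expiry (the require path and 24-hour window are illustrative, not from this commit):

// Hypothetical sketch: mark a conversation for automatic removal by the
// TTL index above. Once `expiredAt` is in the past, the TTL monitor
// deletes the document on its next pass.
const Conversation = require('~/models/schema/convoSchema'); // path assumed

async function expireConversationIn24h(conversationId, user) {
  const expiredAt = new Date(Date.now() + 24 * 60 * 60 * 1000); // illustrative window
  return Conversation.findOneAndUpdate(
    { conversationId, user },
    { $set: { expiredAt } },
    { new: true },
  ).lean();
}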
@@ -1,7 +1,4 @@
import { Schema } from 'mongoose';

// @ts-ignore
export const conversationPreset = {
const conversationPreset = {
  // endpoint: [azureOpenAI, openAI, anthropic, chatGPTBrowser]
  endpoint: {
    type: String,
@@ -27,7 +24,6 @@ export const conversationPreset = {
    required: false,
  },
  // for google only
  examples: { type: [{ type: Schema.Types.Mixed }], default: undefined },
  modelLabel: {
    type: String,
    required: false,
@@ -57,10 +53,6 @@ export const conversationPreset = {
    type: Number,
    required: false,
  },
  maxTokens: {
    type: Number,
    required: false,
  },
  presence_penalty: {
    type: Number,
    required: false,
@@ -78,12 +70,6 @@ export const conversationPreset = {
  promptCache: {
    type: Boolean,
  },
  thinking: {
    type: Boolean,
  },
  thinkingBudget: {
    type: Number,
  },
  system: {
    type: String,
  },
@@ -136,3 +122,57 @@ export const conversationPreset = {
    type: String,
  },
};

const agentOptions = {
  model: {
    type: String,
    required: false,
  },
  // for azureOpenAI, openAI only
  chatGptLabel: {
    type: String,
    required: false,
  },
  modelLabel: {
    type: String,
    required: false,
  },
  promptPrefix: {
    type: String,
    required: false,
  },
  temperature: {
    type: Number,
    required: false,
  },
  top_p: {
    type: Number,
    required: false,
  },
  // for google only
  topP: {
    type: Number,
    required: false,
  },
  topK: {
    type: Number,
    required: false,
  },
  maxOutputTokens: {
    type: Number,
    required: false,
  },
  presence_penalty: {
    type: Number,
    required: false,
  },
  frequency_penalty: {
    type: Number,
    required: false,
  },
};

module.exports = {
  conversationPreset,
  agentOptions,
};
111 api/models/schema/fileSchema.js Normal file
@@ -0,0 +1,111 @@
const { FileSources } = require('librechat-data-provider');
const mongoose = require('mongoose');

/**
 * @typedef {Object} MongoFile
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {number} [__v] - MongoDB Version Key
 * @property {ObjectId} user - User ID
 * @property {string} [conversationId] - Optional conversation ID
 * @property {string} file_id - File identifier
 * @property {string} [temp_file_id] - Temporary File identifier
 * @property {number} bytes - Size of the file in bytes
 * @property {string} filename - Name of the file
 * @property {string} filepath - Location of the file
 * @property {'file'} object - Type of object, always 'file'
 * @property {string} type - Type of file
 * @property {number} [usage=0] - Number of uses of the file
 * @property {string} [context] - Context of the file origin
 * @property {boolean} [embedded=false] - Whether or not the file is embedded in vector db
 * @property {string} [model] - The model to identify the group region of the file (for Azure OpenAI hosting)
 * @property {string} [source] - The source of the file (e.g., from FileSources)
 * @property {number} [width] - Optional width of the file
 * @property {number} [height] - Optional height of the file
 * @property {Object} [metadata] - Metadata related to the file
 * @property {string} [metadata.fileIdentifier] - Unique identifier for the file in metadata
 * @property {Date} [expiresAt] - Optional expiration date of the file
 * @property {Date} [createdAt] - Date when the file was created
 * @property {Date} [updatedAt] - Date when the file was updated
 */

/** @type {MongooseSchema<MongoFile>} */
const fileSchema = mongoose.Schema(
  {
    user: {
      type: mongoose.Schema.Types.ObjectId,
      ref: 'User',
      index: true,
      required: true,
    },
    conversationId: {
      type: String,
      ref: 'Conversation',
      index: true,
    },
    file_id: {
      type: String,
      // required: true,
      index: true,
    },
    temp_file_id: {
      type: String,
      // required: true,
    },
    bytes: {
      type: Number,
      required: true,
    },
    filename: {
      type: String,
      required: true,
    },
    filepath: {
      type: String,
      required: true,
    },
    object: {
      type: String,
      required: true,
      default: 'file',
    },
    embedded: {
      type: Boolean,
    },
    type: {
      type: String,
      required: true,
    },
    context: {
      type: String,
      // required: true,
    },
    usage: {
      type: Number,
      required: true,
      default: 0,
    },
    source: {
      type: String,
      default: FileSources.local,
    },
    model: {
      type: String,
    },
    width: Number,
    height: Number,
    metadata: {
      fileIdentifier: String,
    },
    expiresAt: {
      type: Date,
      expires: 3600, // 1 hour in seconds
    },
  },
  {
    timestamps: true,
  },
);

fileSchema.index({ createdAt: 1, updatedAt: 1 });

module.exports = fileSchema;
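The `expiresAt` field above uses Mongoose's field-level `expires` shorthand, which builds a TTL index with `expireAfterSeconds: 3600`: a record is removed roughly one hour after the Date it stores. A sketch of the resulting behavior (model compilation and helper are illustrative, not from the diff):

// Sketch only: compiling the schema and creating a temporary upload record.
// Because of `expires: 3600`, MongoDB reaps the document about one hour after
// the Date stored in `expiresAt`; records without `expiresAt` persist.
const mongoose = require('mongoose');
const fileSchema = require('./fileSchema'); // assumed relative path

const File = mongoose.models.File || mongoose.model('File', fileSchema);

async function createTempFile(fields) {
  return File.create({ ...fields, expiresAt: new Date() }); // reaped ~1h later
}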
@@ -1,13 +1,6 @@
import mongoose, { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');

export interface IKey extends Document {
  userId: Types.ObjectId;
  name: string;
  value: string;
  expiresAt?: Date;
}

const keySchema: Schema<IKey> = new Schema({
const keySchema = mongoose.Schema({
  userId: {
    type: mongoose.Schema.Types.ObjectId,
    ref: 'User',
@@ -28,4 +21,4 @@ const keySchema: Schema<IKey> = new Schema({

keySchema.index({ expiresAt: 1 }, { expireAfterSeconds: 0 });

export default keySchema;
module.exports = keySchema;
@@ -1,6 +1,145 @@
const mongoose = require('mongoose');
const mongoMeili = require('~/models/plugins/mongoMeili');
const { messageSchema } = require('@librechat/data-schemas');
const messageSchema = mongoose.Schema(
  {
    messageId: {
      type: String,
      unique: true,
      required: true,
      index: true,
      meiliIndex: true,
    },
    conversationId: {
      type: String,
      index: true,
      required: true,
      meiliIndex: true,
    },
    user: {
      type: String,
      index: true,
      required: true,
      default: null,
    },
    model: {
      type: String,
      default: null,
    },
    endpoint: {
      type: String,
    },
    conversationSignature: {
      type: String,
    },
    clientId: {
      type: String,
    },
    invocationId: {
      type: Number,
    },
    parentMessageId: {
      type: String,
    },
    tokenCount: {
      type: Number,
    },
    summaryTokenCount: {
      type: Number,
    },
    sender: {
      type: String,
      meiliIndex: true,
    },
    text: {
      type: String,
      meiliIndex: true,
    },
    summary: {
      type: String,
    },
    isCreatedByUser: {
      type: Boolean,
      required: true,
      default: false,
    },
    unfinished: {
      type: Boolean,
      default: false,
    },
    error: {
      type: Boolean,
      default: false,
    },
    finish_reason: {
      type: String,
    },
    _meiliIndex: {
      type: Boolean,
      required: false,
      select: false,
      default: false,
    },
    files: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
    plugin: {
      type: {
        latest: {
          type: String,
          required: false,
        },
        inputs: {
          type: [mongoose.Schema.Types.Mixed],
          required: false,
          default: undefined,
        },
        outputs: {
          type: String,
          required: false,
        },
      },
      default: undefined,
    },
    plugins: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
    content: {
      type: [{ type: mongoose.Schema.Types.Mixed }],
      default: undefined,
      meiliIndex: true,
    },
    thread_id: {
      type: String,
    },
    /* frontend components */
    iconURL: {
      type: String,
    },
    attachments: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
    /*
    attachments: {
      type: [
        {
          file_id: String,
          filename: String,
          filepath: String,
          expiresAt: Date,
          width: Number,
          height: Number,
          type: String,
          conversationId: String,
          messageId: {
            type: String,
            required: true,
          },
          toolCallId: String,
        },
      ],
      default: undefined,
    },
    */
    expiredAt: {
      type: Date,
    },
  },
  { timestamps: true },
);

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  messageSchema.plugin(mongoMeili, {
@@ -10,7 +149,11 @@ if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
    primaryKey: 'messageId',
  });
}
messageSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
messageSchema.index({ createdAt: 1 });
messageSchema.index({ messageId: 1, user: 1 }, { unique: true });

/** @type {mongoose.Model<TMessage>} */
const Message = mongoose.models.Message || mongoose.model('Message', messageSchema);

module.exports = Message;
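The compound unique index `{ messageId: 1, user: 1 }` above scopes message identity per user, which makes an upsert keyed on both fields a safe, idempotent write. A sketch under that assumption (the helper name is illustrative):

// Sketch: an idempotent save keyed on the { messageId, user } unique index;
// repeated calls update the same document instead of raising duplicate-key
// (E11000) errors.
async function saveMessage(Message, { messageId, user, ...fields }) {
  return Message.findOneAndUpdate(
    { messageId, user },
    { $set: fields },
    { upsert: true, new: true },
  );
}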
@@ -1,5 +1,25 @@
const mongoose = require('mongoose');
const { pluginAuthSchema } = require('@librechat/data-schemas');

const pluginAuthSchema = mongoose.Schema(
  {
    authField: {
      type: String,
      required: true,
    },
    value: {
      type: String,
      required: true,
    },
    userId: {
      type: String,
      required: true,
    },
    pluginKey: {
      type: String,
    },
  },
  { timestamps: true },
);

const PluginAuth = mongoose.models.Plugin || mongoose.model('PluginAuth', pluginAuthSchema);
@@ -1,5 +1,38 @@
const mongoose = require('mongoose');
const { presetSchema } = require('@librechat/data-schemas');
const { conversationPreset } = require('./defaults');
const presetSchema = mongoose.Schema(
  {
    presetId: {
      type: String,
      unique: true,
      required: true,
      index: true,
    },
    title: {
      type: String,
      default: 'New Chat',
      meiliIndex: true,
    },
    user: {
      type: String,
      default: null,
    },
    defaultPreset: {
      type: Boolean,
    },
    order: {
      type: Number,
    },
    // google only
    examples: [{ type: mongoose.Schema.Types.Mixed }],
    ...conversationPreset,
    agentOptions: {
      type: mongoose.Schema.Types.Mixed,
      default: null,
    },
  },
  { timestamps: true },
);

const Preset = mongoose.models.Preset || mongoose.model('Preset', presetSchema);
35 api/models/schema/projectSchema.js Normal file
@@ -0,0 +1,35 @@
const { Schema } = require('mongoose');

/**
 * @typedef {Object} MongoProject
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {string} name - The name of the project
 * @property {ObjectId[]} promptGroupIds - Array of PromptGroup IDs associated with the project
 * @property {Date} [createdAt] - Date when the project was created (added by timestamps)
 * @property {Date} [updatedAt] - Date when the project was last updated (added by timestamps)
 */

const projectSchema = new Schema(
  {
    name: {
      type: String,
      required: true,
      index: true,
    },
    promptGroupIds: {
      type: [Schema.Types.ObjectId],
      ref: 'PromptGroup',
      default: [],
    },
    agentIds: {
      type: [String],
      ref: 'Agent',
      default: [],
    },
  },
  {
    timestamps: true,
  },
);

module.exports = projectSchema;
118 api/models/schema/promptSchema.js Normal file
@@ -0,0 +1,118 @@
const mongoose = require('mongoose');
const { Constants } = require('librechat-data-provider');
const Schema = mongoose.Schema;

/**
 * @typedef {Object} MongoPromptGroup
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {string} name - The name of the prompt group
 * @property {ObjectId} author - The author of the prompt group
 * @property {ObjectId} [projectId=null] - The project ID of the prompt group
 * @property {ObjectId} [productionId=null] - The ID of the prompt group's production prompt
 * @property {string} authorName - The name of the author of the prompt group
 * @property {number} [numberOfGenerations=0] - Number of generations the prompt group has
 * @property {string} [oneliner=''] - Oneliner description of the prompt group
 * @property {string} [category=''] - Category of the prompt group
 * @property {string} [command] - Command for the prompt group
 * @property {Date} [createdAt] - Date when the prompt group was created (added by timestamps)
 * @property {Date} [updatedAt] - Date when the prompt group was last updated (added by timestamps)
 */

const promptGroupSchema = new Schema(
  {
    name: {
      type: String,
      required: true,
      index: true,
    },
    numberOfGenerations: {
      type: Number,
      default: 0,
    },
    oneliner: {
      type: String,
      default: '',
    },
    category: {
      type: String,
      default: '',
      index: true,
    },
    projectIds: {
      type: [Schema.Types.ObjectId],
      ref: 'Project',
      index: true,
    },
    productionId: {
      type: Schema.Types.ObjectId,
      ref: 'Prompt',
      required: true,
      index: true,
    },
    author: {
      type: Schema.Types.ObjectId,
      ref: 'User',
      required: true,
      index: true,
    },
    authorName: {
      type: String,
      required: true,
    },
    command: {
      type: String,
      index: true,
      validate: {
        validator: function (v) {
          return v === undefined || v === null || v === '' || /^[a-z0-9-]+$/.test(v);
        },
        message: (props) =>
          `${props.value} is not a valid command. Only lowercase alphanumeric characters and hyphens (-) are allowed.`,
      },
      maxlength: [
        Constants.COMMANDS_MAX_LENGTH,
        `Command cannot be longer than ${Constants.COMMANDS_MAX_LENGTH} characters`,
      ],
    },
  },
  {
    timestamps: true,
  },
);

const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);

const promptSchema = new Schema(
  {
    groupId: {
      type: Schema.Types.ObjectId,
      ref: 'PromptGroup',
      required: true,
      index: true,
    },
    author: {
      type: Schema.Types.ObjectId,
      ref: 'User',
      required: true,
    },
    prompt: {
      type: String,
      required: true,
    },
    type: {
      type: String,
      enum: ['text', 'chat'],
      required: true,
    },
  },
  {
    timestamps: true,
  },
);

const Prompt = mongoose.model('Prompt', promptSchema);

promptSchema.index({ createdAt: 1, updatedAt: 1 });
promptGroupSchema.index({ createdAt: 1, updatedAt: 1 });

module.exports = { Prompt, PromptGroup };
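The `command` validator above accepts only empty values or strings of lowercase alphanumerics and hyphens. A small standalone restatement of its effect (illustration only, not code from the diff):

// Restates the validator's predicate for illustration.
const isValidCommand = (v) =>
  v === undefined || v === null || v === '' || /^[a-z0-9-]+$/.test(v);

console.log(isValidCommand('code-review-2')); // true
console.log(isValidCommand('Code Review'));   // false: uppercase and space
console.log(isValidCommand(''));              // true: empty commands are allowed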
55 api/models/schema/roleSchema.js Normal file
@@ -0,0 +1,55 @@
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const mongoose = require('mongoose');

const roleSchema = new mongoose.Schema({
  name: {
    type: String,
    required: true,
    unique: true,
    index: true,
  },
  [PermissionTypes.BOOKMARKS]: {
    [Permissions.USE]: {
      type: Boolean,
      default: true,
    },
  },
  [PermissionTypes.PROMPTS]: {
    [Permissions.SHARED_GLOBAL]: {
      type: Boolean,
      default: false,
    },
    [Permissions.USE]: {
      type: Boolean,
      default: true,
    },
    [Permissions.CREATE]: {
      type: Boolean,
      default: true,
    },
  },
  [PermissionTypes.AGENTS]: {
    [Permissions.SHARED_GLOBAL]: {
      type: Boolean,
      default: false,
    },
    [Permissions.USE]: {
      type: Boolean,
      default: true,
    },
    [Permissions.CREATE]: {
      type: Boolean,
      default: true,
    },
  },
  [PermissionTypes.MULTI_CONVO]: {
    [Permissions.USE]: {
      type: Boolean,
      default: true,
    },
  },
});

const Role = mongoose.model('Role', roleSchema);

module.exports = Role;
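Each role document nests booleans under computed `[PermissionTypes.X][Permissions.Y]` keys, so a permission check reads them back along the same path. A sketch (the helper is hypothetical):

// Hypothetical helper: checks whether a named role may USE a permission type.
const { PermissionTypes, Permissions } = require('librechat-data-provider');

async function roleCanUse(Role, roleName, permType = PermissionTypes.PROMPTS) {
  const role = await Role.findOne({ name: roleName }).lean();
  return Boolean(role?.[permType]?.[Permissions.USE]);
}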
20 api/models/schema/session.js Normal file
@@ -0,0 +1,20 @@
const mongoose = require('mongoose');

const sessionSchema = mongoose.Schema({
  refreshTokenHash: {
    type: String,
    required: true,
  },
  expiration: {
    type: Date,
    required: true,
    expires: 0,
  },
  user: {
    type: mongoose.Schema.Types.ObjectId,
    ref: 'User',
    required: true,
  },
});

module.exports = sessionSchema;
@@ -1,17 +1,6 @@
import mongoose, { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');

export interface ISharedLink extends Document {
  conversationId: string;
  title?: string;
  user?: string;
  messages?: Types.ObjectId[];
  shareId?: string;
  isPublic: boolean;
  createdAt?: Date;
  updatedAt?: Date;
}

const shareSchema: Schema<ISharedLink> = new Schema(
const shareSchema = mongoose.Schema(
  {
    conversationId: {
      type: String,
@@ -38,4 +27,4 @@ const shareSchema: Schema<ISharedLink> = new Schema(
  { timestamps: true },
);

export default shareSchema;
module.exports = mongoose.model('SharedLink', shareSchema);
30 api/models/schema/tokenSchema.js Normal file
@@ -0,0 +1,30 @@
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

const tokenSchema = new Schema({
  userId: {
    type: Schema.Types.ObjectId,
    required: true,
    ref: 'user',
  },
  email: {
    type: String,
  },
  token: {
    type: String,
    required: true,
  },
  createdAt: {
    type: Date,
    required: true,
    default: Date.now,
  },
  expiresAt: {
    type: Date,
    required: true,
  },
});

tokenSchema.index({ expiresAt: 1 }, { expireAfterSeconds: 0 });

module.exports = tokenSchema;
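Unlike the file schema's fixed `expires` offset, `tokenSchema` stores an absolute `expiresAt` and pairs it with an `expireAfterSeconds: 0` index, so each token chooses its own lifetime. A sketch of issuing a short-lived token (helper name and TTL value are illustrative):

// Sketch: a 15-minute token; the TTL index deletes the document automatically
// once `expiresAt` passes, so no cleanup job is needed.
async function issueToken(Token, userId, tokenHash, ttlMinutes = 15) {
  return Token.create({
    userId,
    token: tokenHash, // typically a hash, not the raw secret
    expiresAt: new Date(Date.now() + ttlMinutes * 60 * 1000),
  });
}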
54 api/models/schema/toolCallSchema.js Normal file
@@ -0,0 +1,54 @@
const mongoose = require('mongoose');

/**
 * @typedef {Object} ToolCallData
 * @property {string} conversationId - The ID of the conversation
 * @property {string} messageId - The ID of the message
 * @property {string} toolId - The ID of the tool
 * @property {string | ObjectId} user - The user's ObjectId
 * @property {unknown} [result] - Optional result data
 * @property {TAttachment[]} [attachments] - Optional attachments data
 * @property {number} [blockIndex] - Optional code block index
 * @property {number} [partIndex] - Optional part index
 */

/** @type {MongooseSchema<ToolCallData>} */
const toolCallSchema = mongoose.Schema(
  {
    conversationId: {
      type: String,
      required: true,
    },
    messageId: {
      type: String,
      required: true,
    },
    toolId: {
      type: String,
      required: true,
    },
    user: {
      type: mongoose.Schema.Types.ObjectId,
      ref: 'User',
      required: true,
    },
    result: {
      type: mongoose.Schema.Types.Mixed,
    },
    attachments: {
      type: mongoose.Schema.Types.Mixed,
    },
    blockIndex: {
      type: Number,
    },
    partIndex: {
      type: Number,
    },
  },
  { timestamps: true },
);

toolCallSchema.index({ messageId: 1, user: 1 });
toolCallSchema.index({ conversationId: 1, user: 1 });

module.exports = mongoose.model('ToolCall', toolCallSchema);
@@ -1,24 +1,6 @@
import mongoose, { Schema, Document, Types } from 'mongoose';
const mongoose = require('mongoose');

// @ts-ignore
export interface ITransaction extends Document {
  user: Types.ObjectId;
  conversationId?: string;
  tokenType: 'prompt' | 'completion' | 'credits';
  model?: string;
  context?: string;
  valueKey?: string;
  rate?: number;
  rawAmount?: number;
  tokenValue?: number;
  inputTokens?: number;
  writeTokens?: number;
  readTokens?: number;
  createdAt?: Date;
  updatedAt?: Date;
}

const transactionSchema: Schema<ITransaction> = new Schema(
const transactionSchema = mongoose.Schema(
  {
    user: {
      type: mongoose.Schema.Types.ObjectId,
@@ -57,4 +39,4 @@ const transactionSchema: Schema<ITransaction> = new Schema(
  },
);

export default transactionSchema;
module.exports = transactionSchema;
140 api/models/schema/userSchema.js Normal file
@@ -0,0 +1,140 @@
const mongoose = require('mongoose');
const { SystemRoles } = require('librechat-data-provider');

/**
 * @typedef {Object} MongoSession
 * @property {string} [refreshToken] - The refresh token
 */

/**
 * @typedef {Object} MongoUser
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {string} [name] - The user's name
 * @property {string} [username] - The user's username, in lowercase
 * @property {string} email - The user's email address
 * @property {boolean} emailVerified - Whether the user's email is verified
 * @property {string} [password] - The user's password, trimmed with 8-128 characters
 * @property {string} [avatar] - The URL of the user's avatar
 * @property {string} provider - The provider of the user's account (e.g., 'local', 'google')
 * @property {string} [role='USER'] - The role of the user
 * @property {string} [googleId] - Optional Google ID for the user
 * @property {string} [facebookId] - Optional Facebook ID for the user
 * @property {string} [openidId] - Optional OpenID ID for the user
 * @property {string} [ldapId] - Optional LDAP ID for the user
 * @property {string} [githubId] - Optional GitHub ID for the user
 * @property {string} [discordId] - Optional Discord ID for the user
 * @property {string} [appleId] - Optional Apple ID for the user
 * @property {Array} [plugins=[]] - List of plugins used by the user
 * @property {Array.<MongoSession>} [refreshToken] - List of sessions with refresh tokens
 * @property {Date} [expiresAt] - Optional expiration date of the user document
 * @property {Date} [createdAt] - Date when the user was created (added by timestamps)
 * @property {Date} [updatedAt] - Date when the user was last updated (added by timestamps)
 */

/** @type {MongooseSchema<MongoSession>} */
const Session = mongoose.Schema({
  refreshToken: {
    type: String,
    default: '',
  },
});

/** @type {MongooseSchema<MongoUser>} */
const userSchema = mongoose.Schema(
  {
    name: {
      type: String,
    },
    username: {
      type: String,
      lowercase: true,
      default: '',
    },
    email: {
      type: String,
      required: [true, 'can\'t be blank'],
      lowercase: true,
      unique: true,
      match: [/\S+@\S+\.\S+/, 'is invalid'],
      index: true,
    },
    emailVerified: {
      type: Boolean,
      required: true,
      default: false,
    },
    password: {
      type: String,
      trim: true,
      minlength: 8,
      maxlength: 128,
    },
    avatar: {
      type: String,
      required: false,
    },
    provider: {
      type: String,
      required: true,
      default: 'local',
    },
    role: {
      type: String,
      default: SystemRoles.USER,
    },
    googleId: {
      type: String,
      unique: true,
      sparse: true,
    },
    facebookId: {
      type: String,
      unique: true,
      sparse: true,
    },
    openidId: {
      type: String,
      unique: true,
      sparse: true,
    },
    ldapId: {
      type: String,
      unique: true,
      sparse: true,
    },
    githubId: {
      type: String,
      unique: true,
      sparse: true,
    },
    discordId: {
      type: String,
      unique: true,
      sparse: true,
    },
    appleId: {
      type: String,
      unique: true,
      sparse: true,
    },
    plugins: {
      type: Array,
      default: [],
    },
    refreshToken: {
      type: [Session],
    },
    expiresAt: {
      type: Date,
      expires: 604800, // 7 days in seconds
    },
    termsAccepted: {
      type: Boolean,
      default: false,
    },
  },
  { timestamps: true },
);

module.exports = userSchema;
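The OAuth ID fields above are declared `unique: true, sparse: true`, so uniqueness is enforced only for documents that actually carry the field; any number of local-only accounts with no `googleId` (or `githubId`, and so on) can coexist. A sketch of a find-or-create flow relying on that (model compilation and helper are illustrative):

// Sketch: find-or-create against the sparse-unique googleId index. On insert,
// the query's equality field (googleId) is written into the new document.
const mongoose = require('mongoose');
const userSchema = require('./userSchema'); // assumed relative path

const User = mongoose.models.User || mongoose.model('User', userSchema);

async function findOrCreateGoogleUser(profile) {
  return User.findOneAndUpdate(
    { googleId: profile.id },
    { $setOnInsert: { email: profile.email, provider: 'google' } },
    { upsert: true, new: true },
  );
}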
@@ -61,7 +61,6 @@ const bedrockValues = {
  'amazon.nova-micro-v1:0': { prompt: 0.035, completion: 0.14 },
  'amazon.nova-lite-v1:0': { prompt: 0.06, completion: 0.24 },
  'amazon.nova-pro-v1:0': { prompt: 0.8, completion: 3.2 },
  'deepseek.r1': { prompt: 1.35, completion: 5.4 },
};

/**
@@ -80,7 +79,6 @@ const tokenValues = Object.assign(
  'o1-mini': { prompt: 1.1, completion: 4.4 },
  'o1-preview': { prompt: 15, completion: 60 },
  o1: { prompt: 15, completion: 60 },
  'gpt-4.5': { prompt: 75, completion: 150 },
  'gpt-4o-mini': { prompt: 0.15, completion: 0.6 },
  'gpt-4o': { prompt: 2.5, completion: 10 },
  'gpt-4o-2024-05-13': { prompt: 5, completion: 15 },
@@ -90,8 +88,6 @@ const tokenValues = Object.assign(
  'claude-3-sonnet': { prompt: 3, completion: 15 },
  'claude-3-5-sonnet': { prompt: 3, completion: 15 },
  'claude-3.5-sonnet': { prompt: 3, completion: 15 },
  'claude-3-7-sonnet': { prompt: 3, completion: 15 },
  'claude-3.7-sonnet': { prompt: 3, completion: 15 },
  'claude-3-5-haiku': { prompt: 0.8, completion: 4 },
  'claude-3.5-haiku': { prompt: 0.8, completion: 4 },
  'claude-3-haiku': { prompt: 0.25, completion: 1.25 },
@@ -114,14 +110,6 @@ const tokenValues = Object.assign(
  'gemini-1.5': { prompt: 2.5, completion: 10 },
  'gemini-pro-vision': { prompt: 0.5, completion: 1.5 },
  gemini: { prompt: 0.5, completion: 1.5 },
  'grok-2-vision-1212': { prompt: 2.0, completion: 10.0 },
  'grok-2-vision-latest': { prompt: 2.0, completion: 10.0 },
  'grok-2-vision': { prompt: 2.0, completion: 10.0 },
  'grok-vision-beta': { prompt: 5.0, completion: 15.0 },
  'grok-2-1212': { prompt: 2.0, completion: 10.0 },
  'grok-2-latest': { prompt: 2.0, completion: 10.0 },
  'grok-2': { prompt: 2.0, completion: 10.0 },
  'grok-beta': { prompt: 5.0, completion: 15.0 },
  },
  bedrockValues,
);
@@ -133,8 +121,6 @@ const tokenValues = Object.assign(
 * @type {Object.<string, {write: number, read: number }>}
 */
const cacheTokenValues = {
  'claude-3.7-sonnet': { write: 3.75, read: 0.3 },
  'claude-3-7-sonnet': { write: 3.75, read: 0.3 },
  'claude-3.5-sonnet': { write: 3.75, read: 0.3 },
  'claude-3-5-sonnet': { write: 3.75, read: 0.3 },
  'claude-3.5-haiku': { write: 1, read: 0.08 },
@@ -169,8 +155,6 @@ const getValueKey = (model, endpoint) => {
    return 'o1-mini';
  } else if (modelName.includes('o1')) {
    return 'o1';
  } else if (modelName.includes('gpt-4.5')) {
    return 'gpt-4.5';
  } else if (modelName.includes('gpt-4o-2024-05-13')) {
    return 'gpt-4o-2024-05-13';
  } else if (modelName.includes('gpt-4o-mini')) {

@@ -50,16 +50,6 @@ describe('getValueKey', () => {
    expect(getValueKey('gpt-4-0125')).toBe('gpt-4-1106');
  });

  it('should return "gpt-4.5" for model type of "gpt-4.5"', () => {
    expect(getValueKey('gpt-4.5-preview')).toBe('gpt-4.5');
    expect(getValueKey('gpt-4.5-2024-08-06')).toBe('gpt-4.5');
    expect(getValueKey('gpt-4.5-2024-08-06-0718')).toBe('gpt-4.5');
    expect(getValueKey('openai/gpt-4.5')).toBe('gpt-4.5');
    expect(getValueKey('openai/gpt-4.5-2024-08-06')).toBe('gpt-4.5');
    expect(getValueKey('gpt-4.5-turbo')).toBe('gpt-4.5');
    expect(getValueKey('gpt-4.5-0125')).toBe('gpt-4.5');
  });

  it('should return "gpt-4o" for model type of "gpt-4o"', () => {
    expect(getValueKey('gpt-4o-2024-08-06')).toBe('gpt-4o');
    expect(getValueKey('gpt-4o-2024-08-06-0718')).toBe('gpt-4o');
@@ -90,20 +80,6 @@ describe('getValueKey', () => {
    expect(getValueKey('chatgpt-4o-latest-0718')).toBe('gpt-4o');
  });

  it('should return "claude-3-7-sonnet" for model type of "claude-3-7-sonnet-"', () => {
    expect(getValueKey('claude-3-7-sonnet-20240620')).toBe('claude-3-7-sonnet');
    expect(getValueKey('anthropic/claude-3-7-sonnet')).toBe('claude-3-7-sonnet');
    expect(getValueKey('claude-3-7-sonnet-turbo')).toBe('claude-3-7-sonnet');
    expect(getValueKey('claude-3-7-sonnet-0125')).toBe('claude-3-7-sonnet');
  });

  it('should return "claude-3.7-sonnet" for model type of "claude-3.7-sonnet-"', () => {
    expect(getValueKey('claude-3.7-sonnet-20240620')).toBe('claude-3.7-sonnet');
    expect(getValueKey('anthropic/claude-3.7-sonnet')).toBe('claude-3.7-sonnet');
    expect(getValueKey('claude-3.7-sonnet-turbo')).toBe('claude-3.7-sonnet');
    expect(getValueKey('claude-3.7-sonnet-0125')).toBe('claude-3.7-sonnet');
  });

  it('should return "claude-3-5-sonnet" for model type of "claude-3-5-sonnet-"', () => {
    expect(getValueKey('claude-3-5-sonnet-20240620')).toBe('claude-3-5-sonnet');
    expect(getValueKey('anthropic/claude-3-5-sonnet')).toBe('claude-3-5-sonnet');
@@ -288,7 +264,7 @@ describe('AWS Bedrock Model Tests', () => {
});

describe('Deepseek Model Tests', () => {
  const deepseekModels = ['deepseek-chat', 'deepseek-coder', 'deepseek-reasoner', 'deepseek.r1'];
  const deepseekModels = ['deepseek-chat', 'deepseek-coder', 'deepseek-reasoner'];

  it('should return the correct prompt multipliers for all models', () => {
    const results = deepseekModels.map((model) => {
@@ -482,30 +458,3 @@ describe('Google Model Tests', () => {
    });
  });
});

describe('Grok Model Tests - Pricing', () => {
  describe('getMultiplier', () => {
    test('should return correct prompt and completion rates for Grok vision models', () => {
      const models = ['grok-2-vision-1212', 'grok-2-vision', 'grok-2-vision-latest'];
      models.forEach((model) => {
        expect(getMultiplier({ model, tokenType: 'prompt' })).toBe(2.0);
        expect(getMultiplier({ model, tokenType: 'completion' })).toBe(10.0);
      });
    });

    test('should return correct prompt and completion rates for Grok text models', () => {
      const models = ['grok-2-1212', 'grok-2', 'grok-2-latest'];
      models.forEach((model) => {
        expect(getMultiplier({ model, tokenType: 'prompt' })).toBe(2.0);
        expect(getMultiplier({ model, tokenType: 'completion' })).toBe(10.0);
      });
    });

    test('should return correct prompt and completion rates for Grok beta models', () => {
      expect(getMultiplier({ model: 'grok-vision-beta', tokenType: 'prompt' })).toBe(5.0);
      expect(getMultiplier({ model: 'grok-vision-beta', tokenType: 'completion' })).toBe(15.0);
      expect(getMultiplier({ model: 'grok-beta', tokenType: 'prompt' })).toBe(5.0);
      expect(getMultiplier({ model: 'grok-beta', tokenType: 'completion' })).toBe(15.0);
    });
  });
});
|
||||
{
|
||||
"name": "@librechat/backend",
|
||||
"version": "v0.7.7",
|
||||
"version": "v0.7.6",
|
||||
"description": "",
|
||||
"scripts": {
|
||||
"start": "echo 'please run this from the root directory'",
|
||||
@@ -34,24 +34,22 @@
|
||||
},
|
||||
"homepage": "https://librechat.ai",
|
||||
"dependencies": {
|
||||
"@anthropic-ai/sdk": "^0.37.0",
|
||||
"@aws-sdk/client-s3": "^3.758.0",
|
||||
"@aws-sdk/s3-request-presigner": "^3.758.0",
|
||||
"@anthropic-ai/sdk": "^0.32.1",
|
||||
"@azure/search-documents": "^12.0.0",
|
||||
"@google/generative-ai": "^0.23.0",
|
||||
"@google/generative-ai": "^0.21.0",
|
||||
"@googleapis/youtube": "^20.0.0",
|
||||
"@keyv/mongo": "^2.1.8",
|
||||
"@keyv/redis": "^2.8.1",
|
||||
"@langchain/community": "^0.3.34",
|
||||
"@langchain/core": "^0.3.40",
|
||||
"@langchain/google-genai": "^0.1.11",
|
||||
"@langchain/google-vertexai": "^0.2.2",
|
||||
"@langchain/community": "^0.3.14",
|
||||
"@langchain/core": "^0.3.37",
|
||||
"@langchain/google-genai": "^0.1.7",
|
||||
"@langchain/google-vertexai": "^0.1.8",
|
||||
"@langchain/textsplitters": "^0.1.0",
|
||||
"@librechat/agents": "^2.2.8",
|
||||
"@librechat/data-schemas": "*",
|
||||
"@librechat/agents": "^2.0.2",
|
||||
"@waylaidwanderer/fetch-event-source": "^3.0.1",
|
||||
"axios": "^1.8.2",
|
||||
"axios": "^1.7.7",
|
||||
"bcryptjs": "^2.4.3",
|
||||
"cheerio": "^1.0.0-rc.12",
|
||||
"cohere-ai": "^7.9.1",
|
||||
"compression": "^1.7.4",
|
||||
"connect-redis": "^7.1.0",
|
||||
@@ -60,23 +58,22 @@
|
||||
"cors": "^2.8.5",
|
||||
"dedent": "^1.5.3",
|
||||
"dotenv": "^16.0.3",
|
||||
"eventsource": "^3.0.2",
|
||||
"express": "^4.21.2",
|
||||
"express-mongo-sanitize": "^2.2.0",
|
||||
"express-rate-limit": "^7.4.1",
|
||||
"express-session": "^1.18.1",
|
||||
"express-static-gzip": "^2.2.0",
|
||||
"file-type": "^18.7.0",
|
||||
"firebase": "^11.0.2",
|
||||
"googleapis": "^126.0.1",
|
||||
"handlebars": "^4.7.7",
|
||||
"https-proxy-agent": "^7.0.6",
|
||||
"html": "^1.0.0",
|
||||
"ioredis": "^5.3.2",
|
||||
"js-yaml": "^4.1.0",
|
||||
"jsonwebtoken": "^9.0.0",
|
||||
"keyv": "^4.5.4",
|
||||
"keyv-file": "^0.2.0",
|
||||
"klona": "^2.0.6",
|
||||
"langchain": "^0.2.19",
|
||||
"librechat-data-provider": "*",
|
||||
"librechat-mcp": "*",
|
||||
"lodash": "^4.17.21",
|
||||
@@ -84,7 +81,7 @@
|
||||
"memorystore": "^1.6.7",
|
||||
"mime": "^3.0.0",
|
||||
"module-alias": "^2.2.3",
|
||||
"mongoose": "^8.12.1",
|
||||
"mongoose": "^8.9.5",
|
||||
"multer": "^1.4.5-lts.1",
|
||||
"nanoid": "^3.3.7",
|
||||
"nodemailer": "^6.9.15",
|
||||
@@ -94,6 +91,7 @@
|
||||
"openid-client": "^5.4.2",
|
||||
"passport": "^0.6.0",
|
||||
"passport-apple": "^2.0.2",
|
||||
"passport-custom": "^1.1.1",
|
||||
"passport-discord": "^0.1.4",
|
||||
"passport-facebook": "^3.0.0",
|
||||
"passport-github2": "^0.1.12",
|
||||
@@ -101,6 +99,7 @@
|
||||
"passport-jwt": "^4.0.1",
|
||||
"passport-ldapauth": "^3.0.1",
|
||||
"passport-local": "^1.0.0",
|
||||
"pino": "^8.12.1",
|
||||
"sharp": "^0.32.6",
|
||||
"tiktoken": "^1.0.15",
|
||||
"traverse": "^0.6.7",
|
||||
@@ -112,8 +111,8 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"jest": "^29.7.0",
|
||||
"mongodb-memory-server": "^10.1.3",
|
||||
"nodemon": "^3.0.3",
|
||||
"supertest": "^7.0.0"
|
||||
"mongodb-memory-server": "^10.0.0",
|
||||
"nodemon": "^3.0.1",
|
||||
"supertest": "^6.3.3"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -150,13 +150,11 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
  } catch (error) {
    const partialText = getText && getText();
    handleAbortError(res, req, error, {
      sender,
      partialText,
      conversationId,
      sender,
      messageId: responseMessageId,
      parentMessageId: overrideParentMessageId ?? userMessageId ?? parentMessageId,
    }).catch((err) => {
      logger.error('[AskController] Error in `handleAbortError`', err);
      parentMessageId: userMessageId ?? parentMessageId,
    });
  }
};
Some files were not shown because too many files have changed in this diff.