Compare commits: `a11y/focus`...`feat/granu` (217 commits)
| SHA1 |
|---|
| 95214c43c2 |
| f8c8b89f4d |
| 5512c55d71 |
| e07067c86d |
| 2bd1eb3cca |
| 57084f2caa |
| 669af746ed |
| f9994d1547 |
| c62a23fafc |
| 6bbefcd16e |
| 1016a33b89 |
| c9aa10d3d5 |
| 5979efd607 |
| 25b97ba388 |
| e97444a863 |
| 717f61d878 |
| dbb234d3bf |
| 718af1ada1 |
| ee4cdaed20 |
| f897420586 |
| 004f128aec |
| c07a342c97 |
| fd64e2380e |
| 3f82aed9eb |
| 0143ae5728 |
| eed43e6662 |
| fa54c9ae90 |
| 97085073d2 |
| 299c484c7a |
| d53cd1f391 |
| a5e8d009a1 |
| 3e4b01de82 |
| 8b15bb2ed6 |
| c7e4523d7c |
| dba0ec4320 |
| 3c9357580e |
| 10c0d7d474 |
| ec7370dfe9 |
| b412455e9d |
| 3af2666890 |
| 0103b4b08a |
| 5eb0703f78 |
| 4419e2c294 |
| 5f2d1c5dc9 |
| 46ff008b07 |
| 55f79bd2d1 |
| 1bd874591a |
| 6488873bad |
| 13c7ceb918 |
| cdf42b3a03 |
| c2a18f61b4 |
| a57224c1d5 |
| 118ad943c9 |
| 272522452a |
| b0054c775a |
| 9bb9aba8ec |
| 293ac02b95 |
| 29ef91b4dd |
| cd7dd576c1 |
| c22d74d41e |
| 2c39ccd2af |
| 53df6a1a71 |
| dff4fcac00 |
| be4cf5846c |
| 6bb78247b3 |
| cbddd394a5 |
| 830be18b90 |
| 32bab33499 |
| 1806b70418 |
| 5ccdb83e5e |
| 8cade2120d |
| c7f2ee36c5 |
| f2f4bf87ca |
| 442b149d55 |
| aca89091d9 |
| 37c94beeac |
| 80bc49db8d |
| d3a504857a |
| 09e3500d39 |
| 8458401ce6 |
| f9d40784f0 |
| a2fc7d312a |
| 4cbab86b45 |
| 4808c5be48 |
| c517f668fc |
| 939b4ce659 |
| 87255dac81 |
| 442976c74f |
| fb88ac00c6 |
| b846f562be |
| 5cf86b347f |
| f556aaeaea |
| 2f462c9b3c |
| 077b7e7e79 |
| c68cc0a550 |
| deb8a00e27 |
| b45ff8e4ed |
| fc8d24fa5b |
| 449d9b7613 |
| ddb0a7a216 |
| b2f44fc90f |
| cede5d120c |
| ed9ab8842a |
| b344ed12a1 |
| afee1a2cbd |
| 0dbbf7de04 |
| bf80cf30b3 |
| d47d827ed9 |
| 5be446edff |
| 2265413387 |
| 7e98702a87 |
| a2f330e6ca |
| 28b76ce339 |
| eb1668ff22 |
| e86842fd19 |
| af96666ff4 |
| 59109cd2dd |
| c8f5f5131e |
| 8c0be0e2f0 |
| f8cb0cdcda |
| 55d52d07f2 |
| 7ce782fec6 |
| c79ee32006 |
| 739b0d3012 |
| 9c9fe4e03a |
| 844bbbb162 |
| 26780bddf0 |
| 353adceb0c |
| a92ac23c44 |
| 2a3bf259aa |
| 3152a1e536 |
| 2f4a03b581 |
| 7a91f6ca62 |
| fe311df969 |
| 535e7798b3 |
| 621fa6e1aa |
| f6cc394eab |
| 5b402a755e |
| b0405be9ea |
| 3f4dd08589 |
| d5b399550e |
| a5ff8253a4 |
| 0b44142383 |
| 502617db24 |
| f2f285ca1e |
| 6dd1b39886 |
| 5a43f87584 |
| 4af72aac9b |
| f7777a2723 |
| e5b234bc72 |
| 4f2ed46450 |
| 66093b1eb3 |
| d7390d24ec |
| 71105cd49c |
| 3606349a0f |
| e3e796293c |
| 7c4c3a8796 |
| 20c9f1a783 |
| 8e1012c5aa |
| 7c92cef2b7 |
| 4fbb81c774 |
| fc6e14efe2 |
| 6e663b2480 |
| ddb2141eac |
| 37b50736bc |
| 5d6d13efe8 |
| 5efad8f646 |
| 9a7f763714 |
| e6e7935fd8 |
| 18dc3f8686 |
| fe512005fc |
| da131b6c59 |
| dd23559d1f |
| a6f0a8244f |
| f04f8f53be |
| a89a3f4146 |
| 55f5f2d11a |
| 0e8041bcac |
| fc30482f65 |
| 6826c0ed43 |
| 550c7cc68a |
| c0ebb434a6 |
| 0ee1dcc479 |
| e467fbebfa |
| 7f1d01c35a |
| 150116eefe |
| 52f146dd97 |
| 88f4ad7c47 |
| 851938e7a6 |
| 6edd93f99e |
| 16aa5ed466 |
| 000f3a3733 |
| d32f34e5d7 |
| 650e9b4f6c |
| 77a21719fd |
| d0332c6e07 |
| 5d56f48879 |
| c49f883e1a |
| 52b3ed54ca |
| 64bd373bc8 |
| 339882eea4 |
| 37964975c1 |
| 1e6b1b9554 |
| 12f4dbb8c5 |
| e16a6190a5 |
| 24c0433dcf |
| 5d668748f9 |
| 910c73359b |
| 018143b5cc |
| 4afab52fc5 |
| 175cfe8ffb |
| 9b0678da16 |
| ac35b8490c |
| 0551a562d8 |
| 710fde6a6f |
| 93e679e173 |
| cff392e578 |
**`.env.example`** (124 changed lines)

```diff
@@ -20,8 +20,8 @@ DOMAIN_CLIENT=http://localhost:3080
 DOMAIN_SERVER=http://localhost:3080
 
 NO_INDEX=true
-# Use the address that is at most n number of hops away from the Express application.
-# req.socket.remoteAddress is the first hop, and the rest are looked for in the X-Forwarded-For header from right to left.
+# Use the address that is at most n number of hops away from the Express application.
+# req.socket.remoteAddress is the first hop, and the rest are looked for in the X-Forwarded-For header from right to left.
 # A value of 0 means that the first untrusted address would be req.socket.remoteAddress, i.e. there is no reverse proxy.
 # Defaulted to 1.
 TRUST_PROXY=1
```
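The `TRUST_PROXY` comments describe Express's hop-counting `trust proxy` setting. A minimal TypeScript sketch of how such a variable typically feeds that setting — the wiring shown is illustrative, not LibreChat's actual code:

```ts
import express from 'express';

const app = express();

// 1 trusts exactly one reverse-proxy hop; 0 means there is no proxy,
// so the socket address itself is treated as the client.
const trustProxy = Number(process.env.TRUST_PROXY ?? 1);
app.set('trust proxy', trustProxy);

app.get('/ip', (req, res) => {
  // With 'trust proxy' set, req.ip is resolved from X-Forwarded-For,
  // walking right to left past the trusted hops.
  res.send(req.ip);
});

app.listen(3080);
```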
```diff
@@ -88,7 +88,7 @@ PROXY=
 #============#
 
 ANTHROPIC_API_KEY=user_provided
-# ANTHROPIC_MODELS=claude-3-7-sonnet-latest,claude-3-7-sonnet-20250219,claude-3-5-haiku-20241022,claude-3-5-sonnet-20241022,claude-3-5-sonnet-latest,claude-3-5-sonnet-20240620,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307,claude-2.1,claude-2,claude-1.2,claude-1,claude-1-100k,claude-instant-1,claude-instant-1-100k
+# ANTHROPIC_MODELS=claude-opus-4-20250514,claude-sonnet-4-20250514,claude-3-7-sonnet-20250219,claude-3-5-sonnet-20241022,claude-3-5-haiku-20241022,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307
 # ANTHROPIC_REVERSE_PROXY=
 
 #============#
```
```diff
@@ -142,12 +142,12 @@ GOOGLE_KEY=user_provided
 # GOOGLE_AUTH_HEADER=true
 
 # Gemini API (AI Studio)
-# GOOGLE_MODELS=gemini-2.5-pro-exp-03-25,gemini-2.0-flash-exp,gemini-2.0-flash-thinking-exp-1219,gemini-exp-1121,gemini-exp-1114,gemini-1.5-flash-latest,gemini-1.0-pro,gemini-1.0-pro-001,gemini-1.0-pro-latest,gemini-1.0-pro-vision-latest,gemini-1.5-pro-latest,gemini-pro,gemini-pro-vision
+# GOOGLE_MODELS=gemini-2.5-pro-preview-05-06,gemini-2.5-flash-preview-04-17,gemini-2.0-flash-001,gemini-2.0-flash-exp,gemini-2.0-flash-lite-001,gemini-1.5-pro-002,gemini-1.5-flash-002
 
 # Vertex AI
-# GOOGLE_MODELS=gemini-1.5-flash-preview-0514,gemini-1.5-pro-preview-0514,gemini-1.0-pro-vision-001,gemini-1.0-pro-002,gemini-1.0-pro-001,gemini-pro-vision,gemini-1.0-pro
+# GOOGLE_MODELS=gemini-2.5-pro-preview-05-06,gemini-2.5-flash-preview-04-17,gemini-2.0-flash-001,gemini-2.0-flash-exp,gemini-2.0-flash-lite-001,gemini-1.5-pro-002,gemini-1.5-flash-002
 
-# GOOGLE_TITLE_MODEL=gemini-pro
+# GOOGLE_TITLE_MODEL=gemini-2.0-flash-lite-001
 
 # GOOGLE_LOC=us-central1
 
```
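These `*_MODELS` variables are comma-separated lists. A one-line sketch of how such a value is typically split into clean entries (assumed parsing, not LibreChat's exact code):

```ts
// Split a comma-separated model list from the environment into trimmed,
// non-empty entries.
const models = (process.env.GOOGLE_MODELS ?? '')
  .split(',')
  .map((name) => name.trim())
  .filter(Boolean);
```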
```diff
@@ -231,6 +231,14 @@ AZURE_AI_SEARCH_SEARCH_OPTION_QUERY_TYPE=
 AZURE_AI_SEARCH_SEARCH_OPTION_TOP=
 AZURE_AI_SEARCH_SEARCH_OPTION_SELECT=
 
+# OpenAI Image Tools Customization
+#----------------
+# IMAGE_GEN_OAI_DESCRIPTION_WITH_FILES=Custom description for image generation tool when files are present
+# IMAGE_GEN_OAI_DESCRIPTION_NO_FILES=Custom description for image generation tool when no files are present
+# IMAGE_EDIT_OAI_DESCRIPTION=Custom description for image editing tool
+# IMAGE_GEN_OAI_PROMPT_DESCRIPTION=Custom prompt description for image generation tool
+# IMAGE_EDIT_OAI_PROMPT_DESCRIPTION=Custom prompt description for image editing tool
+
 # DALL·E
 #----------------
 # DALLE_API_KEY=
```
```diff
@@ -435,6 +443,62 @@ OPENID_IMAGE_URL=
 # Set to true to automatically redirect to the OpenID provider when a user visits the login page
 # This will bypass the login form completely for users, only use this if OpenID is your only authentication method
 OPENID_AUTO_REDIRECT=false
+# Set to true to use PKCE (Proof Key for Code Exchange) for OpenID authentication
+OPENID_USE_PKCE=false
+#Set to true to reuse openid tokens for authentication management instead of using the mongodb session and the custom refresh token.
+OPENID_REUSE_TOKENS=
+#By default, signing key verification results are cached in order to prevent excessive HTTP requests to the JWKS endpoint.
+#If a signing key matching the kid is found, this will be cached and the next time this kid is requested the signing key will be served from the cache.
+#Default is true.
+OPENID_JWKS_URL_CACHE_ENABLED=
+OPENID_JWKS_URL_CACHE_TIME= # 600000 ms eq to 10 minutes leave empty to disable caching
+#Set to true to trigger token exchange flow to acquire access token for the userinfo endpoint.
+OPENID_ON_BEHALF_FLOW_FOR_USERINFRO_REQUIRED=
+OPENID_ON_BEHALF_FLOW_USERINFRO_SCOPE = "user.read" # example for Scope Needed for Microsoft Graph API
+# Set to true to use the OpenID Connect end session endpoint for logout
+OPENID_USE_END_SESSION_ENDPOINT=
+
+
+# SAML
+# Note: If OpenID is enabled, SAML authentication will be automatically disabled.
+SAML_ENTRY_POINT=
+SAML_ISSUER=
+SAML_CERT=
+SAML_CALLBACK_URL=/oauth/saml/callback
+SAML_SESSION_SECRET=
+
+# Attribute mappings (optional)
+SAML_EMAIL_CLAIM=
+SAML_USERNAME_CLAIM=
+SAML_GIVEN_NAME_CLAIM=
+SAML_FAMILY_NAME_CLAIM=
+SAML_PICTURE_CLAIM=
+SAML_NAME_CLAIM=
+
+# Login button settings (optional)
+SAML_BUTTON_LABEL=
+SAML_IMAGE_URL=
+
+# Whether the SAML Response should be signed.
+# - If "true", the entire `SAML Response` will be signed.
+# - If "false" or unset, only the `SAML Assertion` will be signed (default behavior).
+# SAML_USE_AUTHN_RESPONSE_SIGNED=
+
+
+#===============================================#
+#   Microsoft Graph API / Entra ID Integration  #
+#===============================================#
+
+# Enable Entra ID people search integration in permissions/sharing system
+# When enabled, the people picker will search both local database and Entra ID
+USE_ENTRA_ID_FOR_PEOPLE_SEARCH=false
+
+# When enabled, entra id groups owners will be considered as members of the group
+ENTRA_ID_INCLUDE_OWNERS_AS_MEMBERS=false
+
+# Microsoft Graph API scopes needed for people/group search
+# Default scopes provide access to user profiles and group memberships
+OPENID_GRAPH_SCOPES=User.Read,People.Read,GroupMember.Read.All
+
 # LDAP
 LDAP_URL=
```
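The JWKS cache variables describe standard signing-key caching by `kid`. A minimal sketch using the `jwks-rsa` package's caching options — an assumption for illustration; the diff does not say which JWKS client LibreChat actually uses:

```ts
import jwksClient from 'jwks-rsa';

// Signing-key lookups hit the provider's JWKS endpoint; caching by `kid`
// avoids refetching the key on every token verification.
const client = jwksClient({
  jwksUri: 'https://login.example.com/.well-known/jwks.json', // hypothetical issuer
  cache: process.env.OPENID_JWKS_URL_CACHE_ENABLED !== 'false',
  cacheMaxAge: Number(process.env.OPENID_JWKS_URL_CACHE_TIME ?? 600_000), // 10 minutes
});

// Served from the cache on repeat requests for the same kid.
const key = await client.getSigningKey('example-kid');
```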
```diff
@@ -466,6 +530,18 @@ EMAIL_PASSWORD=
 EMAIL_FROM_NAME=
 EMAIL_FROM=noreply@librechat.ai
 
+#========================#
+# Mailgun API            #
+#========================#
+
+# MAILGUN_API_KEY=your-mailgun-api-key
+# MAILGUN_DOMAIN=mg.yourdomain.com
+# EMAIL_FROM=noreply@yourdomain.com
+# EMAIL_FROM_NAME="LibreChat"
+
+# # Optional: For EU region
+# MAILGUN_HOST=https://api.eu.mailgun.net
+
 #========================#
 # Firebase CDN           #
 #========================#
```
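A sketch of sending through these Mailgun settings with the `mailgun.js` client — how LibreChat itself wires mail may differ, and the recipient and message below are placeholders:

```ts
import FormData from 'form-data';
import Mailgun from 'mailgun.js';

const mg = new Mailgun(FormData).client({
  username: 'api',
  key: process.env.MAILGUN_API_KEY ?? '',
  // EU-region accounts override the API host, as noted above.
  url: process.env.MAILGUN_HOST ?? 'https://api.mailgun.net',
});

await mg.messages.create(process.env.MAILGUN_DOMAIN ?? '', {
  from: `${process.env.EMAIL_FROM_NAME} <${process.env.EMAIL_FROM}>`,
  to: ['user@example.com'],
  subject: 'LibreChat test message',
  text: 'Mail settings are working.',
});
```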
```diff
@@ -555,9 +631,9 @@ HELP_AND_FAQ_URL=https://librechat.ai
 # users always get the latest version. Customize #
 # only if you understand caching implications. #
 
-# INDEX_HTML_CACHE_CONTROL=no-cache, no-store, must-revalidate
-# INDEX_HTML_PRAGMA=no-cache
-# INDEX_HTML_EXPIRES=0
+# INDEX_CACHE_CONTROL=no-cache, no-store, must-revalidate
+# INDEX_PRAGMA=no-cache
+# INDEX_EXPIRES=0
 
 # no-cache: Forces validation with server before using cached version
 # no-store: Prevents storing the response entirely
```
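These variables control the cache headers on the served index page. A minimal Express sketch of that behavior — the variable handling shown is an assumed mapping, not LibreChat's exact code:

```ts
import path from 'node:path';
import express from 'express';

const app = express();

app.get('/', (_req, res) => {
  // Defaults force revalidation so users always get the latest client build.
  res.set({
    'Cache-Control': process.env.INDEX_CACHE_CONTROL ?? 'no-cache, no-store, must-revalidate',
    Pragma: process.env.INDEX_PRAGMA ?? 'no-cache',
    Expires: process.env.INDEX_EXPIRES ?? '0',
  });
  res.sendFile(path.join(process.cwd(), 'client/dist/index.html'));
});
```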
```diff
@@ -567,3 +643,33 @@ HELP_AND_FAQ_URL=https://librechat.ai
 # OpenWeather #
 #=====================================================#
 OPENWEATHER_API_KEY=
+
+#====================================#
+# LibreChat Code Interpreter API     #
+#====================================#
+
+# https://code.librechat.ai
+# LIBRECHAT_CODE_API_KEY=your-key
+
+#======================#
+# Web Search           #
+#======================#
+
+# Note: All of the following variable names can be customized.
+# Omit values to allow user to provide them.
+
+# For more information on configuration values, see:
+# https://librechat.ai/docs/features/web_search
+
+# Search Provider (Required)
+# SERPER_API_KEY=your_serper_api_key
+
+# Scraper (Required)
+# FIRECRAWL_API_KEY=your_firecrawl_api_key
+# Optional: Custom Firecrawl API URL
+# FIRECRAWL_API_URL=your_firecrawl_api_url
+
+# Reranker (Required)
+# JINA_API_KEY=your_jina_api_key
+# or
+# COHERE_API_KEY=your_cohere_api_key
```
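The Web Search block wires three stages: a search provider, a scraper, and a reranker. A stub sketch of that pipeline — every name below is a hypothetical stand-in; only the stage order and required keys come from the config above:

```ts
type Doc = { url: string; text: string };

// Stage 1: search provider (SERPER_API_KEY) returns candidate URLs.
async function search(query: string): Promise<string[]> {
  return [`https://example.com/?q=${encodeURIComponent(query)}`]; // stub
}

// Stage 2: scraper (FIRECRAWL_API_KEY) fetches page contents.
async function scrape(urls: string[]): Promise<Doc[]> {
  return urls.map((url) => ({ url, text: 'scraped page text' })); // stub
}

// Stage 3: reranker (JINA_API_KEY or COHERE_API_KEY) orders docs by relevance.
async function rerank(query: string, docs: Doc[]): Promise<Doc[]> {
  return docs; // stub: a real reranker sorts by a relevance score
}

const context = await rerank('query', await scrape(await search('query')));
```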
**`.github/CONTRIBUTING.md`** (50 changed lines)

```diff
@@ -24,22 +24,40 @@ Project maintainers have the right and responsibility to remove, edit, or reject
 
 ## To contribute to this project, please adhere to the following guidelines:
 
-## 1. Development notes
+## 1. Development Setup
 
-1. Before starting work, make sure your main branch has the latest commits with `npm run update`
-2. Run linting command to find errors: `npm run lint`. Alternatively, ensure husky pre-commit checks are functioning.
+1. Use Node.JS 20.x.
+2. Install typescript globally: `npm i -g typescript`.
+3. Run `npm ci` to install dependencies.
+4. Build the data provider: `npm run build:data-provider`.
+5. Build data schemas: `npm run build:data-schemas`.
+6. Build API methods: `npm run build:api`.
+7. Setup and run unit tests:
+   - Copy `.env.test`: `cp api/test/.env.test.example api/test/.env.test`.
+   - Run backend unit tests: `npm run test:api`.
+   - Run frontend unit tests: `npm run test:client`.
+8. Setup and run integration tests:
+   - Build client: `cd client && npm run build`.
+   - Create `.env`: `cp .env.example .env`.
+   - Install [MongoDB Community Edition](https://www.mongodb.com/docs/manual/administration/install-community/), ensure that `mongosh` connects to your local instance.
+   - Run: `npx install playwright`, then `npx playwright install`.
+   - Copy `config.local`: `cp e2e/config.local.example.ts e2e/config.local.ts`.
+   - Copy `librechat.yaml`: `cp librechat.example.yaml librechat.yaml`.
+   - Run: `npm run e2e`.
+
+## 2. Development Notes
+
+1. Before starting work, make sure your main branch has the latest commits with `npm run update`.
+2. Run linting command to find errors: `npm run lint`. Alternatively, ensure husky pre-commit checks are functioning.
 3. After your changes, reinstall packages in your current branch using `npm run reinstall` and ensure everything still works.
+   - Restart the ESLint server ("ESLint: Restart ESLint Server" in VS Code command bar) and your IDE after reinstalling or updating.
 4. Clear web app localStorage and cookies before and after changes.
-5. For frontend changes, compile typescript before and after changes to check for introduced errors: `cd client && npm run build`.
-6. Run backend unit tests: `npm run test:api`.
-7. Run frontend unit tests: `npm run test:client`.
-8. Run integration tests: `npm run e2e`.
+5. For frontend changes:
+   - Install typescript globally: `npm i -g typescript`.
+   - Compile typescript before and after changes to check for introduced errors: `cd client && tsc --noEmit`.
+6. Run tests locally:
+   - Backend unit tests: `npm run test:api`
+   - Frontend unit tests: `npm run test:client`
+   - Integration tests: `npm run e2e` (requires playwright installed, `npx install playwright`)
 
-## 2. Git Workflow
+## 3. Git Workflow
 
 We utilize a GitFlow workflow to manage changes to this project's codebase. Follow these general steps when contributing code:
 
```
````diff
@@ -49,7 +67,7 @@ We utilize a GitFlow workflow to manage changes to this project's codebase. Foll
 4. Submit a pull request with a clear and concise description of your changes and the reasons behind them.
 5. We will review your pull request, provide feedback as needed, and eventually merge the approved changes into the main branch.
 
-## 3. Commit Message Format
+## 4. Commit Message Format
 
 We follow the [semantic format](https://gist.github.com/joshbuchea/6f47e86d2510bce28f8e7f42ae84c716) for commit messages.
 
@@ -76,7 +94,7 @@ feat: add hat wobble
 ```
 
 
-## 4. Pull Request Process
+## 5. Pull Request Process
 
 When submitting a pull request, please follow these guidelines:
 
@@ -91,7 +109,7 @@ Ensure that your changes meet the following criteria:
 - The commit history is clean and easy to follow. You can use `git rebase` or `git merge --squash` to clean your commit history before submitting the pull request.
 - The pull request description clearly outlines the changes and the reasons behind them. Be sure to include the steps to test the pull request.
 
-## 5. Naming Conventions
+## 6. Naming Conventions
 
 Apply the following naming conventions to branches, labels, and other Git-related entities:
 
@@ -100,7 +118,7 @@ Apply the following naming conventions to branches, labels, and other Git-relate
 - **JS/TS:** Directories and file names: Descriptive and camelCase. First letter uppercased for React files (e.g., `helperFunction.ts, ReactComponent.tsx`).
 - **Docs:** Directories and file names: Descriptive and snake_case (e.g., `config_files.md`).
 
-## 6. TypeScript Conversion
+## 7. TypeScript Conversion
 
 1. **Original State**: The project was initially developed entirely in JavaScript (JS).
 
@@ -126,7 +144,7 @@ Apply the following naming conventions to branches, labels, and other Git-relate
 
 - **Current Stance**: At present, this backend transition is of lower priority and might not be pursued.
 
-## 7. Module Import Conventions
+## 8. Module Import Conventions
 
 - `npm` packages first,
 - from shortest line (top) to longest (bottom)
````
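A quick sketch of imports ordered per that convention — npm packages first, then lines sorted shortest to longest within each group; module names are illustrative:

```ts
import { useState } from 'react';
import { useRecoilValue } from 'recoil';
import { useForm } from 'react-hook-form';
// Local modules follow npm packages, again shortest line to longest.
import store from '~/store';
import { cn } from '~/utils';
import { useChatContext } from '~/Providers';
```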
**`.github/ISSUE_TEMPLATE/BUG-REPORT.yml`** (2 changed lines)

```diff
@@ -79,6 +79,8 @@ body:
 
       For UI-related issues, browser console logs can be very helpful. You can provide these as screenshots or paste the text here.
     render: shell
+  validations:
+    required: true
 - type: textarea
   id: screenshots
   attributes:
```
**`.github/workflows/backend-review.yml`** (14 changed lines)

```diff
@@ -7,6 +7,7 @@ on:
       - release/*
     paths:
       - 'api/**'
+      - 'packages/api/**'
 jobs:
   tests_Backend:
     name: Run Backend unit tests
@@ -36,12 +37,12 @@ jobs:
       - name: Install Data Provider Package
         run: npm run build:data-provider
 
-      - name: Install MCP Package
-        run: npm run build:mcp
-
       - name: Install Data Schemas Package
         run: npm run build:data-schemas
 
+      - name: Install API Package
+        run: npm run build:api
+
       - name: Create empty auth.json file
         run: |
           mkdir -p api/data
@@ -66,5 +67,8 @@ jobs:
       - name: Run librechat-data-provider unit tests
         run: cd packages/data-provider && npm run test:ci
 
-      - name: Run librechat-mcp unit tests
-        run: cd packages/mcp && npm run test:ci
+      - name: Run @librechat/data-schemas unit tests
+        run: cd packages/data-schemas && npm run test:ci
+
+      - name: Run @librechat/api unit tests
+        run: cd packages/api && npm run test:ci
```
**`.github/workflows/deploy-dev.yml`** (17 changed lines)

```diff
@@ -2,7 +2,7 @@ name: Update Test Server
 
 on:
   workflow_run:
-    workflows: ["Docker Dev Images Build"]
+    workflows: ["Docker Dev Branch Images Build"]
     types:
       - completed
   workflow_dispatch:
@@ -12,7 +12,8 @@ jobs:
     runs-on: ubuntu-latest
     if: |
       github.repository == 'danny-avila/LibreChat' &&
-      (github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success')
+      (github.event_name == 'workflow_dispatch' ||
+      (github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.head_branch == 'dev'))
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
@@ -29,13 +30,17 @@ jobs:
           DO_USER: ${{ secrets.DO_USER }}
         run: |
          ssh -o StrictHostKeyChecking=no ${DO_USER}@${DO_HOST} << EOF
-          sudo -i -u danny bash << EEOF
+          sudo -i -u danny bash << 'EEOF'
           cd ~/LibreChat && \
-          git fetch origin main && \
-          npm run update:deployed && \
+          sudo npm run stop:deployed && \
+          sudo docker images --format "{{.Repository}}:{{.ID}}" | grep -E "lc-dev|librechat" | cut -d: -f2 | xargs -r sudo docker rmi -f || true && \
+          sudo npm run update:deployed && \
+          git checkout dev && \
+          git pull origin dev && \
           git checkout do-deploy && \
-          git rebase main && \
-          npm run start:deployed && \
+          git rebase dev && \
+          sudo npm run start:deployed && \
           echo "Update completed. Application should be running now."
           EEOF
           EOF
```
**`.github/workflows/dev-branch-images.yml`** (new file, 72 lines)

```diff
@@ -0,0 +1,72 @@
+name: Docker Dev Branch Images Build
+
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - dev
+    paths:
+      - 'api/**'
+      - 'client/**'
+      - 'packages/**'
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        include:
+          - target: api-build
+            file: Dockerfile.multi
+            image_name: lc-dev-api
+          - target: node
+            file: Dockerfile
+            image_name: lc-dev
+
+    steps:
+      # Check out the repository
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      # Set up QEMU
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      # Set up Docker Buildx
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      # Log in to GitHub Container Registry
+      - name: Log in to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      # Login to Docker Hub
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      # Prepare the environment
+      - name: Prepare environment
+        run: |
+          cp .env.example .env
+
+      # Build and push Docker images for each target
+      - name: Build and push Docker images
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ${{ matrix.file }}
+          push: true
+          tags: |
+            ghcr.io/${{ github.repository_owner }}/${{ matrix.image_name }}:${{ github.sha }}
+            ghcr.io/${{ github.repository_owner }}/${{ matrix.image_name }}:latest
+            ${{ secrets.DOCKERHUB_USERNAME }}/${{ matrix.image_name }}:${{ github.sha }}
+            ${{ secrets.DOCKERHUB_USERNAME }}/${{ matrix.image_name }}:latest
+          platforms: linux/amd64,linux/arm64
+          target: ${{ matrix.target }}
```
**Workflow: generate-release-changelog-pr**

```diff
@@ -4,6 +4,7 @@ on:
   push:
     tags:
       - 'v*.*.*'
+  workflow_dispatch:
 
 jobs:
   generate-release-changelog-pr:
@@ -88,7 +89,7 @@ jobs:
           base: main
           branch: "changelog/${{ github.ref_name }}"
           reviewers: danny-avila
-          title: "chore: update CHANGELOG for release ${{ github.ref_name }}"
+          title: "📜 docs: Changelog for release ${{ github.ref_name }}"
           body: |
             **Description**:
-            - This PR updates the CHANGELOG.md by removing the "Unreleased" section and adding new release notes for release ${{ github.ref_name }} above previous releases.
+            - This PR updates the CHANGELOG.md by removing the "Unreleased" section and adding new release notes for release ${{ github.ref_name }} above previous releases.
```
**Workflow: Generate Unreleased Changelog PR**

```diff
@@ -3,6 +3,7 @@ name: Generate Unreleased Changelog PR
 on:
   schedule:
     - cron: "0 0 * * 1" # Runs every Monday at 00:00 UTC
+  workflow_dispatch:
 
 jobs:
   generate-unreleased-changelog-pr:
@@ -98,9 +99,9 @@ jobs:
           branch: "changelog/unreleased-update"
           sign-commits: true
           commit-message: "action: update Unreleased changelog"
-          title: "action: update Unreleased changelog"
+          title: "📜 docs: Unreleased Changelog"
           body: |
             **Description**:
             - This PR updates the Unreleased section in CHANGELOG.md.
             - It compares the current main branch with the latest version tag (determined as ${{ steps.get_latest_tag.outputs.tag }}),
-            regenerates the Unreleased changelog, removes any old Unreleased block, and inserts the new content.
+            regenerates the Unreleased changelog, removes any old Unreleased block, and inserts the new content.
```
**`.github/workflows/helmcharts.yml`** (7 changed lines)

```diff
@@ -26,8 +26,15 @@ jobs:
         uses: azure/setup-helm@v4
         env:
           GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+      - name: Build Subchart Deps
+        run: |
+          cd helm/librechat-rag-api
+          helm dependency build
+
       - name: Run chart-releaser
         uses: helm/chart-releaser-action@v1.6.0
         with:
           charts_dir: helm
+          skip_existing: true
         env:
           CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
```
**`.github/workflows/i18n-unused-keys.yml`** (40 changed lines)

```diff
@@ -5,12 +5,13 @@ on:
     paths:
       - "client/src/**"
       - "api/**"
+      - "packages/data-provider/src/**"
 
 jobs:
   detect-unused-i18n-keys:
     runs-on: ubuntu-latest
     permissions:
-      pull-requests: write # Required for posting PR comments
+      pull-requests: write
     steps:
       - name: Checkout repository
         uses: actions/checkout@v3
@@ -22,7 +23,7 @@ jobs:
 
           # Define paths
           I18N_FILE="client/src/locales/en/translation.json"
-          SOURCE_DIRS=("client/src" "api")
+          SOURCE_DIRS=("client/src" "api" "packages/data-provider/src")
 
           # Check if translation file exists
           if [[ ! -f "$I18N_FILE" ]]; then
@@ -39,12 +40,35 @@ jobs:
           # Check if each key is used in the source code
           for KEY in $KEYS; do
             FOUND=false
-            for DIR in "${SOURCE_DIRS[@]}"; do
-              if grep -r --include=\*.{js,jsx,ts,tsx} -q "$KEY" "$DIR"; then
-                FOUND=true
-                break
-              fi
-            done
+
+            # Special case for dynamically constructed special variable keys
+            if [[ "$KEY" == com_ui_special_var_* ]]; then
+              # Check if TSpecialVarLabel is used in the codebase
+              for DIR in "${SOURCE_DIRS[@]}"; do
+                if grep -r --include=\*.{js,jsx,ts,tsx} -q "TSpecialVarLabel" "$DIR"; then
+                  FOUND=true
+                  break
+                fi
+              done
+
+              # Also check if the key is directly used somewhere
+              if [[ "$FOUND" == false ]]; then
+                for DIR in "${SOURCE_DIRS[@]}"; do
+                  if grep -r --include=\*.{js,jsx,ts,tsx} -q "$KEY" "$DIR"; then
+                    FOUND=true
+                    break
+                  fi
+                done
+              fi
+            else
+              # Regular check for other keys
+              for DIR in "${SOURCE_DIRS[@]}"; do
+                if grep -r --include=\*.{js,jsx,ts,tsx} -q "$KEY" "$DIR"; then
+                  FOUND=true
+                  break
+                fi
+              done
+            fi
 
             if [[ "$FOUND" == false ]]; then
               UNUSED_KEYS+=("$KEY")
@@ -90,4 +114,4 @@ jobs:
 
       - name: Fail workflow if unused keys found
         if: env.unused_keys != '[]'
-        run: exit 1
+        run: exit 1
```
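The special case exists because these keys are assembled at runtime, so the full key never appears literally in the source and a plain `grep "$KEY"` misses it; the workflow greps for `TSpecialVarLabel` instead. A sketch of the pattern — the template type below is an illustrative stand-in for the real definition:

```ts
// Dynamically constructed i18n keys: only the prefix is a literal in code.
type TSpecialVarLabel = `com_ui_special_var_${string}`;

function specialVarLabel(varName: string): TSpecialVarLabel {
  // Assembled at runtime, e.g. 'com_ui_special_var_current_date'.
  return `com_ui_special_var_${varName}`;
}

console.log(specialVarLabel('current_date'));
```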
**`.github/workflows/unused-packages.yml`** (2 changed lines)

```diff
@@ -98,6 +98,8 @@ jobs:
           cd client
           UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
           UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../client_used_deps.txt ../client_used_code.txt | sort) || echo "")
+          # Filter out false positives
+          UNUSED=$(echo "$UNUSED" | grep -v "^micromark-extension-llm-math$" || echo "")
           echo "CLIENT_UNUSED<<EOF" >> $GITHUB_ENV
           echo "$UNUSED" >> $GITHUB_ENV
           echo "EOF" >> $GITHUB_ENV
```
**`.gitignore`** (14 changed lines)

```diff
@@ -52,6 +52,11 @@ bower_components/
 *.d.ts
 !vite-env.d.ts
 
+# AI
+.clineignore
+.cursor
+.aider*
+
 # Floobits
 .floo
 .floobit
@@ -110,4 +115,13 @@ uploads/
 # owner
 release/
 
+# Helm
+helm/librechat/Chart.lock
+helm/**/charts/
+helm/**/.values.yaml
+
 !/client/src/@types/i18next.d.ts
+
+# SAML Idp cert
+*.cert
```
**`.vscode/launch.json`** (3 changed lines)

```diff
@@ -8,7 +8,8 @@
       "skipFiles": ["<node_internals>/**"],
       "program": "${workspaceFolder}/api/server/index.js",
       "env": {
-        "NODE_ENV": "production"
+        "NODE_ENV": "production",
+        "NODE_TLS_REJECT_UNAUTHORIZED": "0"
       },
       "console": "integratedTerminal",
       "envFile": "${workspaceFolder}/.env"
```
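Setting `NODE_TLS_REJECT_UNAUTHORIZED` to `"0"` makes Node's TLS layer accept self-signed or otherwise unverifiable certificates process-wide — handy when debugging behind an intercepting proxy, but it should never ship to production. For comparison, a scoped per-request equivalent (illustrative):

```ts
import https from 'node:https';

// Disable certificate verification for this one request only, instead of
// flipping it off for the whole process via the env variable.
https.get('https://localhost:3080/health', { rejectUnauthorized: false }, (res) => {
  console.log(res.statusCode);
});
```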
**`CHANGELOG.md`** (226 changed lines)

```diff
@@ -2,15 +2,235 @@
 
 All notable changes to this project will be documented in this file.
 
+## [Unreleased]
+
+### ✨ New Features
+
+- 🪄 feat: Agent Artifacts by **@danny-avila** in [#5804](https://github.com/danny-avila/LibreChat/pull/5804)
+- ✨ feat: implement search parameter updates by **@mawburn** in [#7151](https://github.com/danny-avila/LibreChat/pull/7151)
+- 🎏 feat: Add MCP support for Streamable HTTP Transport by **@benverhees** in [#7353](https://github.com/danny-avila/LibreChat/pull/7353)
+- 🔒 feat: Add Content Security Policy using Helmet middleware by **@rubentalstra** in [#7377](https://github.com/danny-avila/LibreChat/pull/7377)
+- ✨ feat: Add Normalization for MCP Server Names by **@danny-avila** in [#7421](https://github.com/danny-avila/LibreChat/pull/7421)
+- 📊 feat: Improve Helm Chart by **@hofq** in [#3638](https://github.com/danny-avila/LibreChat/pull/3638)
+- 🦾 feat: Claude-4 Support by **@danny-avila** in [#7509](https://github.com/danny-avila/LibreChat/pull/7509)
+- 🪨 feat: Bedrock Support for Claude-4 Reasoning by **@danny-avila** in [#7517](https://github.com/danny-avila/LibreChat/pull/7517)
+
+### 🌍 Internationalization
+
+- 🌍 i18n: Add `Danish` and `Czech` and `Catalan` localization support by **@rubentalstra** in [#7373](https://github.com/danny-avila/LibreChat/pull/7373)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7375](https://github.com/danny-avila/LibreChat/pull/7375)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7468](https://github.com/danny-avila/LibreChat/pull/7468)
+
+### 🔧 Fixes
+
+- 💬 fix: update aria-label for accessibility in ConvoLink component by **@berry-13** in [#7320](https://github.com/danny-avila/LibreChat/pull/7320)
+- 🔑 fix: use `apiKey` instead of `openAIApiKey` in OpenAI-like Config by **@danny-avila** in [#7337](https://github.com/danny-avila/LibreChat/pull/7337)
+- 🔄 fix: update navigation logic in `useFocusChatEffect` to ensure correct search parameters are used by **@mawburn** in [#7340](https://github.com/danny-avila/LibreChat/pull/7340)
+- 🔄 fix: Improve MCP Connection Cleanup by **@danny-avila** in [#7400](https://github.com/danny-avila/LibreChat/pull/7400)
+- 🛡️ fix: Preset and Validation Logic for URL Query Params by **@danny-avila** in [#7407](https://github.com/danny-avila/LibreChat/pull/7407)
+- 🌘 fix: artifact of preview text is illegible in dark mode by **@nhtruong** in [#7405](https://github.com/danny-avila/LibreChat/pull/7405)
+- 🛡️ fix: Temporarily Remove CSP until Configurable by **@danny-avila** in [#7419](https://github.com/danny-avila/LibreChat/pull/7419)
+- 💽 fix: Exclude index page `/` from static cache settings by **@sbruel** in [#7382](https://github.com/danny-avila/LibreChat/pull/7382)
+
+### ⚙️ Other Changes
+
+- 🔄 chore: Enforce 18next Language Keys by **@rubentalstra** in [#5803](https://github.com/danny-avila/LibreChat/pull/5803)
+- 🔃 refactor: Parent Message ID Handling on Error, Update Translations, Bump Agents by **@danny-avila** in [#5833](https://github.com/danny-avila/LibreChat/pull/5833)
+- 📜 docs: CHANGELOG for release v0.7.8 by **@github-actions[bot]** in [#7290](https://github.com/danny-avila/LibreChat/pull/7290)
+- 📦 chore: Update API Package Dependencies by **@danny-avila** in [#7359](https://github.com/danny-avila/LibreChat/pull/7359)
+- 📜 docs: Unreleased Changelog by **@github-actions[bot]** in [#7321](https://github.com/danny-avila/LibreChat/pull/7321)
+- 📜 docs: Unreleased Changelog by **@github-actions[bot]** in [#7434](https://github.com/danny-avila/LibreChat/pull/7434)
+- 🛡️ chore: `multer` v2.0.0 for CVE-2025-47935 and CVE-2025-47944 by **@danny-avila** in [#7454](https://github.com/danny-avila/LibreChat/pull/7454)
+- 📂 refactor: Improve `FileAttachment` & File Form Deletion by **@danny-avila** in [#7471](https://github.com/danny-avila/LibreChat/pull/7471)
+- 📊 chore: Remove Old Helm Chart by **@hofq** in [#7512](https://github.com/danny-avila/LibreChat/pull/7512)
+- 🪖 chore: bump helm app version to v0.7.8 by **@austin-barrington** in [#7524](https://github.com/danny-avila/LibreChat/pull/7524)
+
+---
+
+## [v0.7.8] -
+
+Changes from v0.7.8-rc1 to v0.7.8.
+
+### ✨ New Features
+
+- ✨ feat: Enhance form submission for touch screens by **@berry-13** in [#7198](https://github.com/danny-avila/LibreChat/pull/7198)
+- 🔍 feat: Additional Tavily API Tool Parameters by **@glowforge-opensource** in [#7232](https://github.com/danny-avila/LibreChat/pull/7232)
+- 🐋 feat: Add python to Dockerfile for increased MCP compatibility by **@technicalpickles** in [#7270](https://github.com/danny-avila/LibreChat/pull/7270)
+
+### 🔧 Fixes
+
+- 🔧 fix: Google Gemma Support & OpenAI Reasoning Instructions by **@danny-avila** in [#7196](https://github.com/danny-avila/LibreChat/pull/7196)
+- 🛠️ fix: Conversation Navigation State by **@danny-avila** in [#7210](https://github.com/danny-avila/LibreChat/pull/7210)
+- 🔄 fix: o-Series Model Regex for System Messages by **@danny-avila** in [#7245](https://github.com/danny-avila/LibreChat/pull/7245)
+- 🔖 fix: Custom Headers for Initial MCP SSE Connection by **@danny-avila** in [#7246](https://github.com/danny-avila/LibreChat/pull/7246)
+- 🛡️ fix: Deep Clone `MCPOptions` for User MCP Connections by **@danny-avila** in [#7247](https://github.com/danny-avila/LibreChat/pull/7247)
+- 🔄 fix: URL Param Race Condition and File Draft Persistence by **@danny-avila** in [#7257](https://github.com/danny-avila/LibreChat/pull/7257)
+- 🔄 fix: Assistants Endpoint & Minor Issues by **@danny-avila** in [#7274](https://github.com/danny-avila/LibreChat/pull/7274)
+- 🔄 fix: Ollama Think Tag Edge Case with Tools by **@danny-avila** in [#7275](https://github.com/danny-avila/LibreChat/pull/7275)
+
+### ⚙️ Other Changes
+
+- 📜 docs: CHANGELOG for release v0.7.8-rc1 by **@github-actions[bot]** in [#7153](https://github.com/danny-avila/LibreChat/pull/7153)
+- 🔄 refactor: Artifact Visibility Management by **@danny-avila** in [#7181](https://github.com/danny-avila/LibreChat/pull/7181)
+- 📦 chore: Bump Package Security by **@danny-avila** in [#7183](https://github.com/danny-avila/LibreChat/pull/7183)
+- 🌿 refactor: Unmount Fork Popover on Hide for Better Performance by **@danny-avila** in [#7189](https://github.com/danny-avila/LibreChat/pull/7189)
+- 🧰 chore: ESLint configuration to enforce Prettier formatting rules by **@mawburn** in [#7186](https://github.com/danny-avila/LibreChat/pull/7186)
+- 🎨 style: Improve KaTeX Rendering for LaTeX Equations by **@andresgit** in [#7223](https://github.com/danny-avila/LibreChat/pull/7223)
+- 📝 docs: Update `.env.example` Google models by **@marlonka** in [#7254](https://github.com/danny-avila/LibreChat/pull/7254)
+- 💬 refactor: MCP Chat Visibility Option, Google Rates, Remove OpenAPI Plugins by **@danny-avila** in [#7286](https://github.com/danny-avila/LibreChat/pull/7286)
+- 📜 docs: Unreleased Changelog by **@github-actions[bot]** in [#7214](https://github.com/danny-avila/LibreChat/pull/7214)
+
+[See full release details][release-v0.7.8]
+
+[release-v0.7.8]: https://github.com/danny-avila/LibreChat/releases/tag/v0.7.8
+
+---
+
+## [v0.7.8-rc1] -
+
+Changes from v0.7.7 to v0.7.8-rc1.
+
+### ✨ New Features
+
+- 🔍 feat: Mistral OCR API / Upload Files as Text by **@danny-avila** in [#6274](https://github.com/danny-avila/LibreChat/pull/6274)
+- 🤖 feat: Support OpenAI Web Search models by **@danny-avila** in [#6313](https://github.com/danny-avila/LibreChat/pull/6313)
+- 🔗 feat: Agent Chain (Mixture-of-Agents) by **@danny-avila** in [#6374](https://github.com/danny-avila/LibreChat/pull/6374)
+- ⌛ feat: `initTimeout` for Slow Starting MCP Servers by **@perweij** in [#6383](https://github.com/danny-avila/LibreChat/pull/6383)
+- 🚀 feat: `S3` Integration for File handling and Image uploads by **@rubentalstra** in [#6142](https://github.com/danny-avila/LibreChat/pull/6142)
+- 🔒feat: Enable OpenID Auto-Redirect by **@leondape** in [#6066](https://github.com/danny-avila/LibreChat/pull/6066)
+- 🚀 feat: Integrate `Azure Blob Storage` for file handling and image uploads by **@rubentalstra** in [#6153](https://github.com/danny-avila/LibreChat/pull/6153)
+- 🚀 feat: Add support for custom `AWS` endpoint in `S3` by **@rubentalstra** in [#6431](https://github.com/danny-avila/LibreChat/pull/6431)
+- 🚀 feat: Add support for LDAP STARTTLS in LDAP authentication by **@rubentalstra** in [#6438](https://github.com/danny-avila/LibreChat/pull/6438)
+- 🚀 feat: Refactor schema exports and update package version to 0.0.4 by **@rubentalstra** in [#6455](https://github.com/danny-avila/LibreChat/pull/6455)
+- 🔼 feat: Add Auto Submit For URL Query Params by **@mjaverto** in [#6440](https://github.com/danny-avila/LibreChat/pull/6440)
+- 🛠 feat: Enhance Redis Integration, Rate Limiters & Log Headers by **@danny-avila** in [#6462](https://github.com/danny-avila/LibreChat/pull/6462)
+- 💵 feat: Add Automatic Balance Refill by **@rubentalstra** in [#6452](https://github.com/danny-avila/LibreChat/pull/6452)
+- 🗣️ feat: add support for gpt-4o-transcribe models by **@berry-13** in [#6483](https://github.com/danny-avila/LibreChat/pull/6483)
+- 🎨 feat: UI Refresh for Enhanced UX by **@berry-13** in [#6346](https://github.com/danny-avila/LibreChat/pull/6346)
+- 🌍 feat: Add support for Hungarian language localization by **@rubentalstra** in [#6508](https://github.com/danny-avila/LibreChat/pull/6508)
+- 🚀 feat: Add Gemini 2.5 Token/Context Values, Increase Max Possible Output to 64k by **@danny-avila** in [#6563](https://github.com/danny-avila/LibreChat/pull/6563)
+- 🚀 feat: Enhance MCP Connections For Multi-User Support by **@danny-avila** in [#6610](https://github.com/danny-avila/LibreChat/pull/6610)
+- 🚀 feat: Enhance S3 URL Expiry with Refresh; fix: S3 File Deletion by **@danny-avila** in [#6647](https://github.com/danny-avila/LibreChat/pull/6647)
+- 🚀 feat: enhance UI components and refactor settings by **@berry-13** in [#6625](https://github.com/danny-avila/LibreChat/pull/6625)
+- 💬 feat: move TemporaryChat to the Header by **@berry-13** in [#6646](https://github.com/danny-avila/LibreChat/pull/6646)
+- 🚀 feat: Use Model Specs + Specific Endpoints, Limit Providers for Agents by **@danny-avila** in [#6650](https://github.com/danny-avila/LibreChat/pull/6650)
+- 🪙 feat: Sync Balance Config on Login by **@danny-avila** in [#6671](https://github.com/danny-avila/LibreChat/pull/6671)
+- 🔦 feat: MCP Support for Non-Agent Endpoints by **@danny-avila** in [#6775](https://github.com/danny-avila/LibreChat/pull/6775)
+- 🗃️ feat: Code Interpreter File Persistence between Sessions by **@danny-avila** in [#6790](https://github.com/danny-avila/LibreChat/pull/6790)
+- 🖥️ feat: Code Interpreter API for Non-Agent Endpoints by **@danny-avila** in [#6803](https://github.com/danny-avila/LibreChat/pull/6803)
+- ⚡ feat: Self-hosted Artifacts Static Bundler URL by **@danny-avila** in [#6827](https://github.com/danny-avila/LibreChat/pull/6827)
+- 🐳 feat: Add Jemalloc and UV to Docker Builds by **@danny-avila** in [#6836](https://github.com/danny-avila/LibreChat/pull/6836)
+- 🤖 feat: GPT-4.1 by **@danny-avila** in [#6880](https://github.com/danny-avila/LibreChat/pull/6880)
+- 👋 feat: remove Edge TTS by **@berry-13** in [#6885](https://github.com/danny-avila/LibreChat/pull/6885)
+- feat: nav optimization by **@berry-13** in [#5785](https://github.com/danny-avila/LibreChat/pull/5785)
+- 🗺️ feat: Add Parameter Location Mapping for OpenAPI actions by **@peeeteeer** in [#6858](https://github.com/danny-avila/LibreChat/pull/6858)
+- 🤖 feat: Support `o4-mini` and `o3` Models by **@danny-avila** in [#6928](https://github.com/danny-avila/LibreChat/pull/6928)
+- 🎨 feat: OpenAI Image Tools (GPT-Image-1) by **@danny-avila** in [#7079](https://github.com/danny-avila/LibreChat/pull/7079)
+- 🗓️ feat: Add Special Variables for Prompts & Agents, Prompt UI Improvements by **@danny-avila** in [#7123](https://github.com/danny-avila/LibreChat/pull/7123)
+
+### 🌍 Internationalization
+
+- 🌍 i18n: Add Thai Language Support and Update Translations by **@rubentalstra** in [#6219](https://github.com/danny-avila/LibreChat/pull/6219)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6220](https://github.com/danny-avila/LibreChat/pull/6220)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6240](https://github.com/danny-avila/LibreChat/pull/6240)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6241](https://github.com/danny-avila/LibreChat/pull/6241)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6277](https://github.com/danny-avila/LibreChat/pull/6277)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6414](https://github.com/danny-avila/LibreChat/pull/6414)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6505](https://github.com/danny-avila/LibreChat/pull/6505)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6530](https://github.com/danny-avila/LibreChat/pull/6530)
+- 🌍 i18n: Add Persian Localization Support by **@rubentalstra** in [#6669](https://github.com/danny-avila/LibreChat/pull/6669)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6667](https://github.com/danny-avila/LibreChat/pull/6667)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7126](https://github.com/danny-avila/LibreChat/pull/7126)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7148](https://github.com/danny-avila/LibreChat/pull/7148)
+
+### 👐 Accessibility
+
+- 🎨 a11y: Update Model Spec Description Text by **@berry-13** in [#6294](https://github.com/danny-avila/LibreChat/pull/6294)
+- 🗑️ a11y: Add Accessible Name to Button for File Attachment Removal by **@kangabell** in [#6709](https://github.com/danny-avila/LibreChat/pull/6709)
+- ⌨️ a11y: enhance accessibility & visual consistency by **@berry-13** in [#6866](https://github.com/danny-avila/LibreChat/pull/6866)
+- 🙌 a11y: Searchbar/Conversations List Focus by **@danny-avila** in [#7096](https://github.com/danny-avila/LibreChat/pull/7096)
+- 👐 a11y: Improve Fork and SplitText Accessibility by **@danny-avila** in [#7147](https://github.com/danny-avila/LibreChat/pull/7147)
+
+### 🔧 Fixes
+
+- 🐛 fix: Avatar Type Definitions in Agent/Assistant Schemas by **@danny-avila** in [#6235](https://github.com/danny-avila/LibreChat/pull/6235)
+- 🔧 fix: MeiliSearch Field Error and Patch Incorrect Import by #6210 by **@rubentalstra** in [#6245](https://github.com/danny-avila/LibreChat/pull/6245)
+- 🔏 fix: Enhance Two-Factor Authentication by **@rubentalstra** in [#6247](https://github.com/danny-avila/LibreChat/pull/6247)
+- 🐛 fix: Await saveMessage in abortMiddleware to ensure proper execution by **@sh4shii** in [#6248](https://github.com/danny-avila/LibreChat/pull/6248)
+- 🔧 fix: Axios Proxy Usage And Bump `mongoose` by **@danny-avila** in [#6298](https://github.com/danny-avila/LibreChat/pull/6298)
+- 🔧 fix: comment out MCP servers to resolve service run issues by **@KunalScriptz** in [#6316](https://github.com/danny-avila/LibreChat/pull/6316)
+- 🔧 fix: Update Token Calculations and Mapping, MCP `env` Initialization by **@danny-avila** in [#6406](https://github.com/danny-avila/LibreChat/pull/6406)
+- 🐞 fix: Agent "Resend" Message Attachments + Source Icon Styling by **@danny-avila** in [#6408](https://github.com/danny-avila/LibreChat/pull/6408)
+- 🐛 fix: Prevent Crash on Duplicate Message ID by **@Odrec** in [#6392](https://github.com/danny-avila/LibreChat/pull/6392)
+- 🔐 fix: Invalid Key Length in 2FA Encryption by **@rubentalstra** in [#6432](https://github.com/danny-avila/LibreChat/pull/6432)
+- 🏗️ fix: Fix Agents Token Spend Race Conditions, Expand Test Coverage by **@danny-avila** in [#6480](https://github.com/danny-avila/LibreChat/pull/6480)
+- 🔃 fix: Draft Clearing, Claude Titles, Remove Default Vision Max Tokens by **@danny-avila** in [#6501](https://github.com/danny-avila/LibreChat/pull/6501)
+- 🔧 fix: Update username reference to use user.name in greeting display by **@rubentalstra** in [#6534](https://github.com/danny-avila/LibreChat/pull/6534)
+- 🔧 fix: S3 Download Stream with Key Extraction and Blob Storage Encoding for Vision by **@danny-avila** in [#6557](https://github.com/danny-avila/LibreChat/pull/6557)
+- 🔧 fix: Mistral type strictness for `usage` & update token values/windows by **@danny-avila** in [#6562](https://github.com/danny-avila/LibreChat/pull/6562)
+- 🔧 fix: Consolidate Text Parsing and TTS Edge Initialization by **@danny-avila** in [#6582](https://github.com/danny-avila/LibreChat/pull/6582)
+- 🔧 fix: Ensure continuation in image processing on base64 encoding from Blob Storage by **@danny-avila** in [#6619](https://github.com/danny-avila/LibreChat/pull/6619)
+- ✉️ fix: Fallback For User Name In Email Templates by **@danny-avila** in [#6620](https://github.com/danny-avila/LibreChat/pull/6620)
+- 🔧 fix: Azure Blob Integration and File Source References by **@rubentalstra** in [#6575](https://github.com/danny-avila/LibreChat/pull/6575)
+- 🐛 fix: Safeguard against undefined addedEndpoints by **@wipash** in [#6654](https://github.com/danny-avila/LibreChat/pull/6654)
+- 🤖 fix: Gemini 2.5 Vision Support by **@danny-avila** in [#6663](https://github.com/danny-avila/LibreChat/pull/6663)
+- 🔄 fix: Avatar & Error Handling Enhancements by **@danny-avila** in [#6687](https://github.com/danny-avila/LibreChat/pull/6687)
+- 🔧 fix: Chat Middleware, Zod Conversion, Auto-Save and S3 URL Refresh by **@danny-avila** in [#6720](https://github.com/danny-avila/LibreChat/pull/6720)
+- 🔧 fix: Agent Capability Checks & DocumentDB Compatibility for Agent Resource Removal by **@danny-avila** in [#6726](https://github.com/danny-avila/LibreChat/pull/6726)
+- 🔄 fix: Improve audio MIME type detection and handling by **@berry-13** in [#6707](https://github.com/danny-avila/LibreChat/pull/6707)
+- 🪺 fix: Update Role Handling due to New Schema Shape by **@danny-avila** in [#6774](https://github.com/danny-avila/LibreChat/pull/6774)
+- 🗨️ fix: Show ModelSpec Greeting by **@berry-13** in [#6770](https://github.com/danny-avila/LibreChat/pull/6770)
+- 🔧 fix: Keyv and Proxy Issues, and More Memory Optimizations by **@danny-avila** in [#6867](https://github.com/danny-avila/LibreChat/pull/6867)
+- ✨ fix: Implement dynamic text sizing for greeting and name display by **@berry-13** in [#6833](https://github.com/danny-avila/LibreChat/pull/6833)
+- 📝 fix: Mistral OCR Image Support and Azure Agent Titles by **@danny-avila** in [#6901](https://github.com/danny-avila/LibreChat/pull/6901)
+- 📢 fix: Invalid `engineTTS` and Conversation State on Navigation by **@berry-13** in [#6904](https://github.com/danny-avila/LibreChat/pull/6904)
+- 🛠️ fix: Improve Accessibility and Display of Conversation Menu by **@danny-avila** in [#6913](https://github.com/danny-avila/LibreChat/pull/6913)
+- 🔧 fix: Agent Resource Form, Convo Menu Style, Ensure Draft Clears on Submission by **@danny-avila** in [#6925](https://github.com/danny-avila/LibreChat/pull/6925)
+- 🔀 fix: MCP Improvements, Auto-Save Drafts, Artifact Markup by **@danny-avila** in [#7040](https://github.com/danny-avila/LibreChat/pull/7040)
+- 🐋 fix: Improve Deepseek Compatbility by **@danny-avila** in [#7132](https://github.com/danny-avila/LibreChat/pull/7132)
+- 🐙 fix: Add Redis Ping Interval to Prevent Connection Drops by **@peeeteeer** in [#7127](https://github.com/danny-avila/LibreChat/pull/7127)
+
+### ⚙️ Other Changes
+
+- 📦 refactor: Move DB Models to `@librechat/data-schemas` by **@rubentalstra** in [#6210](https://github.com/danny-avila/LibreChat/pull/6210)
+- 📦 chore: Patch `axios` to address CVE-2025-27152 by **@danny-avila** in [#6222](https://github.com/danny-avila/LibreChat/pull/6222)
+- ⚠️ refactor: Use Error Content Part Instead Of Throwing Error for Agents by **@danny-avila** in [#6262](https://github.com/danny-avila/LibreChat/pull/6262)
+- 🏃♂️ refactor: Improve Agent Run Context & Misc. Changes by **@danny-avila** in [#6448](https://github.com/danny-avila/LibreChat/pull/6448)
+- 📝 docs: librechat.example.yaml by **@ineiti** in [#6442](https://github.com/danny-avila/LibreChat/pull/6442)
+- 🏃♂️ refactor: More Agent Context Improvements during Run by **@danny-avila** in [#6477](https://github.com/danny-avila/LibreChat/pull/6477)
+- 🔃 refactor: Allow streaming for `o1` models by **@danny-avila** in [#6509](https://github.com/danny-avila/LibreChat/pull/6509)
+- 🔧 chore: `Vite` Plugin Upgrades & Config Optimizations by **@rubentalstra** in [#6547](https://github.com/danny-avila/LibreChat/pull/6547)
+- 🔧 refactor: Consolidate Logging, Model Selection & Actions Optimizations, Minor Fixes by **@danny-avila** in [#6553](https://github.com/danny-avila/LibreChat/pull/6553)
+- 🎨 style: Address Minor UI Refresh Issues by **@berry-13** in [#6552](https://github.com/danny-avila/LibreChat/pull/6552)
+- 🔧 refactor: Enhance Model & Endpoint Configurations with Global Indicators 🌍 by **@berry-13** in [#6578](https://github.com/danny-avila/LibreChat/pull/6578)
+- 💬 style: Chat UI, Greeting, and Message adjustments by **@berry-13** in [#6612](https://github.com/danny-avila/LibreChat/pull/6612)
+- ⚡ refactor: DocumentDB Compatibility for Balance Updates by **@danny-avila** in [#6673](https://github.com/danny-avila/LibreChat/pull/6673)
+- 🧹 chore: Update ESLint rules for React hooks by **@rubentalstra** in [#6685](https://github.com/danny-avila/LibreChat/pull/6685)
+- 🪙 chore: Update Gemini Pricing by **@RedwindA** in [#6731](https://github.com/danny-avila/LibreChat/pull/6731)
+- 🪺 refactor: Nest Permission fields for Roles by **@rubentalstra** in [#6487](https://github.com/danny-avila/LibreChat/pull/6487)
+- 📦 chore: Update `caniuse-lite` dependency to version 1.0.30001706 by **@rubentalstra** in [#6482](https://github.com/danny-avila/LibreChat/pull/6482)
+- ⚙️ refactor: OAuth Flow Signal, Type Safety, Tool Progress & Updated Packages by **@danny-avila** in [#6752](https://github.com/danny-avila/LibreChat/pull/6752)
+- 📦 chore: bump vite from 6.2.3 to 6.2.5 by **@dependabot[bot]** in [#6745](https://github.com/danny-avila/LibreChat/pull/6745)
+- 💾 chore: Enhance Local Storage Handling and Update MCP SDK by **@danny-avila** in [#6809](https://github.com/danny-avila/LibreChat/pull/6809)
+- 🤖 refactor: Improve Agents Memory Usage, Bump Keyv, Grok 3 by **@danny-avila** in [#6850](https://github.com/danny-avila/LibreChat/pull/6850)
+- 💾 refactor: Enhance Memory In Image Encodings & Client Disposal by **@danny-avila** in [#6852](https://github.com/danny-avila/LibreChat/pull/6852)
+- 🔁 refactor: Token Event Handler and Standardize `maxTokens` Key by **@danny-avila** in [#6886](https://github.com/danny-avila/LibreChat/pull/6886)
+- 🔍 refactor: Search & Message Retrieval by **@berry-13** in [#6903](https://github.com/danny-avila/LibreChat/pull/6903)
+- 🎨 style: standardize dropdown styling & fix z-Index layering by **@berry-13** in [#6939](https://github.com/danny-avila/LibreChat/pull/6939)
+- 📙 docs: CONTRIBUTING.md by **@dblock** in [#6831](https://github.com/danny-avila/LibreChat/pull/6831)
+- 🧭 refactor: Modernize Nav/Header by **@danny-avila** in [#7094](https://github.com/danny-avila/LibreChat/pull/7094)
+- 🪶 refactor: Chat Input Focus for Conversation Navigations & ChatForm Optimizations by **@danny-avila** in [#7100](https://github.com/danny-avila/LibreChat/pull/7100)
+- 🔃 refactor: Streamline Navigation, Message Loading UX by **@danny-avila** in [#7118](https://github.com/danny-avila/LibreChat/pull/7118)
+- 📜 docs: Unreleased changelog by **@github-actions[bot]** in [#6265](https://github.com/danny-avila/LibreChat/pull/6265)
+
+[See full release details][release-v0.7.8-rc1]
+
+[release-v0.7.8-rc1]: https://github.com/danny-avila/LibreChat/releases/tag/v0.7.8-rc1
+
+---
```
Dockerfile (15 changes)

```diff
@@ -1,9 +1,18 @@
-# v0.7.7
+# v0.7.8

 # Base node image
 FROM node:20-alpine AS node

 RUN apk --no-cache add curl
+# Install jemalloc
+RUN apk add --no-cache jemalloc
+RUN apk add --no-cache python3 py3-pip uv
+
+# Set environment variable to use jemalloc
+ENV LD_PRELOAD=/usr/lib/libjemalloc.so.2
+
+# Add `uv` for extended MCP support
+COPY --from=ghcr.io/astral-sh/uv:0.6.13 /uv /uvx /bin/
+RUN uv --version

 RUN mkdir -p /app && chown node:node /app
 WORKDIR /app
@@ -38,4 +47,4 @@ CMD ["npm", "run", "backend"]
 # WORKDIR /usr/share/nginx/html
 # COPY --from=node /app/client/dist /usr/share/nginx/html
 # COPY client/nginx.conf /etc/nginx/conf.d/default.conf
-# ENTRYPOINT ["nginx", "-g", "daemon off;"]
+# ENTRYPOINT ["nginx", "-g", "daemon off;"]
```
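Both the jemalloc preload and the bundled `uv` binary are environment-level changes, so they are easy to verify from inside a running container. A minimal sketch — the script name and exact checks are illustrative, not part of the repo:

```js
// check-runtime.js — hypothetical sanity check, run inside the container
const { execSync } = require('child_process');

// The Dockerfile sets LD_PRELOAD so Node allocates through jemalloc
console.log('LD_PRELOAD:', process.env.LD_PRELOAD || '(not set)');

// `uv` is copied in from the astral-sh image for Python-based MCP servers
try {
  console.log('uv:', execSync('uv --version').toString().trim());
} catch {
  console.log('uv not found on PATH');
}
```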
```diff
@@ -1,8 +1,12 @@
 # Dockerfile.multi
-# v0.7.7
+# v0.7.8

 # Base for all builds
 FROM node:20-alpine AS base-min
+# Install jemalloc
+RUN apk add --no-cache jemalloc
+# Set environment variable to use jemalloc
+ENV LD_PRELOAD=/usr/lib/libjemalloc.so.2
 WORKDIR /app
 RUN apk --no-cache add curl
 RUN npm config set fetch-retry-maxtimeout 600000 && \
@@ -10,7 +14,7 @@ RUN npm config set fetch-retry-maxtimeout 600000 && \
     npm config set fetch-retry-mintimeout 15000
 COPY package*.json ./
 COPY packages/data-provider/package*.json ./packages/data-provider/
 COPY packages/mcp/package*.json ./packages/mcp/
+COPY packages/api/package*.json ./packages/api/
 COPY packages/data-schemas/package*.json ./packages/data-schemas/
 COPY client/package*.json ./client/
 COPY api/package*.json ./api/
@@ -20,26 +24,27 @@ FROM base-min AS base
 WORKDIR /app
 RUN npm ci

-# Build data-provider
+# Build `data-provider` package
 FROM base AS data-provider-build
 WORKDIR /app/packages/data-provider
 COPY packages/data-provider ./
 RUN npm run build

 # Build mcp package
 FROM base AS mcp-build
 WORKDIR /app/packages/mcp
 COPY packages/mcp ./
 COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
 RUN npm run build

-# Build data-schemas
+# Build `data-schemas` package
 FROM base AS data-schemas-build
 WORKDIR /app/packages/data-schemas
 COPY packages/data-schemas ./
 COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
 RUN npm run build

+# Build `api` package
+FROM base AS api-package-build
+WORKDIR /app/packages/api
+COPY packages/api ./
+COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
+COPY --from=data-schemas-build /app/packages/data-schemas/dist /app/packages/data-schemas/dist
+RUN npm run build

 # Client build
 FROM base AS client-build
 WORKDIR /app/client
@@ -50,14 +55,17 @@ RUN npm run build

 # API setup (including client dist)
 FROM base-min AS api-build
+# Add `uv` for extended MCP support
+COPY --from=ghcr.io/astral-sh/uv:0.6.13 /uv /uvx /bin/
+RUN uv --version
 WORKDIR /app
 # Install only production deps
 RUN npm ci --omit=dev
 COPY api ./api
 COPY config ./config
 COPY --from=data-provider-build /app/packages/data-provider/dist ./packages/data-provider/dist
 COPY --from=mcp-build /app/packages/mcp/dist ./packages/mcp/dist
 COPY --from=data-schemas-build /app/packages/data-schemas/dist ./packages/data-schemas/dist
+COPY --from=api-package-build /app/packages/api/dist ./packages/api/dist
 COPY --from=client-build /app/client/dist ./client/dist
 WORKDIR /app/api
 EXPOSE 3080
```
README.md (14 changes)

```diff
@@ -71,9 +71,19 @@
 - [Model Context Protocol (MCP) Support](https://modelcontextprotocol.io/clients#librechat) for Tools
 - Use LibreChat Agents and OpenAI Assistants with Files, Code Interpreter, Tools, and API Actions

+- 🔍 **Web Search**:
+  - Search the internet and retrieve relevant information to enhance your AI context
+  - Combines search providers, content scrapers, and result rerankers for optimal results
+  - **[Learn More →](https://www.librechat.ai/docs/features/web_search)**
+
 - 🪄 **Generative UI with Code Artifacts**:
   - [Code Artifacts](https://youtu.be/GfTj7O4gmd0?si=WJbdnemZpJzBrJo3) allow creation of React, HTML, and Mermaid diagrams directly in chat

+- 🎨 **Image Generation & Editing**
+  - Text-to-image and image-to-image with [GPT-Image-1](https://www.librechat.ai/docs/features/image_gen#1--openai-image-tools-recommended)
+  - Text-to-image with [DALL-E (3/2)](https://www.librechat.ai/docs/features/image_gen#2--dalle-legacy), [Stable Diffusion](https://www.librechat.ai/docs/features/image_gen#3--stable-diffusion-local), [Flux](https://www.librechat.ai/docs/features/image_gen#4--flux), or any [MCP server](https://www.librechat.ai/docs/features/image_gen#5--model-context-protocol-mcp)
+  - Produce stunning visuals from prompts or refine existing images with a single instruction
+
 - 💾 **Presets & Context Management**:
   - Create, Save, & Share Custom Presets
   - Switch between AI Endpoints and Presets mid-chat
@@ -140,8 +150,8 @@ Click on the thumbnail to open the video☝️

 **Other:**
 - **Website:** [librechat.ai](https://librechat.ai)
-- **Documentation:** [docs.librechat.ai](https://docs.librechat.ai)
-- **Blog:** [blog.librechat.ai](https://blog.librechat.ai)
+- **Documentation:** [librechat.ai/docs](https://librechat.ai/docs)
+- **Blog:** [librechat.ai/blog](https://librechat.ai/blog)
```

---
```diff
@@ -4,11 +4,13 @@ const {
   Constants,
   ErrorTypes,
   EModelEndpoint,
   parseTextParts,
   anthropicSettings,
   getResponseSender,
   validateVisionModel,
 } = require('librechat-data-provider');
-const { SplitStreamHandler: _Handler, GraphEvents } = require('@librechat/agents');
+const { SplitStreamHandler: _Handler } = require('@librechat/agents');
+const { Tokenizer, createFetch, createStreamEventHandlers } = require('@librechat/api');
 const {
   truncateText,
   formatMessage,
@@ -25,10 +27,9 @@ const {
 const { getModelMaxTokens, getModelMaxOutputTokens, matchModelName } = require('~/utils');
 const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
-const Tokenizer = require('~/server/services/Tokenizer');
-const { logger, sendEvent } = require('~/config');
 const { sleep } = require('~/server/utils');
 const BaseClient = require('./BaseClient');
+const { logger } = require('~/config');

 const HUMAN_PROMPT = '\n\nHuman:';
 const AI_PROMPT = '\n\nAssistant:';
@@ -68,13 +69,10 @@ class AnthropicClient extends BaseClient {
     this.message_delta;
     /** Whether the model is part of the Claude 3 Family
      * @type {boolean} */
-    this.isClaude3;
+    this.isClaudeLatest;
     /** Whether to use Messages API or Completions API
      * @type {boolean} */
     this.useMessages;
-    /** Whether or not the model is limited to the legacy amount of output tokens
-     * @type {boolean} */
-    this.isLegacyOutput;
     /** Whether or not the model supports Prompt Caching
      * @type {boolean} */
     this.supportsCacheControl;
@@ -114,21 +112,25 @@ class AnthropicClient extends BaseClient {
     );

     const modelMatch = matchModelName(this.modelOptions.model, EModelEndpoint.anthropic);
-    this.isClaude3 = modelMatch.includes('claude-3');
-    this.isLegacyOutput = !(
-      /claude-3[-.]5-sonnet/.test(modelMatch) || /claude-3[-.]7/.test(modelMatch)
+    this.isClaudeLatest =
+      /claude-[3-9]/.test(modelMatch) || /claude-(?:sonnet|opus|haiku)-[4-9]/.test(modelMatch);
+    const isLegacyOutput = !(
+      /claude-3[-.]5-sonnet/.test(modelMatch) ||
+      /claude-3[-.]7/.test(modelMatch) ||
+      /claude-(?:sonnet|opus|haiku)-[4-9]/.test(modelMatch) ||
+      /claude-[4-9]/.test(modelMatch)
     );
     this.supportsCacheControl = this.options.promptCache && checkPromptCacheSupport(modelMatch);

     if (
-      this.isLegacyOutput &&
+      isLegacyOutput &&
       this.modelOptions.maxOutputTokens &&
       this.modelOptions.maxOutputTokens > legacy.maxOutputTokens.default
     ) {
       this.modelOptions.maxOutputTokens = legacy.maxOutputTokens.default;
     }

-    this.useMessages = this.isClaude3 || !!this.options.attachments;
+    this.useMessages = this.isClaudeLatest || !!this.options.attachments;

     this.defaultVisionModel = this.options.visionModel ?? 'claude-3-sonnet-20240229';
     this.options.attachments?.then((attachments) => this.checkVisionRequest(attachments));
```
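The model detection above pairs two regular expressions so that both Anthropic naming orders match: version-first ids like `claude-3-5-sonnet` and family-first ids like `claude-sonnet-4`. A small sketch using the exact patterns from the diff — the helper name and sample model ids are illustrative:

```js
// Patterns copied from the diff; everything else is illustrative
const isClaudeLatest = (model) =>
  /claude-[3-9]/.test(model) || /claude-(?:sonnet|opus|haiku)-[4-9]/.test(model);

console.log(isClaudeLatest('claude-3-5-sonnet-20241022')); // true (version-first)
console.log(isClaudeLatest('claude-sonnet-4-20250514')); // true (family-first)
console.log(isClaudeLatest('claude-2.1')); // false (legacy Completions-era model)
```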
```diff
@@ -183,12 +185,16 @@ class AnthropicClient extends BaseClient {
   getClient(requestOptions) {
     /** @type {Anthropic.ClientOptions} */
     const options = {
-      fetch: this.fetch,
+      fetch: createFetch({
+        directEndpoint: this.options.directEndpoint,
+        reverseProxyUrl: this.options.reverseProxyUrl,
+      }),
       apiKey: this.apiKey,
+      fetchOptions: {},
     };

     if (this.options.proxy) {
-      options.httpAgent = new HttpsProxyAgent(this.options.proxy);
+      options.fetchOptions.agent = new HttpsProxyAgent(this.options.proxy);
     }

     if (this.options.reverseProxyUrl) {
```
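With this change the SDK client no longer receives a bound `this.fetch`; `createFetch` from `@librechat/api` builds the fetch implementation, and proxy agents ride along in `fetchOptions` rather than the legacy `httpAgent` field. A hedged sketch of the resulting wiring — the option names come from the diff, everything else is an assumption for illustration:

```js
const Anthropic = require('@anthropic-ai/sdk');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { createFetch } = require('@librechat/api');

const options = {
  fetch: createFetch({
    directEndpoint: false, // illustrative values
    reverseProxyUrl: undefined,
  }),
  apiKey: process.env.ANTHROPIC_API_KEY,
  fetchOptions: {},
};

// Proxy agents now attach to fetchOptions instead of httpAgent
if (process.env.PROXY) {
  options.fetchOptions.agent = new HttpsProxyAgent(process.env.PROXY);
}

const anthropic = new Anthropic(options);
```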
```diff
@@ -391,13 +397,13 @@ class AnthropicClient extends BaseClient {
     const formattedMessages = orderedMessages.map((message, i) => {
       const formattedMessage = this.useMessages
         ? formatMessage({
-            message,
-            endpoint: EModelEndpoint.anthropic,
-          })
+          message,
+          endpoint: EModelEndpoint.anthropic,
+        })
         : {
-            author: message.isCreatedByUser ? this.userLabel : this.assistantLabel,
-            content: message?.content ?? message.text,
-          };
+          author: message.isCreatedByUser ? this.userLabel : this.assistantLabel,
+          content: message?.content ?? message.text,
+        };

       const needsTokenCount = this.contextStrategy && !orderedMessages[i].tokenCount;
       /* If tokens were never counted, or, is a Vision request and the message has files, count again */
@@ -413,6 +419,9 @@ class AnthropicClient extends BaseClient {
           this.contextHandlers?.processFile(file);
           continue;
         }
+        if (file.metadata?.fileIdentifier) {
+          continue;
+        }

         orderedMessages[i].tokenCount += this.calculateImageTokenCost({
           width: file.width,
@@ -646,7 +655,10 @@ class AnthropicClient extends BaseClient {
       );
     };

-    if (this.modelOptions.model.includes('claude-3')) {
+    if (
+      /claude-[3-9]/.test(this.modelOptions.model) ||
+      /claude-(?:sonnet|opus|haiku)-[4-9]/.test(this.modelOptions.model)
+    ) {
       await buildMessagesPayload();
       processTokens();
       return {
@@ -672,7 +684,7 @@ class AnthropicClient extends BaseClient {
   }

   getCompletion() {
-    logger.debug('AnthropicClient doesn\'t use getCompletion (all handled in sendCompletion)');
+    logger.debug("AnthropicClient doesn't use getCompletion (all handled in sendCompletion)");
   }

   /**
@@ -696,15 +708,8 @@ class AnthropicClient extends BaseClient {
       if (msg.text != null && msg.text && msg.text.startsWith(':::thinking')) {
         msg.text = msg.text.replace(/:::thinking.*?:::/gs, '').trim();
       } else if (msg.content != null) {
-        /** @type {import('@librechat/agents').MessageContentComplex} */
-        const newContent = [];
-        for (let part of msg.content) {
-          if (part.think != null) {
-            continue;
-          }
-          newContent.push(part);
-        }
-        msg.content = newContent;
         msg.text = parseTextParts(msg.content, true);
         delete msg.content;
       }

       return msg;
@@ -801,14 +806,11 @@ class AnthropicClient extends BaseClient {
     }

     logger.debug('[AnthropicClient]', { ...requestOptions });
+    const handlers = createStreamEventHandlers(this.options.res);
     this.streamHandler = new SplitStreamHandler({
       accumulate: true,
       runId: this.responseMessageId,
-      handlers: {
-        [GraphEvents.ON_RUN_STEP]: (event) => sendEvent(this.options.res, event),
-        [GraphEvents.ON_MESSAGE_DELTA]: (event) => sendEvent(this.options.res, event),
-        [GraphEvents.ON_REASONING_DELTA]: (event) => sendEvent(this.options.res, event),
-      },
+      handlers,
     });

     let intermediateReply = this.streamHandler.tokens;
@@ -890,7 +892,7 @@ class AnthropicClient extends BaseClient {
   }

   getBuildMessagesOptions() {
-    logger.debug('AnthropicClient doesn\'t use getBuildMessagesOptions');
+    logger.debug("AnthropicClient doesn't use getBuildMessagesOptions");
   }

   getEncoding() {
```
```diff
@@ -28,15 +28,10 @@ class BaseClient {
       month: 'long',
       day: 'numeric',
     });
-    this.fetch = this.fetch.bind(this);
     /** @type {boolean} */
     this.skipSaveConvo = false;
     /** @type {boolean} */
     this.skipSaveUserMessage = false;
-    /** @type {ClientDatabaseSavePromise} */
-    this.userMessagePromise;
-    /** @type {ClientDatabaseSavePromise} */
-    this.responsePromise;
     /** @type {string} */
     this.user;
     /** @type {string} */
@@ -68,15 +63,15 @@ class BaseClient {
   }

   setOptions() {
-    throw new Error('Method \'setOptions\' must be implemented.');
+    throw new Error("Method 'setOptions' must be implemented.");
   }

   async getCompletion() {
-    throw new Error('Method \'getCompletion\' must be implemented.');
+    throw new Error("Method 'getCompletion' must be implemented.");
   }

   async sendCompletion() {
-    throw new Error('Method \'sendCompletion\' must be implemented.');
+    throw new Error("Method 'sendCompletion' must be implemented.");
   }

   getSaveOptions() {
@@ -242,11 +237,11 @@ class BaseClient {
     const userMessage = opts.isEdited
       ? this.currentMessages[this.currentMessages.length - 2]
       : this.createUserMessage({
-          messageId: userMessageId,
-          parentMessageId,
-          conversationId,
-          text: message,
-        });
+        messageId: userMessageId,
+        parentMessageId,
+        conversationId,
+        text: message,
+      });

     if (typeof opts?.getReqData === 'function') {
       opts.getReqData({
@@ -564,6 +559,8 @@ class BaseClient {
   }

   async sendMessage(message, opts = {}) {
+    /** @type {Promise<TMessage>} */
+    let userMessagePromise;
     const { user, head, isEdited, conversationId, responseMessageId, saveOptions, userMessage } =
       await this.handleStartMethods(message, opts);

@@ -625,11 +622,11 @@ class BaseClient {
     }

     if (!isEdited && !this.skipSaveUserMessage) {
-      this.userMessagePromise = this.saveMessageToDatabase(userMessage, saveOptions, user);
+      userMessagePromise = this.saveMessageToDatabase(userMessage, saveOptions, user);
       this.savedMessageIds.add(userMessage.messageId);
       if (typeof opts?.getReqData === 'function') {
         opts.getReqData({
-          userMessagePromise: this.userMessagePromise,
+          userMessagePromise,
         });
       }
     }
@@ -655,7 +652,9 @@ class BaseClient {

     /** @type {string|string[]|undefined} */
     const completion = await this.sendCompletion(payload, opts);
-    this.abortController.requestCompleted = true;
+    if (this.abortController) {
+      this.abortController.requestCompleted = true;
+    }

     /** @type {TMessage} */
     const responseMessage = {
@@ -676,7 +675,8 @@ class BaseClient {
       responseMessage.text = addSpaceIfNeeded(generation) + completion;
     } else if (
       Array.isArray(completion) &&
-      isParamEndpoint(this.options.endpoint, this.options.endpointType)
+      (this.clientName === EModelEndpoint.agents ||
+        isParamEndpoint(this.options.endpoint, this.options.endpointType))
     ) {
       responseMessage.text = '';
       responseMessage.content = completion;
@@ -702,7 +702,13 @@ class BaseClient {
     if (usage != null && Number(usage[this.outputTokensKey]) > 0) {
       responseMessage.tokenCount = usage[this.outputTokensKey];
       completionTokens = responseMessage.tokenCount;
-      await this.updateUserMessageTokenCount({ usage, tokenCountMap, userMessage, opts });
+      await this.updateUserMessageTokenCount({
+        usage,
+        tokenCountMap,
+        userMessage,
+        userMessagePromise,
+        opts,
+      });
     } else {
       responseMessage.tokenCount = this.getTokenCountForResponse(responseMessage);
       completionTokens = responseMessage.tokenCount;
@@ -711,8 +717,8 @@ class BaseClient {
       await this.recordTokenUsage({ promptTokens, completionTokens, usage });
     }

-    if (this.userMessagePromise) {
-      await this.userMessagePromise;
+    if (userMessagePromise) {
+      await userMessagePromise;
     }

     if (this.artifactPromises) {
@@ -727,7 +733,11 @@ class BaseClient {
       }
     }

-    this.responsePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user);
+    responseMessage.databasePromise = this.saveMessageToDatabase(
+      responseMessage,
+      saveOptions,
+      user,
+    );
     this.savedMessageIds.add(responseMessage.messageId);
     delete responseMessage.tokenCount;
     return responseMessage;
@@ -748,9 +758,16 @@ class BaseClient {
    * @param {StreamUsage} params.usage
    * @param {Record<string, number>} params.tokenCountMap
    * @param {TMessage} params.userMessage
+   * @param {Promise<TMessage>} params.userMessagePromise
    * @param {object} params.opts
    */
-  async updateUserMessageTokenCount({ usage, tokenCountMap, userMessage, opts }) {
+  async updateUserMessageTokenCount({
+    usage,
+    tokenCountMap,
+    userMessage,
+    userMessagePromise,
+    opts,
+  }) {
     /** @type {boolean} */
     const shouldUpdateCount =
       this.calculateCurrentTokenCount != null &&
@@ -786,7 +803,7 @@ class BaseClient {
     Note: we update the user message to be sure it gets the calculated token count;
     though `AskController` saves the user message, EditController does not
     */
-    await this.userMessagePromise;
+    await userMessagePromise;
     await this.updateMessageInDatabase({
       messageId: userMessage.messageId,
       tokenCount: userMessageTokenCount,
@@ -852,7 +869,7 @@ class BaseClient {
     }

     const savedMessage = await saveMessage(
-      this.options.req,
+      this.options?.req,
       {
         ...message,
         endpoint: this.options.endpoint,
@@ -876,7 +893,7 @@ class BaseClient {
     const existingConvo =
       this.fetchedConvo === true
         ? null
-        : await getConvo(this.options.req?.user?.id, message.conversationId);
+        : await getConvo(this.options?.req?.user?.id, message.conversationId);

     const unsetFields = {};
     const exceptions = new Set(['spec', 'iconURL']);
@@ -896,7 +913,7 @@ class BaseClient {
       }
     }

-    const conversation = await saveConvo(this.options.req, fieldsToKeep, {
+    const conversation = await saveConvo(this.options?.req, fieldsToKeep, {
       context: 'api/app/clients/BaseClient.js - saveMessageToDatabase #saveConvo',
       unsetFields,
     });
```
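The recurring theme in this diff is moving `userMessagePromise` and `responsePromise` off the client instance and into the scope of a single `sendMessage` call (or onto the returned message as `databasePromise`). A condensed sketch of the pattern with the surrounding machinery stubbed out — `deps` and its members are illustrative, not real API:

```js
// Sketch only: concurrent sendMessage calls can no longer observe each
// other's pending saves, because the promise is call-local, not on `this`.
async function sendMessage(userMessage, saveOptions, user, deps) {
  /** @type {Promise<object>|undefined} */
  let userMessagePromise;

  if (!deps.skipSaveUserMessage) {
    // start the save eagerly; await it only where ordering matters
    userMessagePromise = deps.saveMessageToDatabase(userMessage, saveOptions, user);
  }

  const responseMessage = await deps.generateResponse(userMessage);

  if (userMessagePromise) {
    await userMessagePromise; // user message is persisted before we return
  }

  // callers that need the response save can await this themselves
  responseMessage.databasePromise = deps.saveMessageToDatabase(
    responseMessage,
    saveOptions,
    user,
  );
  return responseMessage;
}
```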
```diff
@@ -1,7 +1,8 @@
-const Keyv = require('keyv');
+const { Keyv } = require('keyv');
 const crypto = require('crypto');
 const { CohereClient } = require('cohere-ai');
 const { fetchEventSource } = require('@waylaidwanderer/fetch-event-source');
+const { constructAzureURL, genAzureChatCompletion } = require('@librechat/api');
 const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
 const {
   ImageDetail,
@@ -10,9 +11,9 @@ const {
   CohereConstants,
   mapModelToAzureConfig,
 } = require('librechat-data-provider');
-const { extractBaseURL, constructAzureURL, genAzureChatCompletion } = require('~/utils');
 const { createContextHandlers } = require('./prompts');
 const { createCoherePayload } = require('./llm');
+const { extractBaseURL } = require('~/utils');
 const BaseClient = require('./BaseClient');
 const { logger } = require('~/config');

@@ -244,9 +245,9 @@ class ChatGPTClient extends BaseClient {

     baseURL = this.langchainProxy
       ? constructAzureURL({
-          baseURL: this.langchainProxy,
-          azureOptions: this.azure,
-        })
+        baseURL: this.langchainProxy,
+        azureOptions: this.azure,
+      })
       : this.azureEndpoint.split(/(?<!\/)\/(chat|completion)\//)[0];

     if (this.options.forcePrompt) {
@@ -339,7 +340,6 @@ class ChatGPTClient extends BaseClient {
     opts.body = JSON.stringify(modelOptions);

     if (modelOptions.stream) {
-      // eslint-disable-next-line no-async-promise-executor
       return new Promise(async (resolve, reject) => {
         try {
           let done = false;
```
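The `keyv` change is a plain import fix: older keyv majors exported the class as the module's default, while the version this release bumps to exposes it as a named export. A minimal sketch:

```js
// keyv v4 style (what the old line assumed):
//   const Keyv = require('keyv');
// newer keyv exposes a named export instead:
const { Keyv } = require('keyv');

const cache = new Keyv(); // in-memory Map store by default

async function demo() {
  await cache.set('greeting', 'hello', 1000); // optional TTL in ms
  console.log(await cache.get('greeting')); // 'hello'
}

demo();
```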
```diff
@@ -1,4 +1,5 @@
 const { google } = require('googleapis');
+const { Tokenizer } = require('@librechat/api');
 const { concat } = require('@langchain/core/utils/stream');
 const { ChatVertexAI } = require('@langchain/google-vertexai');
 const { ChatGoogleGenerativeAI } = require('@langchain/google-genai');
@@ -9,6 +10,7 @@ const {
   validateVisionModel,
   getResponseSender,
   endpointSettings,
+  parseTextParts,
   EModelEndpoint,
   ContentTypes,
   VisionModes,
@@ -18,7 +20,6 @@ const {
 } = require('librechat-data-provider');
 const { getSafetySettings } = require('~/server/services/Endpoints/google/llm');
 const { encodeAndFormat } = require('~/server/services/Files/images');
-const Tokenizer = require('~/server/services/Tokenizer');
 const { spendTokens } = require('~/models/spendTokens');
 const { getModelMaxTokens } = require('~/utils');
 const { sleep } = require('~/server/utils');
@@ -33,7 +34,8 @@ const BaseClient = require('./BaseClient');

 const loc = process.env.GOOGLE_LOC || 'us-central1';
 const publisher = 'google';
-const endpointPrefix = `${loc}-aiplatform.googleapis.com`;
+const endpointPrefix =
+  loc === 'global' ? 'aiplatform.googleapis.com' : `${loc}-aiplatform.googleapis.com`;

 const settings = endpointSettings[EModelEndpoint.google];
 const EXCLUDED_GENAI_MODELS = /gemini-(?:1\.0|1-0|pro)/;
@@ -139,8 +141,7 @@ class GoogleClient extends BaseClient {
     this.options.attachments?.then((attachments) => this.checkVisionRequest(attachments));

     /** @type {boolean} Whether using a "GenerativeAI" Model */
-    this.isGenerativeModel =
-      this.modelOptions.model.includes('gemini') || this.modelOptions.model.includes('learnlm');
+    this.isGenerativeModel = /gemini|learnlm|gemma/.test(this.modelOptions.model);

     this.maxContextTokens =
       this.options.maxContextTokens ??
```
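The Vertex AI endpoint fix accounts for the special `global` location, which uses a bare hostname rather than a region prefix. The logic below is copied from the diff; the sample values are illustrative:

```js
const loc = process.env.GOOGLE_LOC || 'us-central1';
const endpointPrefix =
  loc === 'global' ? 'aiplatform.googleapis.com' : `${loc}-aiplatform.googleapis.com`;

console.log(endpointPrefix);
// GOOGLE_LOC unset  -> us-central1-aiplatform.googleapis.com
// GOOGLE_LOC=global -> aiplatform.googleapis.com
```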
```diff
@@ -236,11 +237,11 @@ class GoogleClient extends BaseClient {
       msg.content = (
         !Array.isArray(msg.content)
           ? [
-              {
-                type: ContentTypes.TEXT,
-                [ContentTypes.TEXT]: msg.content,
-              },
-            ]
+            {
+              type: ContentTypes.TEXT,
+              [ContentTypes.TEXT]: msg.content,
+            },
+          ]
           : msg.content
       ).concat(message.image_urls);

@@ -317,6 +318,9 @@ class GoogleClient extends BaseClient {
         this.contextHandlers?.processFile(file);
         continue;
       }
+      if (file.metadata?.fileIdentifier) {
+        continue;
+      }
     }

     this.augmentedPrompt = await this.contextHandlers.createContext();
@@ -774,6 +778,22 @@ class GoogleClient extends BaseClient {
     return this.usage;
   }

+  getMessageMapMethod() {
+    /**
+     * @param {TMessage} msg
+     */
+    return (msg) => {
+      if (msg.text != null && msg.text && msg.text.startsWith(':::thinking')) {
+        msg.text = msg.text.replace(/:::thinking.*?:::/gs, '').trim();
+      } else if (msg.content != null) {
+        msg.text = parseTextParts(msg.content, true);
+        delete msg.content;
+      }
+
+      return msg;
+    };
+  }
+
   /**
    * Calculates the correct token count for the current user message based on the token count map and API usage.
    * Edge case: If the calculation results in a negative value, it returns the original estimate.
```
```diff
@@ -1,10 +1,11 @@
 const { z } = require('zod');
 const axios = require('axios');
 const { Ollama } = require('ollama');
+const { sleep } = require('@librechat/agents');
+const { logAxiosError } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const { Constants } = require('librechat-data-provider');
-const { deriveBaseURL, logAxiosError } = require('~/utils');
-const { sleep } = require('~/server/utils');
-const { logger } = require('~/config');
+const { deriveBaseURL } = require('~/utils');

 const ollamaPayloadSchema = z.object({
   mirostat: z.number().optional(),
@@ -67,7 +68,7 @@ class OllamaClient {
       return models;
     } catch (error) {
       const logMessage =
-        'Failed to fetch models from Ollama API. If you are not using Ollama directly, and instead, through some aggregator or reverse proxy that handles fetching via OpenAI spec, ensure the name of the endpoint doesn\'t start with `ollama` (case-insensitive).';
+        "Failed to fetch models from Ollama API. If you are not using Ollama directly, and instead, through some aggregator or reverse proxy that handles fetching via OpenAI spec, ensure the name of the endpoint doesn't start with `ollama` (case-insensitive).";
       logAxiosError({ message: logMessage, error });
       return [];
     }
```
```diff
@@ -1,11 +1,19 @@
-const OpenAI = require('openai');
 const { OllamaClient } = require('./OllamaClient');
 const { HttpsProxyAgent } = require('https-proxy-agent');
-const { SplitStreamHandler, GraphEvents } = require('@librechat/agents');
+const { SplitStreamHandler, CustomOpenAIClient: OpenAI } = require('@librechat/agents');
+const {
+  isEnabled,
+  Tokenizer,
+  createFetch,
+  constructAzureURL,
+  genAzureChatCompletion,
+  createStreamEventHandlers,
+} = require('@librechat/api');
 const {
   Constants,
   ImageDetail,
   ContentTypes,
   parseTextParts,
   EModelEndpoint,
   resolveHeaders,
   KnownEndpoints,
@@ -16,13 +24,6 @@ const {
   validateVisionModel,
   mapModelToAzureConfig,
 } = require('librechat-data-provider');
-const {
-  extractBaseURL,
-  constructAzureURL,
-  getModelMaxTokens,
-  genAzureChatCompletion,
-  getModelMaxOutputTokens,
-} = require('~/utils');
 const {
   truncateText,
   formatMessage,
@@ -30,18 +31,18 @@ const {
   titleInstruction,
   createContextHandlers,
 } = require('./prompts');
+const { extractBaseURL, getModelMaxTokens, getModelMaxOutputTokens } = require('~/utils');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
-const { addSpaceIfNeeded, isEnabled, sleep } = require('~/server/utils');
-const Tokenizer = require('~/server/services/Tokenizer');
+const { addSpaceIfNeeded, sleep } = require('~/server/utils');
 const { spendTokens } = require('~/models/spendTokens');
 const { handleOpenAIErrors } = require('./tools/util');
 const { createLLM, RunManager } = require('./llm');
-const { logger, sendEvent } = require('~/config');
 const ChatGPTClient = require('./ChatGPTClient');
 const { summaryBuffer } = require('./memory');
 const { runTitleChain } = require('./chains');
 const { tokenSplit } = require('./document');
 const BaseClient = require('./BaseClient');
+const { logger } = require('~/config');

 class OpenAIClient extends BaseClient {
   constructor(apiKey, options = {}) {
@@ -107,7 +108,7 @@ class OpenAIClient extends BaseClient {
       this.checkVisionRequest(this.options.attachments);
     }

-    const omniPattern = /\b(o1|o3)\b/i;
+    const omniPattern = /\b(o\d)\b/i;
     this.isOmni = omniPattern.test(this.modelOptions.model);

     const { OPENAI_FORCE_PROMPT } = process.env ?? {};
```
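Widening `omniPattern` from `o1|o3` to `o\d` makes the omni check forward-compatible with any single-digit "o" series name. A quick sketch of what the new pattern does and does not match — the sample model names are illustrative:

```js
const omniPattern = /\b(o\d)\b/i;

console.log(omniPattern.test('o1-mini')); // true
console.log(omniPattern.test('o3'));      // true
console.log(omniPattern.test('o4-mini')); // true — missed by the old /\b(o1|o3)\b/i
console.log(omniPattern.test('gpt-4o'));  // false — "4o" is not a standalone o-token
```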
```diff
@@ -454,6 +455,9 @@ class OpenAIClient extends BaseClient {
           this.contextHandlers?.processFile(file);
           continue;
         }
+        if (file.metadata?.fileIdentifier) {
+          continue;
+        }

         orderedMessages[i].tokenCount += this.calculateImageTokenCost({
           width: file.width,
@@ -471,7 +475,9 @@ class OpenAIClient extends BaseClient {
       promptPrefix = this.augmentedPrompt + promptPrefix;
     }

-    if (promptPrefix && this.isOmni !== true) {
+    const noSystemModelRegex = /\b(o1-preview|o1-mini)\b/i.test(this.modelOptions.model);
+
+    if (promptPrefix && !noSystemModelRegex) {
       promptPrefix = `Instructions:\n${promptPrefix.trim()}`;
       instructions = {
         role: 'system',
@@ -499,7 +505,7 @@ class OpenAIClient extends BaseClient {
     };

     /** EXPERIMENTAL */
-    if (promptPrefix && this.isOmni === true) {
+    if (promptPrefix && noSystemModelRegex) {
       const lastUserMessageIndex = payload.findLastIndex((message) => message.role === 'user');
       if (lastUserMessageIndex !== -1) {
         if (Array.isArray(payload[lastUserMessageIndex].content)) {
@@ -608,7 +614,7 @@ class OpenAIClient extends BaseClient {
       return result.trim();
     }

-    logger.debug('[OpenAIClient] sendCompletion: result', result);
+    logger.debug('[OpenAIClient] sendCompletion: result', { ...result });

     if (this.isChatCompletion) {
       reply = result.choices[0].message.content;
@@ -817,7 +823,7 @@ ${convo}

       const completionTokens = this.getTokenCount(title);

-      this.recordTokenUsage({ promptTokens, completionTokens, context: 'title' });
+      await this.recordTokenUsage({ promptTokens, completionTokens, context: 'title' });
     } catch (e) {
       logger.error(
         '[OpenAIClient] There was an issue generating the title with the completion method',
@@ -1121,15 +1127,8 @@ ${convo}
       if (msg.text != null && msg.text && msg.text.startsWith(':::thinking')) {
         msg.text = msg.text.replace(/:::thinking.*?:::/gs, '').trim();
       } else if (msg.content != null) {
-        /** @type {import('@librechat/agents').MessageContentComplex} */
-        const newContent = [];
-        for (let part of msg.content) {
-          if (part.think != null) {
-            continue;
-          }
-          newContent.push(part);
-        }
-        msg.content = newContent;
         msg.text = parseTextParts(msg.content, true);
         delete msg.content;
       }

       return msg;
@@ -1160,6 +1159,7 @@ ${convo}
     logger.debug('[OpenAIClient] chatCompletion', { baseURL, modelOptions });
     const opts = {
       baseURL,
+      fetchOptions: {},
     };

     if (this.useOpenRouter) {
@@ -1178,7 +1178,7 @@ ${convo}
     }

     if (this.options.proxy) {
-      opts.httpAgent = new HttpsProxyAgent(this.options.proxy);
+      opts.fetchOptions.agent = new HttpsProxyAgent(this.options.proxy);
     }

     /** @type {TAzureConfig | undefined} */
@@ -1230,9 +1230,9 @@ ${convo}

       opts.baseURL = this.langchainProxy
         ? constructAzureURL({
-            baseURL: this.langchainProxy,
-            azureOptions: this.azure,
-          })
+          baseURL: this.langchainProxy,
+          azureOptions: this.azure,
+        })
         : this.azureEndpoint.split(/(?<!\/)\/(chat|completion)\//)[0];

       opts.defaultQuery = { 'api-version': this.azure.azureOpenAIApiVersion };
@@ -1243,6 +1243,9 @@ ${convo}
       modelOptions.max_completion_tokens = modelOptions.max_tokens;
       delete modelOptions.max_tokens;
     }
+    if (this.isOmni === true && modelOptions.temperature != null) {
+      delete modelOptions.temperature;
+    }

     if (process.env.OPENAI_ORGANIZATION) {
       opts.organization = process.env.OPENAI_ORGANIZATION;
@@ -1251,7 +1254,10 @@ ${convo}
     let chatCompletion;
     /** @type {OpenAI} */
     const openai = new OpenAI({
-      fetch: this.fetch,
+      fetch: createFetch({
+        directEndpoint: this.options.directEndpoint,
+        reverseProxyUrl: this.options.reverseProxyUrl,
+      }),
       apiKey: this.apiKey,
       ...opts,
     });
@@ -1280,13 +1286,22 @@ ${convo}
       modelOptions.messages[0].role = 'user';
     }

+    if (
+      (this.options.endpoint === EModelEndpoint.openAI ||
+        this.options.endpoint === EModelEndpoint.azureOpenAI) &&
+      modelOptions.stream === true
+    ) {
+      modelOptions.stream_options = { include_usage: true };
+    }
+
     if (this.options.addParams && typeof this.options.addParams === 'object') {
+      const addParams = { ...this.options.addParams };
       modelOptions = {
         ...modelOptions,
-        ...this.options.addParams,
+        ...addParams,
       };
       logger.debug('[OpenAIClient] chatCompletion: added params', {
-        addParams: this.options.addParams,
+        addParams: addParams,
         modelOptions,
       });
     }
@@ -1315,11 +1330,12 @@ ${convo}
     }

     if (this.options.dropParams && Array.isArray(this.options.dropParams)) {
-      this.options.dropParams.forEach((param) => {
+      const dropParams = [...this.options.dropParams];
+      dropParams.forEach((param) => {
         delete modelOptions[param];
       });
       logger.debug('[OpenAIClient] chatCompletion: dropped params', {
-        dropParams: this.options.dropParams,
+        dropParams: dropParams,
         modelOptions,
       });
     }
```
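Both the `addParams` and `dropParams` changes apply the same defensive-copy idea: snapshot the caller-supplied object or array before merging or iterating, so later mutation by the caller cannot leak into the request or the debug logs. A condensed sketch — the `options` object is illustrative:

```js
const options = {
  addParams: { temperature: 0.2 },
  dropParams: ['frequency_penalty'],
};

let modelOptions = { model: 'gpt-4', frequency_penalty: 0.5 };

// copy, then spread the copy
const addParams = { ...options.addParams };
modelOptions = { ...modelOptions, ...addParams };

// copy, then iterate the copy
const dropParams = [...options.dropParams];
dropParams.forEach((param) => {
  delete modelOptions[param];
});

console.log(modelOptions); // { model: 'gpt-4', temperature: 0.2 }
```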
```diff
@@ -1361,15 +1377,12 @@ ${convo}
       delete modelOptions.reasoning_effort;
     }

+    const handlers = createStreamEventHandlers(this.options.res);
     this.streamHandler = new SplitStreamHandler({
       reasoningKey,
       accumulate: true,
       runId: this.responseMessageId,
-      handlers: {
-        [GraphEvents.ON_RUN_STEP]: (event) => sendEvent(this.options.res, event),
-        [GraphEvents.ON_MESSAGE_DELTA]: (event) => sendEvent(this.options.res, event),
-        [GraphEvents.ON_REASONING_DELTA]: (event) => sendEvent(this.options.res, event),
-      },
+      handlers,
     });

     intermediateReply = this.streamHandler.tokens;
```
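As in the Anthropic client earlier in this diff, the hand-rolled `GraphEvents` handler map is replaced by `createStreamEventHandlers(this.options.res)`, so both clients emit run-step, message-delta, and reasoning-delta events through one shared helper. A hedged sketch of the call shape — the option names follow the diff, the wrapper function is illustrative:

```js
const { createStreamEventHandlers } = require('@librechat/api');
const { SplitStreamHandler } = require('@librechat/agents');

// `res` is the Express response the SSE events are written to
function buildStreamHandler(res, responseMessageId, reasoningKey) {
  const handlers = createStreamEventHandlers(res);
  return new SplitStreamHandler({
    reasoningKey,
    accumulate: true,
    runId: responseMessageId,
    handlers,
  });
}
```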
```diff
@@ -1383,13 +1396,7 @@ ${convo}
         ...modelOptions,
         stream: true,
       };
-      if (
-        this.options.endpoint === EModelEndpoint.openAI ||
-        this.options.endpoint === EModelEndpoint.azureOpenAI
-      ) {
-        params.stream_options = { include_usage: true };
-      }
-      const stream = await openai.beta.chat.completions
+      const stream = await openai.chat.completions
         .stream(params)
         .on('abort', () => {
           /* Do nothing here */
@@ -1473,6 +1480,11 @@ ${convo}
       });
     }

+    if (openai.abortHandler && abortController.signal) {
+      abortController.signal.removeEventListener('abort', openai.abortHandler);
+      openai.abortHandler = undefined;
+    }
+
     if (!chatCompletion && UnexpectedRoleError) {
       throw new Error(
         'OpenAI error: Invalid final message: OpenAI expects final message to include role=assistant',
```
```diff
@@ -252,12 +252,14 @@ class PluginsClient extends OpenAIClient {
       await this.recordTokenUsage(responseMessage);
     }

-    this.responsePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user);
+    const databasePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user);
     delete responseMessage.tokenCount;
-    return { ...responseMessage, ...result };
+    return { ...responseMessage, ...result, databasePromise };
   }

   async sendMessage(message, opts = {}) {
+    /** @type {Promise<TMessage>} */
+    let userMessagePromise;
     /** @type {{ filteredTools: string[], includedTools: string[] }} */
     const { filteredTools = [], includedTools = [] } = this.options.req.app.locals;

@@ -327,10 +329,10 @@ class PluginsClient extends OpenAIClient {
     }

     if (!this.skipSaveUserMessage) {
-      this.userMessagePromise = this.saveMessageToDatabase(userMessage, saveOptions, user);
+      userMessagePromise = this.saveMessageToDatabase(userMessage, saveOptions, user);
       if (typeof opts?.getReqData === 'function') {
         opts.getReqData({
-          userMessagePromise: this.userMessagePromise,
+          userMessagePromise,
         });
       }
     }
```
```diff
@@ -1,6 +1,5 @@
 const { ChatOpenAI } = require('@langchain/openai');
-const { sanitizeModelName, constructAzureURL } = require('~/utils');
-const { isEnabled } = require('~/server/utils');
+const { isEnabled, sanitizeModelName, constructAzureURL } = require('@librechat/api');

 /**
  * Creates a new instance of a language model (LLM) for chat interactions.
@@ -34,6 +33,7 @@ function createLLM({
   let credentials = { openAIApiKey };
   let configuration = {
+    apiKey: openAIApiKey,
     ...(configOptions.basePath && { baseURL: configOptions.basePath }),
   };

   /** @type {AzureOptions} */
```
```diff
@@ -15,7 +15,7 @@ describe('AnthropicClient', () => {
     {
       role: 'user',
       isCreatedByUser: true,
-      text: 'What\'s up',
+      text: "What's up",
       messageId: '3',
       parentMessageId: '2',
     },
@@ -170,7 +170,7 @@ describe('AnthropicClient', () => {
     client.options.modelLabel = 'Claude-2';
     const result = await client.buildMessages(messages, parentMessageId);
     const { prompt } = result;
-    expect(prompt).toContain('Human\'s name: John');
+    expect(prompt).toContain("Human's name: John");
     expect(prompt).toContain('You are Claude-2');
   });
 });
@@ -244,6 +244,64 @@ describe('AnthropicClient', () => {
     );
   });

+  describe('Claude 4 model headers', () => {
+    it('should add "prompt-caching" beta header for claude-sonnet-4 model', () => {
+      const client = new AnthropicClient('test-api-key');
+      const modelOptions = {
+        model: 'claude-sonnet-4-20250514',
+      };
+      client.setOptions({ modelOptions, promptCache: true });
+      const anthropicClient = client.getClient(modelOptions);
+      expect(anthropicClient._options.defaultHeaders).toBeDefined();
+      expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
+      expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
+        'prompt-caching-2024-07-31',
+      );
+    });
+
+    it('should add "prompt-caching" beta header for claude-opus-4 model', () => {
+      const client = new AnthropicClient('test-api-key');
+      const modelOptions = {
+        model: 'claude-opus-4-20250514',
+      };
+      client.setOptions({ modelOptions, promptCache: true });
+      const anthropicClient = client.getClient(modelOptions);
+      expect(anthropicClient._options.defaultHeaders).toBeDefined();
+      expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
+      expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
+        'prompt-caching-2024-07-31',
+      );
+    });
+
+    it('should add "prompt-caching" beta header for claude-4-sonnet model', () => {
+      const client = new AnthropicClient('test-api-key');
+      const modelOptions = {
+        model: 'claude-4-sonnet-20250514',
+      };
+      client.setOptions({ modelOptions, promptCache: true });
+      const anthropicClient = client.getClient(modelOptions);
+      expect(anthropicClient._options.defaultHeaders).toBeDefined();
+      expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
+      expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
+        'prompt-caching-2024-07-31',
+      );
+    });
+
+    it('should add "prompt-caching" beta header for claude-4-opus model', () => {
+      const client = new AnthropicClient('test-api-key');
+      const modelOptions = {
+        model: 'claude-4-opus-20250514',
+      };
+      client.setOptions({ modelOptions, promptCache: true });
+      const anthropicClient = client.getClient(modelOptions);
+      expect(anthropicClient._options.defaultHeaders).toBeDefined();
+      expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
+      expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
+        'prompt-caching-2024-07-31',
+      );
+    });
+  });

   it('should not add beta header for claude-3-5-sonnet-latest model', () => {
     const client = new AnthropicClient('test-api-key');
     const modelOptions = {
@@ -251,7 +309,7 @@ describe('AnthropicClient', () => {
     };
     client.setOptions({ modelOptions, promptCache: true });
     const anthropicClient = client.getClient(modelOptions);
-    expect(anthropicClient.defaultHeaders).not.toHaveProperty('anthropic-beta');
+    expect(anthropicClient._options.defaultHeaders).toBeUndefined();
   });

   it('should not add beta header for other models', () => {
@@ -262,7 +320,7 @@ describe('AnthropicClient', () => {
       },
     });
     const anthropicClient = client.getClient();
-    expect(anthropicClient.defaultHeaders).not.toHaveProperty('anthropic-beta');
+    expect(anthropicClient._options.defaultHeaders).toBeUndefined();
   });
 });
@@ -456,6 +514,34 @@ describe('AnthropicClient', () => {
     expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
   });

+  it('should not cap maxOutputTokens for Claude 4 Sonnet models', () => {
+    const client = new AnthropicClient('test-api-key');
+    const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 10; // 40,960 tokens
+
+    client.setOptions({
+      modelOptions: {
+        model: 'claude-sonnet-4-20250514',
+        maxOutputTokens: highTokenValue,
+      },
+    });
+
+    expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
+  });
+
+  it('should not cap maxOutputTokens for Claude 4 Opus models', () => {
+    const client = new AnthropicClient('test-api-key');
+    const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 6; // 24,576 tokens (under 32K limit)
+
+    client.setOptions({
+      modelOptions: {
+        model: 'claude-opus-4-20250514',
+        maxOutputTokens: highTokenValue,
+      },
+    });
+
+    expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
+  });
+
   it('should cap maxOutputTokens for Claude 3.5 Haiku models', () => {
     const client = new AnthropicClient('test-api-key');
     const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 2;
@@ -729,4 +815,223 @@ describe('AnthropicClient', () => {
     expect(capturedOptions).toHaveProperty('topK', 10);
     expect(capturedOptions).toHaveProperty('topP', 0.9);
   });

+  describe('isClaudeLatest', () => {
+    it('should set isClaudeLatest to true for claude-3 models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-3-sonnet-20240229',
+        },
+      });
+      expect(client.isClaudeLatest).toBe(true);
+    });
+
+    it('should set isClaudeLatest to true for claude-3.5 models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-3.5-sonnet-20240229',
+        },
+      });
+      expect(client.isClaudeLatest).toBe(true);
+    });
+
+    it('should set isClaudeLatest to true for claude-sonnet-4 models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-sonnet-4-20240229',
+        },
+      });
+      expect(client.isClaudeLatest).toBe(true);
+    });
+
+    it('should set isClaudeLatest to true for claude-opus-4 models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-opus-4-20240229',
+        },
+      });
+      expect(client.isClaudeLatest).toBe(true);
+    });
+
+    it('should set isClaudeLatest to true for claude-3.5-haiku models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-3.5-haiku-20240229',
+        },
+      });
+      expect(client.isClaudeLatest).toBe(true);
+    });
+
+    it('should set isClaudeLatest to false for claude-2 models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-2',
+        },
+      });
+      expect(client.isClaudeLatest).toBe(false);
+    });
+
+    it('should set isClaudeLatest to false for claude-instant models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-instant',
+        },
+      });
+      expect(client.isClaudeLatest).toBe(false);
+    });
+
+    it('should set isClaudeLatest to false for claude-sonnet-3 models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-sonnet-3-20240229',
+        },
+      });
+      expect(client.isClaudeLatest).toBe(false);
+    });
+
+    it('should set isClaudeLatest to false for claude-opus-3 models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-opus-3-20240229',
+        },
+      });
+      expect(client.isClaudeLatest).toBe(false);
+    });
+
+    it('should set isClaudeLatest to false for claude-haiku-3 models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-haiku-3-20240229',
+        },
+      });
+      expect(client.isClaudeLatest).toBe(false);
+    });
+  });
+
+  describe('configureReasoning', () => {
+    it('should enable thinking for claude-opus-4 and claude-sonnet-4 models', async () => {
+      const client = new AnthropicClient('test-api-key');
+      // Create a mock async generator function
+      async function* mockAsyncGenerator() {
+        yield { type: 'message_start', message: { usage: {} } };
+        yield { delta: { text: 'Test response' } };
+        yield { type: 'message_delta', usage: {} };
+      }
+
+      // Mock createResponse to return the async generator
+      jest.spyOn(client, 'createResponse').mockImplementation(() => {
+        return mockAsyncGenerator();
+      });
+
+      // Test claude-opus-4
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-opus-4-20250514',
+        },
+        thinking: true,
+        thinkingBudget: 2000,
+      });
+
+      let capturedOptions = null;
+      jest.spyOn(client, 'getClient').mockImplementation((options) => {
+        capturedOptions = options;
+        return {};
+      });
+
+      const payload = [{ role: 'user', content: 'Test message' }];
+      await client.sendCompletion(payload, {});
+
+      expect(capturedOptions).toHaveProperty('thinking');
+      expect(capturedOptions.thinking).toEqual({
+        type: 'enabled',
+        budget_tokens: 2000,
+      });
+
+      // Test claude-sonnet-4
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-sonnet-4-20250514',
+        },
+        thinking: true,
+        thinkingBudget: 2000,
+      });
+
+      await client.sendCompletion(payload, {});
+
+      expect(capturedOptions).toHaveProperty('thinking');
+      expect(capturedOptions.thinking).toEqual({
+        type: 'enabled',
+        budget_tokens: 2000,
+      });
+    });
+  });
 });

+describe('Claude Model Tests', () => {
+  it('should handle Claude 3 and 4 series models correctly', () => {
+    const client = new AnthropicClient('test-key');
+    // Claude 3 series models
+    const claude3Models = [
+      'claude-3-opus-20240229',
+      'claude-3-sonnet-20240229',
+      'claude-3-haiku-20240307',
+      'claude-3-5-sonnet-20240620',
+      'claude-3-5-haiku-20240620',
+      'claude-3.5-sonnet-20240620',
+      'claude-3.5-haiku-20240620',
+      'claude-3.7-sonnet-20240620',
+      'claude-3.7-haiku-20240620',
+      'anthropic/claude-3-opus-20240229',
+      'claude-3-opus-20240229/anthropic',
+    ];
+
+    // Claude 4 series models
+    const claude4Models = [
+      'claude-sonnet-4-20250514',
+      'claude-opus-4-20250514',
+      'claude-4-sonnet-20250514',
+      'claude-4-opus-20250514',
+      'anthropic/claude-sonnet-4-20250514',
+      'claude-sonnet-4-20250514/anthropic',
+    ];
+
+    // Test Claude 3 series
+    claude3Models.forEach((model) => {
+      client.setOptions({ modelOptions: { model } });
+      expect(
+        /claude-[3-9]/.test(client.modelOptions.model) ||
+          /claude-(?:sonnet|opus|haiku)-[4-9]/.test(client.modelOptions.model),
+      ).toBe(true);
+    });
+
+    // Test Claude 4 series
+    claude4Models.forEach((model) => {
+      client.setOptions({ modelOptions: { model } });
+      expect(
+        /claude-[3-9]/.test(client.modelOptions.model) ||
+          /claude-(?:sonnet|opus|haiku)-[4-9]/.test(client.modelOptions.model),
+      ).toBe(true);
+    });
+
+    // Test non-Claude 3/4 models
+    const nonClaudeModels = ['claude-2', 'claude-instant', 'gpt-4', 'gpt-3.5-turbo'];
+
+    nonClaudeModels.forEach((model) => {
+      client.setOptions({ modelOptions: { model } });
+      expect(
+        /claude-[3-9]/.test(client.modelOptions.model) ||
+          /claude-(?:sonnet|opus|haiku)-[4-9]/.test(client.modelOptions.model),
+      ).toBe(false);
+    });
+  });
+});
```
```diff
@@ -1,7 +1,7 @@
 const { Constants } = require('librechat-data-provider');
 const { initializeFakeClient } = require('./FakeClient');

-jest.mock('~/lib/db/connectDb');
+jest.mock('~/db/connect');
 jest.mock('~/models', () => ({
   User: jest.fn(),
   Key: jest.fn(),
@@ -32,8 +32,10 @@ jest.mock('~/models', () => ({

 const { getConvo, saveConvo } = require('~/models');

-jest.mock('@langchain/openai', () => {
+jest.mock('@librechat/agents', () => {
+  const { Providers } = jest.requireActual('@librechat/agents');
   return {
+    Providers,
     ChatOpenAI: jest.fn().mockImplementation(() => {
       return {};
     }),
@@ -52,7 +54,7 @@ const messageHistory = [
   {
     role: 'user',
     isCreatedByUser: true,
-    text: 'What\'s up',
+    text: "What's up",
     messageId: '3',
     parentMessageId: '2',
   },
@@ -456,7 +458,7 @@ describe('BaseClient', () => {

     const chatMessages2 = await TestClient.loadHistory(conversationId, '3');
     expect(TestClient.currentMessages).toHaveLength(3);
-    expect(chatMessages2[chatMessages2.length - 1].text).toEqual('What\'s up');
+    expect(chatMessages2[chatMessages2.length - 1].text).toEqual("What's up");
   });

   /* Most of the new sendMessage logic revolving around edited/continued AI messages
```
```diff
@@ -1,13 +1,11 @@
 jest.mock('~/cache/getLogStores');
 require('dotenv').config();
-const OpenAI = require('openai');
+const getLogStores = require('~/cache/getLogStores');
 const { fetchEventSource } = require('@waylaidwanderer/fetch-event-source');
 const { genAzureChatCompletion } = require('~/utils/azureUtils');
-const getLogStores = require('~/cache/getLogStores');
 const OpenAIClient = require('../OpenAIClient');
 jest.mock('meilisearch');

-jest.mock('~/lib/db/connectDb');
+jest.mock('~/db/connect');
 jest.mock('~/models', () => ({
   User: jest.fn(),
   Key: jest.fn(),
@@ -36,19 +34,21 @@ jest.mock('~/models', () => ({
   updateFileUsage: jest.fn(),
 }));

-jest.mock('@langchain/openai', () => {
-  return {
-    ChatOpenAI: jest.fn().mockImplementation(() => {
-      return {};
-    }),
-  };
+// Import the actual module but mock specific parts
+const agents = jest.requireActual('@librechat/agents');
+const { CustomOpenAIClient } = agents;
+
+// Also mock ChatOpenAI to prevent real API calls
+agents.ChatOpenAI = jest.fn().mockImplementation(() => {
+  return {};
+});
+agents.AzureChatOpenAI = jest.fn().mockImplementation(() => {
+  return {};
+});

 jest.mock('openai');

-jest.spyOn(OpenAI, 'constructor').mockImplementation(function (...options) {
-  // We can add additional logic here if needed
-  return new OpenAI(...options);
+// Mock only the CustomOpenAIClient constructor
+jest.spyOn(CustomOpenAIClient, 'constructor').mockImplementation(function (...options) {
+  return new CustomOpenAIClient(...options);
 });

 const finalChatCompletion = jest.fn().mockResolvedValue({
@@ -120,7 +120,13 @@ const create = jest.fn().mockResolvedValue({
   ],
 });

-OpenAI.mockImplementation(() => ({
+// Mock the implementation of CustomOpenAIClient instances
+jest.spyOn(CustomOpenAIClient.prototype, 'constructor').mockImplementation(function () {
+  return this;
+});
+
+// Create a mock for the CustomOpenAIClient class
+const mockCustomOpenAIClient = jest.fn().mockImplementation(() => ({
   beta: {
     chat: {
       completions: {
@@ -135,6 +141,8 @@ OpenAI.mockImplementation(() => ({
   },
 }));

+CustomOpenAIClient.mockImplementation = mockCustomOpenAIClient;
+
 describe('OpenAIClient', () => {
   beforeEach(() => {
     const mockCache = {
@@ -454,17 +462,17 @@ describe('OpenAIClient', () => {
       role: 'system',
       name: 'example_user',
       content:
-        'Let\'s circle back when we have more bandwidth to touch base on opportunities for increased leverage.',
+        "Let's circle back when we have more bandwidth to touch base on opportunities for increased leverage.",
     },
     {
       role: 'system',
       name: 'example_assistant',
-      content: 'Let\'s talk later when we\'re less busy about how to do better.',
+      content: "Let's talk later when we're less busy about how to do better.",
     },
     {
       role: 'user',
       content:
-        'This late pivot means we don\'t have time to boil the ocean for the client deliverable.',
+        "This late pivot means we don't have time to boil the ocean for the client deliverable.",
     },
   ];

@@ -559,41 +567,6 @@ describe('OpenAIClient', () => {
     expect(requestBody).toHaveProperty('model');
     expect(requestBody.model).toBe(model);
   });

-  it('[Azure OpenAI] should call chatCompletion and OpenAI.stream with correct args', async () => {
-    // Set a default model
-    process.env.AZURE_OPENAI_DEFAULT_MODEL = 'gpt4-turbo';
-
-    const onProgress = jest.fn().mockImplementation(() => ({}));
-    client.azure = defaultAzureOptions;
-    const chatCompletion = jest.spyOn(client, 'chatCompletion');
-    await client.sendMessage('Hi mom!', {
-      replaceOptions: true,
-      ...defaultOptions,
-      modelOptions: { model: 'gpt4-turbo', stream: true },
-      onProgress,
-      azure: defaultAzureOptions,
-    });
-
-    expect(chatCompletion).toHaveBeenCalled();
-    expect(chatCompletion.mock.calls.length).toBe(1);
-
-    const chatCompletionArgs = chatCompletion.mock.calls[0][0];
-    const { payload } = chatCompletionArgs;
-
-    expect(payload[0].role).toBe('user');
-    expect(payload[0].content).toBe('Hi mom!');
-
-    // Azure OpenAI does not use the model property, and will error if it's passed
-    // This check ensures the model property is not present
-    const streamArgs = stream.mock.calls[0][0];
-    expect(streamArgs).not.toHaveProperty('model');
-
-    // Check if the baseURL is correct
-    const constructorArgs = OpenAI.mock.calls[0][0];
-    const expectedURL = genAzureChatCompletion(defaultAzureOptions).split('/chat')[0];
-    expect(constructorArgs.baseURL).toBe(expectedURL);
-  });
 });

 describe('checkVisionRequest functionality', () => {
```
@@ -3,7 +3,7 @@ const { Constants } = require('librechat-data-provider');
const { HumanMessage, AIMessage } = require('@langchain/core/messages');
const PluginsClient = require('../PluginsClient');

jest.mock('~/lib/db/connectDb');
jest.mock('~/db/connect');
jest.mock('~/models/Conversation', () => {
  return function () {
    return {

@@ -1,184 +0,0 @@
require('dotenv').config();
const fs = require('fs');
const { z } = require('zod');
const path = require('path');
const yaml = require('js-yaml');
const { createOpenAPIChain } = require('langchain/chains');
const { DynamicStructuredTool } = require('@langchain/core/tools');
const { ChatPromptTemplate, HumanMessagePromptTemplate } = require('@langchain/core/prompts');
const { logger } = require('~/config');

function addLinePrefix(text, prefix = '// ') {
  return text
    .split('\n')
    .map((line) => prefix + line)
    .join('\n');
}

function createPrompt(name, functions) {
  const prefix = `// The ${name} tool has the following functions. Determine the desired or most optimal function for the user's query:`;
  const functionDescriptions = functions
    .map((func) => `// - ${func.name}: ${func.description}`)
    .join('\n');
  return `${prefix}\n${functionDescriptions}
// You are an expert manager and scrum master. You must provide a detailed intent to better execute the function.
// Always format as such: {{"func": "function_name", "intent": "intent and expected result"}}`;
}

const AuthBearer = z
  .object({
    type: z.string().includes('service_http'),
    authorization_type: z.string().includes('bearer'),
    verification_tokens: z.object({
      openai: z.string(),
    }),
  })
  .catch(() => false);

const AuthDefinition = z
  .object({
    type: z.string(),
    authorization_type: z.string(),
    verification_tokens: z.object({
      openai: z.string(),
    }),
  })
  .catch(() => false);

async function readSpecFile(filePath) {
  try {
    const fileContents = await fs.promises.readFile(filePath, 'utf8');
    if (path.extname(filePath) === '.json') {
      return JSON.parse(fileContents);
    }
    return yaml.load(fileContents);
  } catch (e) {
    logger.error('[readSpecFile] error', e);
    return false;
  }
}

async function getSpec(url) {
  const RegularUrl = z
    .string()
    .url()
    .catch(() => false);

  if (RegularUrl.parse(url) && path.extname(url) === '.json') {
    const response = await fetch(url);
    return await response.json();
  }

  const ValidSpecPath = z
    .string()
    .url()
    .catch(async () => {
      const spec = path.join(__dirname, '..', '.well-known', 'openapi', url);
      if (!fs.existsSync(spec)) {
        return false;
      }

      return await readSpecFile(spec);
    });

  return ValidSpecPath.parse(url);
}

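// Usage sketch (illustrative, not part of the original file): how the loaders above
// resolve a spec. The URL and filename below are hypothetical examples.
async function loadExampleSpec() {
  // A well-formed URL ending in .json is fetched over HTTP:
  const remoteSpec = await getSpec('https://example.com/.well-known/openapi.json');
  // A non-URL value is treated as a filename under the local .well-known/openapi directory:
  const localSpec = await getSpec('myplugin.yaml');
  // Both helpers return `false` on a missing file or parse error rather than throwing:
  return remoteSpec || localSpec;
}
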
async function createOpenAPIPlugin({ data, llm, user, message, memory, signal }) {
  let spec;
  try {
    spec = await getSpec(data.api.url);
  } catch (error) {
    logger.error('[createOpenAPIPlugin] getSpec error', error);
    return null;
  }

  if (!spec) {
    logger.warn('[createOpenAPIPlugin] No spec found');
    return null;
  }

  const headers = {};
  const { auth, name_for_model, description_for_model, description_for_human } = data;
  if (auth && AuthDefinition.parse(auth)) {
    logger.debug('[createOpenAPIPlugin] auth detected', auth);
    const { openai } = auth.verification_tokens;
    if (AuthBearer.parse(auth)) {
      headers.authorization = `Bearer ${openai}`;
      logger.debug('[createOpenAPIPlugin] added auth bearer', headers);
    }
  }

  const chainOptions = { llm };

  if (data.headers && data.headers['librechat_user_id']) {
    logger.debug('[createOpenAPIPlugin] id detected', headers);
    headers[data.headers['librechat_user_id']] = user;
  }

  if (Object.keys(headers).length > 0) {
    logger.debug('[createOpenAPIPlugin] headers detected', headers);
    chainOptions.headers = headers;
  }

  if (data.params) {
    logger.debug('[createOpenAPIPlugin] params detected', data.params);
    chainOptions.params = data.params;
  }

  let history = '';
  if (memory) {
    logger.debug('[createOpenAPIPlugin] openAPI chain: memory detected', memory);
    const { history: chat_history } = await memory.loadMemoryVariables({});
    history = chat_history?.length > 0 ? `\n\n## Chat History:\n${chat_history}\n` : '';
  }

  chainOptions.prompt = ChatPromptTemplate.fromMessages([
    HumanMessagePromptTemplate.fromTemplate(
      `# Use the provided API's to respond to this query:\n\n{query}\n\n## Instructions:\n${addLinePrefix(
        description_for_model,
      )}${history}`,
    ),
  ]);

  const chain = await createOpenAPIChain(spec, chainOptions);

  const { functions } = chain.chains[0].lc_kwargs.llmKwargs;

  return new DynamicStructuredTool({
    name: name_for_model,
    description_for_model: `${addLinePrefix(description_for_human)}${createPrompt(
      name_for_model,
      functions,
    )}`,
    description: `${description_for_human}`,
    schema: z.object({
      func: z
        .string()
        .describe(
          `The function to invoke. The functions available are: ${functions
            .map((func) => func.name)
            .join(', ')}`,
        ),
      intent: z
        .string()
        .describe('Describe your intent with the function and your expected result'),
    }),
    func: async ({ func = '', intent = '' }) => {
      const filteredFunctions = functions.filter((f) => f.name === func);
      chain.chains[0].lc_kwargs.llmKwargs.functions = filteredFunctions;
      const query = `${message}${func?.length > 0 ? `\n// Intent: ${intent}` : ''}`;
      const result = await chain.call({
        query,
        signal,
      });
      return result.response;
    },
  });
}

module.exports = {
  getSpec,
  readSpecFile,
  createOpenAPIPlugin,
};
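// Example (sketch, hypothetical values): building a tool from a plugin manifest entry.
// `llm` is any LangChain chat model; `data` matches the ManifestDefinition shape used
// by loadSpecs. Returns null if the spec cannot be fetched or parsed.
//
//   const tool = await createOpenAPIPlugin({
//     data: manifestJson, // parsed ai-plugin.json whose api.url points at the spec
//     llm: chatModel,
//     user: userId,
//     message: 'What produce is on sale?',
//     signal: abortController.signal,
//   });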
@@ -1,72 +0,0 @@
const fs = require('fs');
const { createOpenAPIPlugin, getSpec, readSpecFile } = require('./OpenAPIPlugin');

global.fetch = jest.fn().mockImplementationOnce(() => {
  return new Promise((resolve) => {
    resolve({
      ok: true,
      json: () => Promise.resolve({ key: 'value' }),
    });
  });
});
jest.mock('fs', () => ({
  promises: {
    readFile: jest.fn(),
  },
  existsSync: jest.fn(),
}));

describe('readSpecFile', () => {
  it('reads JSON file correctly', async () => {
    fs.promises.readFile.mockResolvedValue(JSON.stringify({ test: 'value' }));
    const result = await readSpecFile('test.json');
    expect(result).toEqual({ test: 'value' });
  });

  it('reads YAML file correctly', async () => {
    fs.promises.readFile.mockResolvedValue('test: value');
    const result = await readSpecFile('test.yaml');
    expect(result).toEqual({ test: 'value' });
  });

  it('handles error correctly', async () => {
    fs.promises.readFile.mockRejectedValue(new Error('test error'));
    const result = await readSpecFile('test.json');
    expect(result).toBe(false);
  });
});

describe('getSpec', () => {
  it('fetches spec from url correctly', async () => {
    const parsedJson = await getSpec('https://www.instacart.com/.well-known/ai-plugin.json');
    const isObject = typeof parsedJson === 'object';
    expect(isObject).toEqual(true);
  });

  it('reads spec from file correctly', async () => {
    fs.existsSync.mockReturnValue(true);
    fs.promises.readFile.mockResolvedValue(JSON.stringify({ test: 'value' }));
    const result = await getSpec('test.json');
    expect(result).toEqual({ test: 'value' });
  });

  it('returns false when file does not exist', async () => {
    fs.existsSync.mockReturnValue(false);
    const result = await getSpec('test.json');
    expect(result).toBe(false);
  });
});

describe('createOpenAPIPlugin', () => {
  it('returns null when getSpec throws an error', async () => {
    const result = await createOpenAPIPlugin({ data: { api: { url: 'invalid' } } });
    expect(result).toBe(null);
  });

  it('returns null when no spec is found', async () => {
    const result = await createOpenAPIPlugin({});
    expect(result).toBe(null);
  });

  // Add more tests here for different scenarios
});
@@ -10,6 +10,7 @@ const StructuredACS = require('./structured/AzureAISearch');
const StructuredSD = require('./structured/StableDiffusion');
const GoogleSearchAPI = require('./structured/GoogleSearch');
const TraversaalSearch = require('./structured/TraversaalSearch');
const createOpenAIImageTools = require('./structured/OpenAIImageTools');
const TavilySearchResults = require('./structured/TavilySearchResults');

/** @type {Record<string, TPlugin | undefined>} */
@@ -40,4 +41,5 @@ module.exports = {
  StructuredWolfram,
  createYouTubeTools,
  TavilySearchResults,
  createOpenAIImageTools,
};

@@ -44,6 +44,20 @@
      }
    ]
  },
  {
    "name": "OpenAI Image Tools",
    "pluginKey": "image_gen_oai",
    "toolkit": true,
    "description": "Image Generation and Editing using OpenAI's latest state-of-the-art models",
    "icon": "/assets/image_gen_oai.png",
    "authConfig": [
      {
        "authField": "IMAGE_GEN_OAI_API_KEY",
        "label": "OpenAI Image Tools API Key",
        "description": "Your OpenAI API Key for Image Generation and Editing"
      }
    ]
  },
  {
    "name": "Wolfram",
    "pluginKey": "wolfram",

@@ -8,10 +8,10 @@ const { HttpsProxyAgent } = require('https-proxy-agent');
const { FileContext, ContentTypes } = require('librechat-data-provider');
const { getImageBasename } = require('~/server/services/Files/images');
const extractBaseURL = require('~/utils/extractBaseURL');
const { logger } = require('~/config');
const logger = require('~/config/winston');

const displayMessage =
  'DALL-E displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';
  "DALL-E displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.";
class DALLE3 extends Tool {
  constructor(fields = {}) {
    super();

519 api/app/clients/tools/structured/OpenAIImageTools.js (Normal file)
@@ -0,0 +1,519 @@
const { z } = require('zod');
const axios = require('axios');
const { v4 } = require('uuid');
const OpenAI = require('openai');
const FormData = require('form-data');
const { tool } = require('@langchain/core/tools');
const { logAxiosError } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { ContentTypes, EImageOutputType } = require('librechat-data-provider');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { extractBaseURL } = require('~/utils');
const { getFiles } = require('~/models/File');

/** Default descriptions for image generation tool */
const DEFAULT_IMAGE_GEN_DESCRIPTION = `
Generates high-quality, original images based solely on text, not using any uploaded reference images.

When to use \`image_gen_oai\`:
- To create entirely new images from detailed text descriptions that do NOT reference any image files.

When NOT to use \`image_gen_oai\`:
- If the user has uploaded any images and requests modifications, enhancements, or remixing based on those uploads → use \`image_edit_oai\` instead.

Generated image IDs will be returned in the response, so you can refer to them in future requests made to \`image_edit_oai\`.
`.trim();

/** Default description for image editing tool */
const DEFAULT_IMAGE_EDIT_DESCRIPTION =
  `Generates high-quality, original images based on text and one or more uploaded/referenced images.

When to use \`image_edit_oai\`:
- The user wants to modify, extend, or remix one **or more** uploaded images, either:
  - Previously generated, or in the current request (both to be included in the \`image_ids\` array).
- Always when the user refers to uploaded images for editing, enhancement, remixing, style transfer, or combining elements.
- Any current or existing images are to be used as visual guides.
- If there are any files in the current request, they are more likely than not expected as references for image edit requests.

When NOT to use \`image_edit_oai\`:
- Brand-new generations that do not rely on an existing image → use \`image_gen_oai\` instead.

Both generated and referenced image IDs will be returned in the response, so you can refer to them in future requests made to \`image_edit_oai\`.
`.trim();

/** Default prompt descriptions */
const DEFAULT_IMAGE_GEN_PROMPT_DESCRIPTION = `Describe the image you want in detail.
Be highly specific—break your idea into layers:
(1) main concept and subject,
(2) composition and position,
(3) lighting and mood,
(4) style, medium, or camera details,
(5) important features (age, expression, clothing, etc.),
(6) background.
Use positive, descriptive language and specify what should be included, not what to avoid.
List number and characteristics of people/objects, and mention style/technical requirements (e.g., "DSLR photo, 85mm lens, golden hour").
Do not reference any uploaded images—use for new image creation from text only.`;

const DEFAULT_IMAGE_EDIT_PROMPT_DESCRIPTION = `Describe the changes, enhancements, or new ideas to apply to the uploaded image(s).
Be highly specific—break your request into layers:
(1) main concept or transformation,
(2) specific edits/replacements or composition guidance,
(3) desired style, mood, or technique,
(4) features/items to keep, change, or add (such as objects, people, clothing, lighting, etc.).
Use positive, descriptive language and clarify what should be included or changed, not what to avoid.
Always base this prompt on the most recently uploaded reference images.`;

const displayMessage =
  "The tool displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.";

/**
 * Replaces unwanted characters from the input string
 * @param {string} inputString - The input string to process
 * @returns {string} - The processed string
 */
function replaceUnwantedChars(inputString) {
  return inputString
    .replace(/\r\n|\r|\n/g, ' ')
    .replace(/"/g, '')
    .trim();
}

function returnValue(value) {
  if (typeof value === 'string') {
    return [value, {}];
  } else if (typeof value === 'object') {
    if (Array.isArray(value)) {
      return value;
    }
    return [displayMessage, value];
  }
  return value;
}

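// Shapes produced by returnValue (derived from the branches above):
//   returnValue('error text')          -> ['error text', {}]
//   returnValue([text, artifact])      -> [text, artifact] (arrays pass through)
//   returnValue({ content, file_ids }) -> [displayMessage, { content, file_ids }]
// These tuples match the `content_and_artifact` response format used by both tools below.
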
const getImageGenDescription = () => {
  return process.env.IMAGE_GEN_OAI_DESCRIPTION || DEFAULT_IMAGE_GEN_DESCRIPTION;
};

const getImageEditDescription = () => {
  return process.env.IMAGE_EDIT_OAI_DESCRIPTION || DEFAULT_IMAGE_EDIT_DESCRIPTION;
};

const getImageGenPromptDescription = () => {
  return process.env.IMAGE_GEN_OAI_PROMPT_DESCRIPTION || DEFAULT_IMAGE_GEN_PROMPT_DESCRIPTION;
};

const getImageEditPromptDescription = () => {
  return process.env.IMAGE_EDIT_OAI_PROMPT_DESCRIPTION || DEFAULT_IMAGE_EDIT_PROMPT_DESCRIPTION;
};

/**
 * Creates OpenAI Image tools (generation and editing)
 * @param {Object} fields - Configuration fields
 * @param {ServerRequest} fields.req - The server request object
 * @param {boolean} fields.isAgent - Whether the tool is being used in an agent context
 * @param {string} fields.IMAGE_GEN_OAI_API_KEY - The OpenAI API key
 * @param {boolean} [fields.override] - Whether to override the API key check, necessary for app initialization
 * @param {MongoFile[]} [fields.imageFiles] - The images to be used for editing
 * @returns {Array} - Array of image tools
 */
function createOpenAIImageTools(fields = {}) {
  /** @type {boolean} Used to initialize the Tool without necessary variables. */
  const override = fields.override ?? false;
  if (!override && !fields.isAgent) {
    throw new Error('This tool is only available for agents.');
  }
  const { req } = fields;
  const imageOutputType = req?.app.locals.imageOutputType || EImageOutputType.PNG;
  const appFileStrategy = req?.app.locals.fileStrategy;

  const getApiKey = () => {
    const apiKey = process.env.IMAGE_GEN_OAI_API_KEY ?? '';
    if (!apiKey && !override) {
      throw new Error('Missing IMAGE_GEN_OAI_API_KEY environment variable.');
    }
    return apiKey;
  };

  let apiKey = fields.IMAGE_GEN_OAI_API_KEY ?? getApiKey();
  const closureConfig = { apiKey };

  let baseURL = 'https://api.openai.com/v1/';
  if (!override && process.env.IMAGE_GEN_OAI_BASEURL) {
    baseURL = extractBaseURL(process.env.IMAGE_GEN_OAI_BASEURL);
    closureConfig.baseURL = baseURL;
  }

  // Note: Azure may not yet support the latest image generation models
  if (
    !override &&
    process.env.IMAGE_GEN_OAI_AZURE_API_VERSION &&
    process.env.IMAGE_GEN_OAI_BASEURL
  ) {
    baseURL = process.env.IMAGE_GEN_OAI_BASEURL;
    closureConfig.baseURL = baseURL;
    closureConfig.defaultQuery = { 'api-version': process.env.IMAGE_GEN_OAI_AZURE_API_VERSION };
    closureConfig.defaultHeaders = {
      'api-key': process.env.IMAGE_GEN_OAI_API_KEY,
      'Content-Type': 'application/json',
    };
    closureConfig.apiKey = process.env.IMAGE_GEN_OAI_API_KEY;
  }
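  // Illustrative Azure routing (hypothetical values, not part of the original file): with
  //   IMAGE_GEN_OAI_BASEURL=https://my-resource.openai.azure.com/openai/deployments/gpt-image-1
  //   IMAGE_GEN_OAI_AZURE_API_VERSION=2025-04-01-preview
  // the closure config above appends ?api-version=... to each request and authenticates
  // with the `api-key` header instead of the `Authorization: Bearer` header used for
  // api.openai.com.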

  const imageFiles = fields.imageFiles ?? [];

  /**
   * Image Generation Tool
   */
  const imageGenTool = tool(
    async (
      {
        prompt,
        background = 'auto',
        n = 1,
        output_compression = 100,
        quality = 'auto',
        size = 'auto',
      },
      runnableConfig,
    ) => {
      if (!prompt) {
        throw new Error('Missing required field: prompt');
      }
      const clientConfig = { ...closureConfig };
      if (process.env.PROXY) {
        clientConfig.httpAgent = new HttpsProxyAgent(process.env.PROXY);
      }

      /** @type {OpenAI} */
      const openai = new OpenAI(clientConfig);
      let output_format = imageOutputType;
      if (
        background === 'transparent' &&
        output_format !== EImageOutputType.PNG &&
        output_format !== EImageOutputType.WEBP
      ) {
        logger.warn(
          '[ImageGenOAI] Transparent background requires PNG or WebP format, defaulting to PNG',
        );
        output_format = EImageOutputType.PNG;
      }

      let resp;
      try {
        const derivedSignal = runnableConfig?.signal
          ? AbortSignal.any([runnableConfig.signal])
          : undefined;
        resp = await openai.images.generate(
          {
            model: 'gpt-image-1',
            prompt: replaceUnwantedChars(prompt),
            n: Math.min(Math.max(1, n), 10),
            background,
            output_format,
            output_compression:
              output_format === EImageOutputType.WEBP || output_format === EImageOutputType.JPEG
                ? output_compression
                : undefined,
            quality,
            size,
          },
          {
            signal: derivedSignal,
          },
        );
      } catch (error) {
        const message = '[image_gen_oai] Problem generating the image:';
        logAxiosError({ error, message });
        return returnValue(`Something went wrong when trying to generate the image. The OpenAI API may be unavailable:
Error Message: ${error.message}`);
      }

      if (!resp) {
        return returnValue(
          'Something went wrong when trying to generate the image. The OpenAI API may be unavailable',
        );
      }

      // For gpt-image-1, the response contains base64-encoded images
      // TODO: handle cost in `resp.usage`
      const base64Image = resp.data[0].b64_json;

      if (!base64Image) {
        return returnValue(
          'No image data returned from OpenAI API. There may be a problem with the API or your configuration.',
        );
      }

      const content = [
        {
          type: ContentTypes.IMAGE_URL,
          image_url: {
            url: `data:image/${output_format};base64,${base64Image}`,
          },
        },
      ];

      const file_ids = [v4()];
      const response = [
        {
          type: ContentTypes.TEXT,
          text: displayMessage + `\n\ngenerated_image_id: "${file_ids[0]}"`,
        },
      ];
      return [response, { content, file_ids }];
    },
    {
      name: 'image_gen_oai',
      description: getImageGenDescription(),
      schema: z.object({
        prompt: z.string().max(32000).describe(getImageGenPromptDescription()),
        background: z
          .enum(['transparent', 'opaque', 'auto'])
          .optional()
          .describe(
            'Sets transparency for the background. Must be one of transparent, opaque or auto (default). When transparent, the output format should be png or webp.',
          ),
        /*
        n: z
          .number()
          .int()
          .min(1)
          .max(10)
          .optional()
          .describe('The number of images to generate. Must be between 1 and 10.'),
        output_compression: z
          .number()
          .int()
          .min(0)
          .max(100)
          .optional()
          .describe('The compression level (0-100%) for webp or jpeg formats. Defaults to 100.'),
        */
        quality: z
          .enum(['auto', 'high', 'medium', 'low'])
          .optional()
          .describe('The quality of the image. One of auto (default), high, medium, or low.'),
        size: z
          .enum(['auto', '1024x1024', '1536x1024', '1024x1536'])
          .optional()
          .describe(
            'The size of the generated image. One of 1024x1024, 1536x1024 (landscape), 1024x1536 (portrait), or auto (default).',
          ),
      }),
      responseFormat: 'content_and_artifact',
    },
  );
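  // Invocation sketch (assumed wiring, values hypothetical): tools created with `tool()`
  // from @langchain/core/tools expose `invoke`; with responseFormat
  // 'content_and_artifact', the handler's [response, artifact] tuple is split into the
  // tool message content and an out-of-band artifact.
  //
  //   const [genTool] = createOpenAIImageTools({ req, isAgent: true });
  //   const result = await genTool.invoke({
  //     prompt: 'A watercolor lighthouse at dusk',
  //     size: '1024x1536',
  //     quality: 'high',
  //   });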

  /**
   * Image Editing Tool
   */
  const imageEditTool = tool(
    async ({ prompt, image_ids, quality = 'auto', size = 'auto' }, runnableConfig) => {
      if (!prompt) {
        throw new Error('Missing required field: prompt');
      }

      const clientConfig = { ...closureConfig };
      if (process.env.PROXY) {
        clientConfig.httpAgent = new HttpsProxyAgent(process.env.PROXY);
      }

      const formData = new FormData();
      formData.append('model', 'gpt-image-1');
      formData.append('prompt', replaceUnwantedChars(prompt));
      // TODO: `mask` support
      // TODO: more than 1 image support
      // formData.append('n', n.toString());
      formData.append('quality', quality);
      formData.append('size', size);

      /** @type {Record<FileSources, undefined | NodeStreamDownloader<File>>} */
      const streamMethods = {};

      const requestFilesMap = Object.fromEntries(imageFiles.map((f) => [f.file_id, { ...f }]));

      const orderedFiles = new Array(image_ids.length);
      const idsToFetch = [];
      const indexOfMissing = Object.create(null);

      for (let i = 0; i < image_ids.length; i++) {
        const id = image_ids[i];
        const file = requestFilesMap[id];

        if (file) {
          orderedFiles[i] = file;
        } else {
          idsToFetch.push(id);
          indexOfMissing[id] = i;
        }
      }

      if (idsToFetch.length) {
        const fetchedFiles = await getFiles(
          {
            user: req.user.id,
            file_id: { $in: idsToFetch },
            height: { $exists: true },
            width: { $exists: true },
          },
          {},
          {},
        );

        for (const file of fetchedFiles) {
          requestFilesMap[file.file_id] = file;
          orderedFiles[indexOfMissing[file.file_id]] = file;
        }
      }
      for (const imageFile of orderedFiles) {
        if (!imageFile) {
          continue;
        }
        /** @type {NodeStream<File>} */
        let stream;
        /** @type {NodeStreamDownloader<File>} */
        let getDownloadStream;
        const source = imageFile.source || appFileStrategy;
        if (!source) {
          throw new Error('No source found for image file');
        }
        if (streamMethods[source]) {
          getDownloadStream = streamMethods[source];
        } else {
          ({ getDownloadStream } = getStrategyFunctions(source));
          streamMethods[source] = getDownloadStream;
        }
        if (!getDownloadStream) {
          throw new Error(`No download stream method found for source: ${source}`);
        }
        stream = await getDownloadStream(req, imageFile.filepath);
        if (!stream) {
          throw new Error('Failed to get download stream for image file');
        }
        formData.append('image[]', stream, {
          filename: imageFile.filename,
          contentType: imageFile.type,
        });
      }

      /** @type {import('axios').RawAxiosHeaders} */
      let headers = {
        ...formData.getHeaders(),
      };

      if (process.env.IMAGE_GEN_OAI_AZURE_API_VERSION && process.env.IMAGE_GEN_OAI_BASEURL) {
        headers['api-key'] = apiKey;
      } else {
        headers['Authorization'] = `Bearer ${apiKey}`;
      }

      try {
        const derivedSignal = runnableConfig?.signal
          ? AbortSignal.any([runnableConfig.signal])
          : undefined;

        /** @type {import('axios').AxiosRequestConfig} */
        const axiosConfig = {
          headers,
          ...clientConfig,
          signal: derivedSignal,
          baseURL,
        };

        if (process.env.IMAGE_GEN_OAI_AZURE_API_VERSION && process.env.IMAGE_GEN_OAI_BASEURL) {
          axiosConfig.params = {
            'api-version': process.env.IMAGE_GEN_OAI_AZURE_API_VERSION,
            ...axiosConfig.params,
          };
        }
        const response = await axios.post('/images/edits', formData, axiosConfig);

        if (!response.data || !response.data.data || !response.data.data.length) {
          return returnValue(
            'No image data returned from OpenAI API. There may be a problem with the API or your configuration.',
          );
        }

        const base64Image = response.data.data[0].b64_json;
        if (!base64Image) {
          return returnValue(
            'No image data returned from OpenAI API. There may be a problem with the API or your configuration.',
          );
        }

        const content = [
          {
            type: ContentTypes.IMAGE_URL,
            image_url: {
              url: `data:image/${imageOutputType};base64,${base64Image}`,
            },
          },
        ];

        const file_ids = [v4()];
        const textResponse = [
          {
            type: ContentTypes.TEXT,
            text:
              displayMessage +
              `\n\ngenerated_image_id: "${file_ids[0]}"\nreferenced_image_ids: ["${image_ids.join('", "')}"]`,
          },
        ];
        return [textResponse, { content, file_ids }];
      } catch (error) {
        const message = '[image_edit_oai] Problem editing the image:';
        logAxiosError({ error, message });
        return returnValue(`Something went wrong when trying to edit the image. The OpenAI API may be unavailable:
Error Message: ${error.message || 'Unknown error'}`);
      }
    },
    {
      name: 'image_edit_oai',
      description: getImageEditDescription(),
      schema: z.object({
        image_ids: z
          .array(z.string())
          .min(1)
          .describe(
            `
IDs (image ID strings) of previously generated or uploaded images that should guide the edit.

Guidelines:
- If the user's request depends on any prior image(s), copy their image IDs into the \`image_ids\` array (in the same order the user refers to them).
- Never invent or hallucinate IDs; only use IDs that are still visible in the conversation context.
- If no earlier image is relevant, omit the field entirely.
`.trim(),
          ),
        prompt: z.string().max(32000).describe(getImageEditPromptDescription()),
        /*
        n: z
          .number()
          .int()
          .min(1)
          .max(10)
          .optional()
          .describe('The number of images to generate. Must be between 1 and 10. Defaults to 1.'),
        */
        quality: z
          .enum(['auto', 'high', 'medium', 'low'])
          .optional()
          .describe(
            'The quality of the image. One of auto (default), high, medium, or low. High/medium/low only supported for gpt-image-1.',
          ),
        size: z
          .enum(['auto', '1024x1024', '1536x1024', '1024x1536', '256x256', '512x512'])
          .optional()
          .describe(
            'The size of the generated images. For gpt-image-1: auto (default), 1024x1024, 1536x1024, 1024x1536. For dall-e-2: 256x256, 512x512, 1024x1024.',
          ),
      }),
      responseFormat: 'content_and_artifact',
    },
  );

  return [imageGenTool, imageEditTool];
}

module.exports = createOpenAIImageTools;
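// Usage sketch (assumptions: called from the API workspace so `~` aliases resolve, with
// IMAGE_GEN_OAI_API_KEY set; `req` is the Express request carrying app.locals):
//
//   const createOpenAIImageTools = require('~/app/clients/tools/structured/OpenAIImageTools');
//   // Returns [imageGenTool, imageEditTool]; throws unless invoked for an agent
//   // or with `override: true` (used only during app initialization).
//   const [imageGen, imageEdit] = createOpenAIImageTools({ req, isAgent: true, imageFiles: [] });
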
@@ -43,9 +43,39 @@ class TavilySearchResults extends Tool {
        .boolean()
        .optional()
        .describe('Whether to include answers in the search results. Default is False.'),
      // include_raw_content: z.boolean().optional().describe('Whether to include raw content in the search results. Default is False.'),
      // include_domains: z.array(z.string()).optional().describe('A list of domains to specifically include in the search results.'),
      // exclude_domains: z.array(z.string()).optional().describe('A list of domains to specifically exclude from the search results.'),
      include_raw_content: z
        .boolean()
        .optional()
        .describe('Whether to include raw content in the search results. Default is False.'),
      include_domains: z
        .array(z.string())
        .optional()
        .describe('A list of domains to specifically include in the search results.'),
      exclude_domains: z
        .array(z.string())
        .optional()
        .describe('A list of domains to specifically exclude from the search results.'),
      topic: z
        .enum(['general', 'news', 'finance'])
        .optional()
        .describe(
          'The category of the search. Use news ONLY if query SPECIFICALLY mentions the word "news".',
        ),
      time_range: z
        .enum(['day', 'week', 'month', 'year', 'd', 'w', 'm', 'y'])
        .optional()
        .describe('The time range back from the current date to filter results.'),
      days: z
        .number()
        .min(1)
        .optional()
        .describe('Number of days back from the current date to include. Only if topic is news.'),
      include_image_descriptions: z
        .boolean()
        .optional()
        .describe(
          'When include_images is true, also add a descriptive text for each image. Default is false.',
        ),
    });
  }


@@ -1,10 +1,29 @@
const OpenAI = require('openai');
const DALLE3 = require('../DALLE3');

const { logger } = require('~/config');
const logger = require('~/config/winston');

jest.mock('openai');

jest.mock('@librechat/data-schemas', () => {
  return {
    logger: {
      info: jest.fn(),
      warn: jest.fn(),
      debug: jest.fn(),
      error: jest.fn(),
    },
  };
});

jest.mock('tiktoken', () => {
  return {
    encoding_for_model: jest.fn().mockReturnValue({
      encode: jest.fn(),
      decode: jest.fn(),
    }),
  };
});

const processFileURL = jest.fn();

jest.mock('~/server/services/Files/images', () => ({
@@ -37,6 +56,11 @@ jest.mock('fs', () => {
  return {
    existsSync: jest.fn(),
    mkdirSync: jest.fn(),
    promises: {
      writeFile: jest.fn(),
      readFile: jest.fn(),
      unlink: jest.fn(),
    },
  };
});


@@ -1,30 +0,0 @@
const { loadSpecs } = require('./loadSpecs');

function transformSpec(input) {
  return {
    name: input.name_for_human,
    pluginKey: input.name_for_model,
    description: input.description_for_human,
    icon: input?.logo_url ?? 'https://placehold.co/70x70.png',
    // TODO: add support for authentication
    isAuthRequired: 'false',
    authConfig: [],
  };
}
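// Example (derived from the mapping above, values hypothetical):
//   transformSpec({
//     name_for_human: 'Weather',
//     name_for_model: 'weather',
//     description_for_human: 'Get forecasts',
//   })
//   // => { name: 'Weather', pluginKey: 'weather', description: 'Get forecasts',
//   //      icon: 'https://placehold.co/70x70.png', isAuthRequired: 'false', authConfig: [] }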

async function addOpenAPISpecs(availableTools) {
  try {
    const specs = (await loadSpecs({})).map(transformSpec);
    if (specs.length > 0) {
      return [...specs, ...availableTools];
    }
    return availableTools;
  } catch (error) {
    return availableTools;
  }
}

module.exports = {
  transformSpec,
  addOpenAPISpecs,
};
@@ -1,76 +0,0 @@
const { addOpenAPISpecs, transformSpec } = require('./addOpenAPISpecs');
const { loadSpecs } = require('./loadSpecs');
const { createOpenAPIPlugin } = require('../dynamic/OpenAPIPlugin');

jest.mock('./loadSpecs');
jest.mock('../dynamic/OpenAPIPlugin');

describe('transformSpec', () => {
  it('should transform input spec to a desired format', () => {
    const input = {
      name_for_human: 'Human Name',
      name_for_model: 'Model Name',
      description_for_human: 'Human Description',
      logo_url: 'https://example.com/logo.png',
    };

    const expectedOutput = {
      name: 'Human Name',
      pluginKey: 'Model Name',
      description: 'Human Description',
      icon: 'https://example.com/logo.png',
      isAuthRequired: 'false',
      authConfig: [],
    };

    expect(transformSpec(input)).toEqual(expectedOutput);
  });

  it('should use default icon if logo_url is not provided', () => {
    const input = {
      name_for_human: 'Human Name',
      name_for_model: 'Model Name',
      description_for_human: 'Human Description',
    };

    const expectedOutput = {
      name: 'Human Name',
      pluginKey: 'Model Name',
      description: 'Human Description',
      icon: 'https://placehold.co/70x70.png',
      isAuthRequired: 'false',
      authConfig: [],
    };

    expect(transformSpec(input)).toEqual(expectedOutput);
  });
});

describe('addOpenAPISpecs', () => {
  it('should add specs to available tools', async () => {
    const availableTools = ['Tool1', 'Tool2'];
    const specs = [
      {
        name_for_human: 'Human Name',
        name_for_model: 'Model Name',
        description_for_human: 'Human Description',
        logo_url: 'https://example.com/logo.png',
      },
    ];

    loadSpecs.mockResolvedValue(specs);
    createOpenAPIPlugin.mockReturnValue('Plugin');

    const result = await addOpenAPISpecs(availableTools);
    expect(result).toEqual([...specs.map(transformSpec), ...availableTools]);
  });

  it('should return available tools if specs loading fails', async () => {
    const availableTools = ['Tool1', 'Tool2'];

    loadSpecs.mockRejectedValue(new Error('Failed to load specs'));

    const result = await addOpenAPISpecs(availableTools);
    expect(result).toEqual(availableTools);
  });
});
@@ -135,7 +135,7 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
      query: z
        .string()
        .describe(
          'A natural language query to search for relevant information in the files. Be specific and use keywords related to the information you\'re looking for. The query will be used for semantic similarity matching against the file contents.',
          "A natural language query to search for relevant information in the files. Be specific and use keywords related to the information you're looking for. The query will be used for semantic similarity matching against the file contents.",
        ),
    }),
  },

@@ -1,8 +1,14 @@
const { Tools, Constants } = require('librechat-data-provider');
const { mcpToolPattern } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { SerpAPI } = require('@langchain/community/tools/serpapi');
const { Calculator } = require('@langchain/community/tools/calculator');
const { createCodeExecutionTool, EnvVar } = require('@librechat/agents');
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents');
const {
  Tools,
  EToolResources,
  loadWebSearchAuth,
  replaceSpecialVars,
} = require('librechat-data-provider');
const {
  availableTools,
  manifestToolMap,
@@ -18,15 +24,14 @@ const {
  StructuredWolfram,
  createYouTubeTools,
  TavilySearchResults,
  createOpenAIImageTools,
} = require('../');
const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { getCachedTools } = require('~/server/services/Config');
const { createMCPTool } = require('~/server/services/MCP');
const { loadSpecs } = require('./loadSpecs');
const { logger } = require('~/config');

const mcpToolPattern = new RegExp(`^.+${Constants.mcp_delimiter}.+$`);

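// Example (sketch): the pattern matches tool keys that embed an MCP server name via
// Constants.mcp_delimiter; assuming the delimiter is '_mcp_', a key like
// 'read_file_mcp_filesystem' matches while a plain 'read_file' does not.
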
/**
 * Validates the availability and authentication of tools for a user based on environment variables or user-specific plugin authentication values.
@@ -87,7 +92,7 @@ const validateTools = async (user, tools = []) => {
    return Array.from(validToolsSet.values());
  } catch (err) {
    logger.error('[validateTools] There was a problem validating tools', err);
    throw new Error('There was a problem validating tools');
    throw new Error(err);
  }
};

@@ -123,7 +128,7 @@ const getAuthFields = (toolKey) => {
 *
 * @param {object} object
 * @param {string} object.user
 * @param {Agent} [object.agent]
 * @param {Pick<Agent, 'id' | 'provider' | 'model'>} [object.agent]
 * @param {string} [object.model]
 * @param {EModelEndpoint} [object.endpoint]
 * @param {LoadToolOptions} [object.options]
@@ -138,7 +143,6 @@ const loadTools = async ({
  agent,
  model,
  endpoint,
  useSpecs,
  tools = [],
  options = {},
  functions = true,
@@ -157,7 +161,7 @@ const loadTools = async ({
  };

  const customConstructors = {
    serpapi: async () => {
    serpapi: async (_toolContextMap) => {
      const authFields = getAuthFields('serpapi');
      let envVar = authFields[0] ?? '';
      let apiKey = process.env[envVar];
@@ -170,11 +174,40 @@ const loadTools = async ({
        gl: 'us',
      });
    },
    youtube: async () => {
    youtube: async (_toolContextMap) => {
      const authFields = getAuthFields('youtube');
      const authValues = await loadAuthValues({ userId: user, authFields });
      return createYouTubeTools(authValues);
    },
    image_gen_oai: async (toolContextMap) => {
      const authFields = getAuthFields('image_gen_oai');
      const authValues = await loadAuthValues({ userId: user, authFields });
      const imageFiles = options.tool_resources?.[EToolResources.image_edit]?.files ?? [];
      let toolContext = '';
      for (let i = 0; i < imageFiles.length; i++) {
        const file = imageFiles[i];
        if (!file) {
          continue;
        }
        if (i === 0) {
          toolContext =
            'Image files provided in this request (their image IDs listed in order of appearance) available for image editing:';
        }
        toolContext += `\n\t- ${file.file_id}`;
        if (i === imageFiles.length - 1) {
          toolContext += `\n\nInclude any you need in the \`image_ids\` array when calling \`${EToolResources.image_edit}_oai\`. You may also include previously referenced or generated image IDs.`;
        }
      }
      if (toolContext) {
        toolContextMap.image_edit_oai = toolContext;
      }
      return createOpenAIImageTools({
        ...authValues,
        isAgent: !!agent,
        req: options.req,
        imageFiles,
      });
    },
  };

  const requestedTools = {};
@@ -200,9 +233,9 @@ const loadTools = async ({
    serpapi: { location: 'Austin,Texas,United States', hl: 'en', gl: 'us' },
  };

  /** @type {Record<string, string>} */
  const toolContextMap = {};
  const remainingTools = [];
  const appTools = options.req?.app?.locals?.availableTools ?? {};
  const appTools = (await getCachedTools({ includeGlobal: true })) ?? {};

  for (const tool of tools) {
    if (tool === Tools.execute_code) {
@@ -234,10 +267,38 @@ const loadTools = async ({
        return createFileSearchTool({ req: options.req, files, entity_id: agent?.id });
      };
      continue;
    } else if (tool === Tools.web_search) {
      const webSearchConfig = options?.req?.app?.locals?.webSearch;
      const result = await loadWebSearchAuth({
        userId: user,
        loadAuthValues,
        webSearchConfig,
      });
      const { onSearchResults, onGetHighlights } = options?.[Tools.web_search] ?? {};
      requestedTools[tool] = async () => {
        toolContextMap[tool] = `# \`${tool}\`:
Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
1. **Execute immediately without preface** when using \`${tool}\`.
2. **After the search, begin with a brief summary** that directly addresses the query without headers or explaining your process.
3. **Structure your response clearly** using Markdown formatting (Level 2 headers for sections, lists for multiple points, tables for comparisons).
4. **Cite sources properly** according to the citation anchor format, utilizing group anchors when appropriate.
5. **Tailor your approach to the query type** (academic, news, coding, etc.) while maintaining an expert, journalistic, unbiased tone.
6. **Provide comprehensive information** with specific details, examples, and as much relevant context as possible from search results.
7. **Avoid moralizing language.**
`.trim();
        return createSearchTool({
          ...result.authResult,
          onSearchResults,
          onGetHighlights,
          logger,
        });
      };
      continue;
    } else if (tool && appTools[tool] && mcpToolPattern.test(tool)) {
      requestedTools[tool] = async () =>
        createMCPTool({
          req: options.req,
          res: options.res,
          toolKey: tool,
          model: agent?.model ?? model,
          provider: agent?.provider ?? endpoint,
@@ -246,7 +307,7 @@ const loadTools = async ({
    }

    if (customConstructors[tool]) {
      requestedTools[tool] = customConstructors[tool];
      requestedTools[tool] = async () => customConstructors[tool](toolContextMap);
      continue;
    }

@@ -261,30 +322,6 @@ const loadTools = async ({
      requestedTools[tool] = toolInstance;
      continue;
    }

    if (functions === true) {
      remainingTools.push(tool);
    }
  }

  let specs = null;
  if (useSpecs === true && functions === true && remainingTools.length > 0) {
    specs = await loadSpecs({
      llm: model,
      user,
      message: options.message,
      memory: options.memory,
      signal: options.signal,
      tools: remainingTools,
      map: true,
      verbose: false,
    });
  }

  for (const tool of remainingTools) {
    if (specs && specs[tool]) {
      requestedTools[tool] = specs[tool];
    }
  }

  if (returnMap) {

@@ -1,8 +1,5 @@
const mockUser = {
  _id: 'fakeId',
  save: jest.fn(),
  findByIdAndDelete: jest.fn(),
};
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');

const mockPluginService = {
  updateUserPluginAuth: jest.fn(),
@@ -10,23 +7,18 @@ const mockPluginService = {
  getUserPluginAuthValue: jest.fn(),
};

jest.mock('~/models/User', () => {
  return function () {
    return mockUser;
  };
});

jest.mock('~/server/services/PluginService', () => mockPluginService);

const { BaseLLM } = require('@langchain/openai');
const { Calculator } = require('@langchain/community/tools/calculator');

const User = require('~/models/User');
const { User } = require('~/db/models');
const PluginService = require('~/server/services/PluginService');
const { validateTools, loadTools, loadToolWithAuth } = require('./handleTools');
const { StructuredSD, availableTools, DALLE3 } = require('../');

describe('Tool Handlers', () => {
  let mongoServer;
  let fakeUser;
  const pluginKey = 'dalle';
  const pluginKey2 = 'wolfram';
@@ -37,7 +29,9 @@ describe('Tool Handlers', () => {
  const authConfigs = mainPlugin.authConfig;

  beforeAll(async () => {
    mockUser.save.mockResolvedValue(undefined);
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);

    const userAuthValues = {};
    mockPluginService.getUserPluginAuthValue.mockImplementation((userId, authField) => {
@@ -78,9 +72,36 @@ describe('Tool Handlers', () => {
  });

  afterAll(async () => {
    await mockUser.findByIdAndDelete(fakeUser._id);
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    // Clear mocks but not the database since we need the user to persist
    jest.clearAllMocks();

    // Reset the mock implementations
    const userAuthValues = {};
    mockPluginService.getUserPluginAuthValue.mockImplementation((userId, authField) => {
      return userAuthValues[`${userId}-${authField}`];
    });
    mockPluginService.updateUserPluginAuth.mockImplementation(
      (userId, authField, _pluginKey, credential) => {
        const fields = authField.split('||');
        fields.forEach((field) => {
          userAuthValues[`${userId}-${field}`] = credential;
        });
      },
    );

    // Re-add the auth configs for the user
    for (const authConfig of authConfigs) {
      await PluginService.deleteUserPluginAuth(fakeUser._id, authConfig.authField);
      await PluginService.updateUserPluginAuth(
        fakeUser._id,
        authConfig.authField,
        pluginKey,
        mockCredential,
      );
    }
  });

@@ -218,7 +239,6 @@ describe('Tool Handlers', () => {
    try {
      await loadTool2();
    } catch (error) {
      // eslint-disable-next-line jest/no-conditional-expect
      expect(error).toBeDefined();
    }
  });

@@ -1,117 +0,0 @@
const fs = require('fs');
const path = require('path');
const { z } = require('zod');
const { logger } = require('~/config');
const { createOpenAPIPlugin } = require('~/app/clients/tools/dynamic/OpenAPIPlugin');

// The minimum Manifest definition
const ManifestDefinition = z.object({
  schema_version: z.string().optional(),
  name_for_human: z.string(),
  name_for_model: z.string(),
  description_for_human: z.string(),
  description_for_model: z.string(),
  auth: z.object({}).optional(),
  api: z.object({
    // Spec URL or can be the filename of the OpenAPI spec yaml file,
    // located in api\app\clients\tools\.well-known\openapi
    url: z.string(),
    type: z.string().optional(),
    is_user_authenticated: z.boolean().nullable().optional(),
    has_user_authentication: z.boolean().nullable().optional(),
  }),
  // use to override any params that the LLM will consistently get wrong
  params: z.object({}).optional(),
  logo_url: z.string().optional(),
  contact_email: z.string().optional(),
  legal_info_url: z.string().optional(),
});

function validateJson(json) {
  try {
    return ManifestDefinition.parse(json);
  } catch (error) {
    logger.debug('[validateJson] manifest parsing error', error);
    return false;
  }
}

// omit the LLM to return the well known jsons as objects
async function loadSpecs({ llm, user, message, tools = [], map = false, memory, signal }) {
  const directoryPath = path.join(__dirname, '..', '.well-known');
  let files = [];

  for (let i = 0; i < tools.length; i++) {
    const filePath = path.join(directoryPath, tools[i] + '.json');

    try {
      // If the access Promise is resolved, it means that the file exists
      // Then we can add it to the files array
      await fs.promises.access(filePath, fs.constants.F_OK);
      files.push(tools[i] + '.json');
    } catch (err) {
      logger.error(`[loadSpecs] File ${tools[i] + '.json'} does not exist`, err);
    }
  }

  if (files.length === 0) {
    files = (await fs.promises.readdir(directoryPath)).filter(
      (file) => path.extname(file) === '.json',
    );
  }

  const validJsons = [];
  const constructorMap = {};

  logger.debug('[validateJson] files', files);

  for (const file of files) {
    if (path.extname(file) === '.json') {
      const filePath = path.join(directoryPath, file);
      const fileContent = await fs.promises.readFile(filePath, 'utf8');
      const json = JSON.parse(fileContent);

      if (!validateJson(json)) {
        logger.debug('[validateJson] Invalid json', json);
        continue;
      }

      if (llm && map) {
        constructorMap[json.name_for_model] = async () =>
          await createOpenAPIPlugin({
            data: json,
            llm,
            message,
            memory,
            signal,
            user,
          });
        continue;
      }

      if (llm) {
        validJsons.push(createOpenAPIPlugin({ data: json, llm }));
        continue;
      }

      validJsons.push(json);
    }
  }

  if (map) {
    return constructorMap;
  }

  const plugins = (await Promise.all(validJsons)).filter((plugin) => plugin);

  // logger.debug('[validateJson] plugins', plugins);
  // logger.debug(plugins[0].name);

  return plugins;
}

module.exports = {
  loadSpecs,
  validateJson,
  ManifestDefinition,
};
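// Example (sketch, hypothetical values): the two ways loadSpecs is typically consumed.
// `llm` is any LangChain chat model and `tools` lists manifest names without extension.
async function exampleLoadSpecs(llm) {
  // map: true returns { [name_for_model]: async constructor } for lazy tool creation
  const constructorMap = await loadSpecs({ llm, map: true, tools: ['myplugin'] });
  // without map, every valid manifest is instantiated into a plugin up front
  const plugins = await loadSpecs({ llm });
  return { constructorMap, plugins };
}
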
@@ -1,101 +0,0 @@
const fs = require('fs');
const { validateJson, loadSpecs, ManifestDefinition } = require('./loadSpecs');
const { createOpenAPIPlugin } = require('../dynamic/OpenAPIPlugin');

jest.mock('../dynamic/OpenAPIPlugin');

describe('ManifestDefinition', () => {
  it('should validate correct json', () => {
    const json = {
      name_for_human: 'Test',
      name_for_model: 'Test',
      description_for_human: 'Test',
      description_for_model: 'Test',
      api: {
        url: 'http://test.com',
      },
    };

    expect(() => ManifestDefinition.parse(json)).not.toThrow();
  });

  it('should not validate incorrect json', () => {
    const json = {
      name_for_human: 'Test',
      name_for_model: 'Test',
      description_for_human: 'Test',
      description_for_model: 'Test',
      api: {
        url: 123, // incorrect type
      },
    };

    expect(() => ManifestDefinition.parse(json)).toThrow();
  });
});

describe('validateJson', () => {
  it('should return parsed json if valid', () => {
    const json = {
      name_for_human: 'Test',
      name_for_model: 'Test',
      description_for_human: 'Test',
      description_for_model: 'Test',
      api: {
        url: 'http://test.com',
      },
    };

    expect(validateJson(json)).toEqual(json);
  });

  it('should return false if json is not valid', () => {
    const json = {
      name_for_human: 'Test',
      name_for_model: 'Test',
      description_for_human: 'Test',
      description_for_model: 'Test',
      api: {
        url: 123, // incorrect type
      },
    };

    expect(validateJson(json)).toEqual(false);
  });
});

describe('loadSpecs', () => {
  beforeEach(() => {
    jest.spyOn(fs.promises, 'readdir').mockResolvedValue(['test.json']);
    jest.spyOn(fs.promises, 'readFile').mockResolvedValue(
      JSON.stringify({
        name_for_human: 'Test',
        name_for_model: 'Test',
        description_for_human: 'Test',
        description_for_model: 'Test',
        api: {
          url: 'http://test.com',
        },
      }),
    );
    createOpenAPIPlugin.mockResolvedValue({});
  });

  afterEach(() => {
    jest.restoreAllMocks();
  });

  it('should return plugins', async () => {
    const plugins = await loadSpecs({ llm: true, verbose: false });

    expect(plugins).toHaveLength(1);
    expect(createOpenAPIPlugin).toHaveBeenCalledTimes(1);
  });

  it('should return constructorMap if map is true', async () => {
    const plugins = await loadSpecs({ llm: {}, map: true, verbose: false });

    expect(plugins).toHaveProperty('Test');
    expect(createOpenAPIPlugin).not.toHaveBeenCalled();
  });
});
api/cache/banViolation.js (vendored, 4 changes)
@@ -1,8 +1,8 @@
const { logger } = require('@librechat/data-schemas');
const { ViolationTypes } = require('librechat-data-provider');
const { isEnabled, math, removePorts } = require('~/server/utils');
const { deleteAllUserSessions } = require('~/models');
const getLogStores = require('./getLogStores');
const { logger } = require('~/config');

const { BAN_VIOLATIONS, BAN_INTERVAL } = process.env ?? {};
const interval = math(BAN_INTERVAL, 20);
@@ -32,7 +32,6 @@ const banViolation = async (req, res, errorMessage) => {
  if (!isEnabled(BAN_VIOLATIONS)) {
    return;
  }

  if (!errorMessage) {
    return;
  }
@@ -51,7 +50,6 @@ const banViolation = async (req, res, errorMessage) => {

  const banLogs = getLogStores(ViolationTypes.BAN);
  const duration = errorMessage.duration || banLogs.opts.ttl;

  if (duration <= 0) {
    return;
  }
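A brief note on the two `~/server/utils` helpers this module leans on, with illustrative (not normative) behavior; the real implementations live in that module:

  // Illustrative only.
  const interval = math(process.env.BAN_INTERVAL, 20); // numeric env value, falling back to 20
  if (isEnabled(process.env.BAN_VIOLATIONS)) {
    // the ban logic runs only when BAN_VIOLATIONS is set to a truthy string such as 'true'
  }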
api/cache/banViolation.spec.js (vendored, 60 changes)
@@ -1,48 +1,28 @@
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
const banViolation = require('./banViolation');

jest.mock('keyv');
jest.mock('../models/Session');
// Mocking the getLogStores function
jest.mock('./getLogStores', () => {
  return jest.fn().mockImplementation(() => {
    const EventEmitter = require('events');
    const { CacheKeys } = require('librechat-data-provider');
    const math = require('../server/utils/math');
    const mockGet = jest.fn();
    const mockSet = jest.fn();
    class KeyvMongo extends EventEmitter {
      constructor(url = 'mongodb://127.0.0.1:27017', options) {
        super();
        this.ttlSupport = false;
        url = url ?? {};
        if (typeof url === 'string') {
          url = { url };
        }
        if (url.uri) {
          url = { url: url.uri, ...url };
        }
        this.opts = {
          url,
          collection: 'keyv',
          ...url,
          ...options,
        };
      }

      get = mockGet;
      set = mockSet;
    }

    return new KeyvMongo('', {
      namespace: CacheKeys.BANS,
      ttl: math(process.env.BAN_DURATION, 7200000),
    });
  });
});
// Mock deleteAllUserSessions since we're testing ban logic, not session deletion
jest.mock('~/models', () => ({
  ...jest.requireActual('~/models'),
  deleteAllUserSessions: jest.fn().mockResolvedValue(true),
}));

describe('banViolation', () => {
  let mongoServer;
  let req, res, errorMessage;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(() => {
    req = {
      ip: '127.0.0.1',
@@ -55,7 +35,7 @@ describe('banViolation', () => {
    };
    errorMessage = {
      type: 'someViolation',
      user_id: '12345',
      user_id: new mongoose.Types.ObjectId().toString(), // Use valid ObjectId
      prev_count: 0,
      violation_count: 0,
    };
api/cache/clearPendingReq.js (vendored, 9 changes)
@@ -1,7 +1,8 @@
const { Time, CacheKeys } = require('librechat-data-provider');
const { isEnabled } = require('~/server/utils');
const getLogStores = require('./getLogStores');
const { isEnabled } = require('../server/utils');

const { USE_REDIS, LIMIT_CONCURRENT_MESSAGES } = process.env ?? {};
const ttl = 1000 * 60 * 1;

/**
 * Clear or decrement pending requests from the cache.
@@ -28,7 +29,7 @@ const clearPendingReq = async ({ userId, cache: _cache }) => {
    return;
  }

  const namespace = 'pending_req';
  const namespace = CacheKeys.PENDING_REQ;
  const cache = _cache ?? getLogStores(namespace);

  if (!cache) {
@@ -39,7 +40,7 @@ const clearPendingReq = async ({ userId, cache: _cache }) => {
  const currentReq = +((await cache.get(key)) ?? 0);

  if (currentReq && currentReq >= 1) {
    await cache.set(key, currentReq - 1, ttl);
    await cache.set(key, currentReq - 1, Time.ONE_MINUTE);
  } else {
    await cache.delete(key);
  }
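The decrement-or-delete pattern above, restated as a self-contained sketch with a plain Map standing in for the Keyv-backed cache (the key format is illustrative):

  const cache = new Map();
  const key = 'pending_req:user-123';
  cache.set(key, 2); // two concurrent requests recorded

  function clearOne() {
    const currentReq = +(cache.get(key) ?? 0);
    if (currentReq && currentReq >= 1) {
      cache.set(key, currentReq - 1); // one request finished; keep the remainder
    } else {
      cache.delete(key); // nothing pending; remove the key outright
    }
  }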
api/cache/getLogStores.js (vendored, 18 changes)
@@ -1,7 +1,7 @@
const Keyv = require('keyv');
const { Keyv } = require('keyv');
const { CacheKeys, ViolationTypes, Time } = require('librechat-data-provider');
const { logFile, violationFile } = require('./keyvFiles');
const { math, isEnabled } = require('~/server/utils');
const { isEnabled, math } = require('~/server/utils');
const keyvRedis = require('./keyvRedis');
const keyvMongo = require('./keyvMongo');

@@ -19,7 +19,7 @@ const createViolationInstance = (namespace) => {
// Serve cache from memory so no need to clear it on startup/exit
const pending_req = isRedisEnabled
  ? new Keyv({ store: keyvRedis })
  : new Keyv({ namespace: 'pending_req' });
  : new Keyv({ namespace: CacheKeys.PENDING_REQ });

const config = isRedisEnabled
  ? new Keyv({ store: keyvRedis })
@@ -29,6 +29,10 @@ const roles = isRedisEnabled
  ? new Keyv({ store: keyvRedis })
  : new Keyv({ namespace: CacheKeys.ROLES });

const mcpTools = isRedisEnabled
  ? new Keyv({ store: keyvRedis })
  : new Keyv({ namespace: CacheKeys.MCP_TOOLS });

const audioRuns = isRedisEnabled
  ? new Keyv({ store: keyvRedis, ttl: Time.TEN_MINUTES })
  : new Keyv({ namespace: CacheKeys.AUDIO_RUNS, ttl: Time.TEN_MINUTES });
@@ -61,10 +65,15 @@ const abortKeys = isRedisEnabled
  ? new Keyv({ store: keyvRedis })
  : new Keyv({ namespace: CacheKeys.ABORT_KEYS, ttl: Time.TEN_MINUTES });

const openIdExchangedTokensCache = isRedisEnabled
  ? new Keyv({ store: keyvRedis, ttl: Time.TEN_MINUTES })
  : new Keyv({ namespace: CacheKeys.OPENID_EXCHANGED_TOKENS, ttl: Time.TEN_MINUTES });

const namespaces = {
  [CacheKeys.ROLES]: roles,
  [CacheKeys.MCP_TOOLS]: mcpTools,
  [CacheKeys.CONFIG_STORE]: config,
  pending_req,
  [CacheKeys.PENDING_REQ]: pending_req,
  [ViolationTypes.BAN]: new Keyv({ store: keyvMongo, namespace: CacheKeys.BANS, ttl: duration }),
  [CacheKeys.ENCODED_DOMAINS]: new Keyv({
    store: keyvMongo,
@@ -98,6 +107,7 @@ const namespaces = {
  [CacheKeys.AUDIO_RUNS]: audioRuns,
  [CacheKeys.MESSAGES]: messages,
  [CacheKeys.FLOWS]: flows,
  [CacheKeys.OPENID_EXCHANGED_TOKENS]: openIdExchangedTokensCache,
};

/**
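Consumer-side, each namespace resolves to one of the Keyv instances above; the sketch below mirrors how clearPendingReq.js already uses the registry (the key string is illustrative):

  const { Time, CacheKeys } = require('librechat-data-provider');
  const getLogStores = require('~/cache/getLogStores');

  async function touchPending(userId) {
    const cache = getLogStores(CacheKeys.PENDING_REQ); // Redis-backed when USE_REDIS is on
    await cache.set(`pending_req:${userId}`, 1, Time.ONE_MINUTE); // value plus TTL
    return cache.get(`pending_req:${userId}`);
  }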
api/cache/ioredisClient.js (vendored, new file, 92 lines)
@@ -0,0 +1,92 @@
const fs = require('fs');
const Redis = require('ioredis');
const { isEnabled } = require('~/server/utils');
const logger = require('~/config/winston');

const { REDIS_URI, USE_REDIS, USE_REDIS_CLUSTER, REDIS_CA, REDIS_MAX_LISTENERS } = process.env;

/** @type {import('ioredis').Redis | import('ioredis').Cluster} */
let ioredisClient;
const redis_max_listeners = Number(REDIS_MAX_LISTENERS) || 40;

function mapURI(uri) {
  const regex =
    /^(?:(?<scheme>\w+):\/\/)?(?:(?<user>[^:@]+)(?::(?<password>[^@]+))?@)?(?<host>[\w.-]+)(?::(?<port>\d{1,5}))?$/;
  const match = uri.match(regex);

  if (match) {
    const { scheme, user, password, host, port } = match.groups;

    return {
      scheme: scheme || 'none',
      user: user || null,
      password: password || null,
      host: host || null,
      port: port || null,
    };
  } else {
    const parts = uri.split(':');
    if (parts.length === 2) {
      return {
        scheme: 'none',
        user: null,
        password: null,
        host: parts[0],
        port: parts[1],
      };
    }

    return {
      scheme: 'none',
      user: null,
      password: null,
      host: uri,
      port: null,
    };
  }
}

if (REDIS_URI && isEnabled(USE_REDIS)) {
  let redisOptions = null;

  if (REDIS_CA) {
    const ca = fs.readFileSync(REDIS_CA);
    redisOptions = { tls: { ca } };
  }

  if (isEnabled(USE_REDIS_CLUSTER)) {
    const hosts = REDIS_URI.split(',').map((item) => {
      var value = mapURI(item);

      return {
        host: value.host,
        port: value.port,
      };
    });
    ioredisClient = new Redis.Cluster(hosts, { redisOptions });
  } else {
    ioredisClient = new Redis(REDIS_URI, redisOptions);
  }

  ioredisClient.on('ready', () => {
    logger.info('IoRedis connection ready');
  });
  ioredisClient.on('reconnecting', () => {
    logger.info('IoRedis connection reconnecting');
  });
  ioredisClient.on('end', () => {
    logger.info('IoRedis connection ended');
  });
  ioredisClient.on('close', () => {
    logger.info('IoRedis connection closed');
  });
  ioredisClient.on('error', (err) => logger.error('IoRedis connection error:', err));
  ioredisClient.setMaxListeners(redis_max_listeners);
  logger.info(
    '[Optional] IoRedis initialized for rate limiters. If you have issues, disable Redis or restart the server.',
  );
} else {
  logger.info('[Optional] IoRedis not initialized for rate limiters.');
}

module.exports = ioredisClient;
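Worked examples of the `mapURI` helper above (outputs derived from the regex; these calls are not additional code in the PR):

  mapURI('redis://user:pass@cache.internal:6379');
  // -> { scheme: 'redis', user: 'user', password: 'pass', host: 'cache.internal', port: '6379' }
  mapURI('cache.internal:6379');
  // -> { scheme: 'none', user: null, password: null, host: 'cache.internal', port: '6379' }
  mapURI('cache.internal');
  // -> { scheme: 'none', user: null, password: null, host: 'cache.internal', port: null }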
api/cache/keyvFiles.js (vendored, 6 changes)
@@ -1,11 +1,9 @@
const { KeyvFile } = require('keyv-file');

const logFile = new KeyvFile({ filename: './data/logs.json' });
const pendingReqFile = new KeyvFile({ filename: './data/pendingReqCache.json' });
const violationFile = new KeyvFile({ filename: './data/violations.json' });
const logFile = new KeyvFile({ filename: './data/logs.json' }).setMaxListeners(20);
const violationFile = new KeyvFile({ filename: './data/violations.json' }).setMaxListeners(20);

module.exports = {
  logFile,
  pendingReqFile,
  violationFile,
};
api/cache/keyvMongo.js (vendored, 269 changes)
@@ -1,9 +1,272 @@
const KeyvMongo = require('@keyv/mongo');
// api/cache/keyvMongo.js
const mongoose = require('mongoose');
const EventEmitter = require('events');
const { GridFSBucket } = require('mongodb');
const { logger } = require('~/config');

const { MONGO_URI } = process.env ?? {};
const storeMap = new Map();

class KeyvMongoCustom extends EventEmitter {
  constructor(url, options = {}) {
    super();

    url = url || {};
    if (typeof url === 'string') {
      url = { url };
    }
    if (url.uri) {
      url = { url: url.uri, ...url };
    }

    this.opts = {
      url: 'mongodb://127.0.0.1:27017',
      collection: 'keyv',
      ...url,
      ...options,
    };

    this.ttlSupport = false;

    // Filter valid options
    const keyvMongoKeys = new Set([
      'url',
      'collection',
      'namespace',
      'serialize',
      'deserialize',
      'uri',
      'useGridFS',
      'dialect',
    ]);
    this.opts = Object.fromEntries(Object.entries(this.opts).filter(([k]) => keyvMongoKeys.has(k)));
  }

  // Helper to access the store WITHOUT storing a promise on the instance
  _getClient() {
    const storeKey = `${this.opts.collection}:${this.opts.useGridFS ? 'gridfs' : 'collection'}`;

    // If we already have the store initialized, return it directly
    if (storeMap.has(storeKey)) {
      return Promise.resolve(storeMap.get(storeKey));
    }

    // Check mongoose connection state
    if (mongoose.connection.readyState !== 1) {
      return Promise.reject(
        new Error('Mongoose connection not ready. Ensure connectDb() is called first.'),
      );
    }

    try {
      const db = mongoose.connection.db;
      let client;

      if (this.opts.useGridFS) {
        const bucket = new GridFSBucket(db, {
          readPreference: this.opts.readPreference,
          bucketName: this.opts.collection,
        });
        const store = db.collection(`${this.opts.collection}.files`);
        client = { bucket, store, db };
      } else {
        const collection = this.opts.collection || 'keyv';
        const store = db.collection(collection);
        client = { store, db };
      }

      storeMap.set(storeKey, client);
      return Promise.resolve(client);
    } catch (error) {
      this.emit('error', error);
      return Promise.reject(error);
    }
  }

  async get(key) {
    const client = await this._getClient();

    if (this.opts.useGridFS) {
      await client.store.updateOne(
        {
          filename: key,
        },
        {
          $set: {
            'metadata.lastAccessed': new Date(),
          },
        },
      );

      const stream = client.bucket.openDownloadStreamByName(key);

      return new Promise((resolve) => {
        const resp = [];
        stream.on('error', () => {
          resolve(undefined);
        });

        stream.on('end', () => {
          const data = Buffer.concat(resp).toString('utf8');
          resolve(data);
        });

        stream.on('data', (chunk) => {
          resp.push(chunk);
        });
      });
    }

    const document = await client.store.findOne({ key: { $eq: key } });

    if (!document) {
      return undefined;
    }

    return document.value;
  }

  async getMany(keys) {
    const client = await this._getClient();

    if (this.opts.useGridFS) {
      const promises = [];
      for (const key of keys) {
        promises.push(this.get(key));
      }

      const values = await Promise.allSettled(promises);
      const data = [];
      for (const value of values) {
        data.push(value.value);
      }

      return data;
    }

    const values = await client.store
      .find({ key: { $in: keys } })
      .project({ _id: 0, value: 1, key: 1 })
      .toArray();

    const results = [...keys];
    let i = 0;
    for (const key of keys) {
      const rowIndex = values.findIndex((row) => row.key === key);
      results[i] = rowIndex > -1 ? values[rowIndex].value : undefined;
      i++;
    }

    return results;
  }

  async set(key, value, ttl) {
    const client = await this._getClient();
    const expiresAt = typeof ttl === 'number' ? new Date(Date.now() + ttl) : null;

    if (this.opts.useGridFS) {
      const stream = client.bucket.openUploadStream(key, {
        metadata: {
          expiresAt,
          lastAccessed: new Date(),
        },
      });

      return new Promise((resolve) => {
        stream.on('finish', () => {
          resolve(stream);
        });
        stream.end(value);
      });
    }

    await client.store.updateOne(
      { key: { $eq: key } },
      { $set: { key, value, expiresAt } },
      { upsert: true },
    );
  }

  async delete(key) {
    if (typeof key !== 'string') {
      return false;
    }

    const client = await this._getClient();

    if (this.opts.useGridFS) {
      try {
        const bucket = new GridFSBucket(client.db, {
          bucketName: this.opts.collection,
        });
        const files = await bucket.find({ filename: key }).toArray();
        await client.bucket.delete(files[0]._id);
        return true;
      } catch {
        return false;
      }
    }

    const object = await client.store.deleteOne({ key: { $eq: key } });
    return object.deletedCount > 0;
  }

  async deleteMany(keys) {
    const client = await this._getClient();

    if (this.opts.useGridFS) {
      const bucket = new GridFSBucket(client.db, {
        bucketName: this.opts.collection,
      });
      const files = await bucket.find({ filename: { $in: keys } }).toArray();
      if (files.length === 0) {
        return false;
      }

      await Promise.all(files.map(async (file) => client.bucket.delete(file._id)));
      return true;
    }

    const object = await client.store.deleteMany({ key: { $in: keys } });
    return object.deletedCount > 0;
  }

  async clear() {
    const client = await this._getClient();

    if (this.opts.useGridFS) {
      try {
        await client.bucket.drop();
      } catch (error) {
        // Throw error if not "namespace not found" error
        if (!(error.code === 26)) {
          throw error;
        }
      }
    }

    await client.store.deleteMany({
      key: { $regex: this.namespace ? `^${this.namespace}:*` : '' },
    });
  }

  async has(key) {
    const client = await this._getClient();
    const filter = { [this.opts.useGridFS ? 'filename' : 'key']: { $eq: key } };
    const document = await client.store.countDocuments(filter, { limit: 1 });
    return document !== 0;
  }

  // No-op disconnect
  async disconnect() {
    // This is a no-op since we don't want to close the shared mongoose connection
    return true;
  }
}

const keyvMongo = new KeyvMongoCustom({
  collection: 'logs',
});

const keyvMongo = new KeyvMongo(MONGO_URI, { collection: 'logs' });
keyvMongo.on('error', (err) => logger.error('KeyvMongo connection error:', err));

module.exports = keyvMongo;
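A minimal consumer sketch for the new adapter, assuming the shared mongoose connection is opened first (the `_getClient` guard above rejects otherwise; the `~/db/connect` path is inferred from the db diffs below):

  const { connectDb } = require('~/db/connect'); // assumed path
  const keyvMongo = require('~/cache/keyvMongo');

  async function demo() {
    await connectDb(); // must complete before any keyvMongo call
    await keyvMongo.set('greeting', 'hello', 60 * 1000); // ttl stored as an expiresAt date
    return keyvMongo.get('greeting'); // -> 'hello'
  }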
api/cache/keyvRedis.js (vendored, 25 changes)
@@ -1,6 +1,6 @@
const fs = require('fs');
const ioredis = require('ioredis');
const KeyvRedis = require('@keyv/redis');
const KeyvRedis = require('@keyv/redis').default;
const { isEnabled } = require('~/server/utils');
const logger = require('~/config/winston');

@@ -50,6 +50,7 @@ function mapURI(uri) {

if (REDIS_URI && isEnabled(USE_REDIS)) {
  let redisOptions = null;
  /** @type {import('@keyv/redis').KeyvRedisOptions} */
  let keyvOpts = {
    useRedisSets: false,
    keyPrefix: redis_prefix,
@@ -74,6 +75,28 @@ if (REDIS_URI && isEnabled(USE_REDIS)) {
  } else {
    keyvRedis = new KeyvRedis(REDIS_URI, keyvOpts);
  }

  const pingInterval = setInterval(
    () => {
      logger.debug('KeyvRedis ping');
      keyvRedis.client.ping().catch((err) => logger.error('Redis keep-alive ping failed:', err));
    },
    5 * 60 * 1000,
  );

  keyvRedis.on('ready', () => {
    logger.info('KeyvRedis connection ready');
  });
  keyvRedis.on('reconnecting', () => {
    logger.info('KeyvRedis connection reconnecting');
  });
  keyvRedis.on('end', () => {
    logger.info('KeyvRedis connection ended');
  });
  keyvRedis.on('close', () => {
    clearInterval(pingInterval);
    logger.info('KeyvRedis connection closed');
  });
  keyvRedis.on('error', (err) => logger.error('KeyvRedis connection error:', err));
  keyvRedis.setMaxListeners(redis_max_listeners);
  logger.info(
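How this store is consumed elsewhere in the PR (see getLogStores.js above): every namespace wraps the single shared KeyvRedis instance when Redis is enabled, falling back to memory otherwise. A sketch:

  const { Keyv } = require('keyv'); // named export, per the getLogStores.js change
  const keyvRedis = require('~/cache/keyvRedis'); // undefined when USE_REDIS is off

  const messages = keyvRedis
    ? new Keyv({ store: keyvRedis }) // shared Redis store, keys disambiguated by the key prefix
    : new Keyv({ namespace: 'messages' }); // in-memory fallback (namespace name illustrative)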
api/cache/redis.js (vendored, 4 changes)
@@ -1,4 +0,0 @@
const Redis = require('ioredis');
const { REDIS_URI } = process.env ?? {};
const redis = new Redis.Cluster(REDIS_URI);
module.exports = redis;
@@ -1,93 +1,42 @@
const axios = require('axios');
const { EventSource } = require('eventsource');
const { Time, CacheKeys } = require('librechat-data-provider');
const { Time } = require('librechat-data-provider');
const { MCPManager, FlowStateManager } = require('@librechat/api');
const logger = require('./winston');

global.EventSource = EventSource;

/** @type {MCPManager} */
let mcpManager = null;
let flowManager = null;

/**
 * @returns {Promise<MCPManager>}
 * @param {string} [userId] - Optional user ID, to avoid disconnecting the current user.
 * @returns {MCPManager}
 */
async function getMCPManager() {
function getMCPManager(userId) {
  if (!mcpManager) {
    const { MCPManager } = await import('librechat-mcp');
    mcpManager = MCPManager.getInstance(logger);
    mcpManager = MCPManager.getInstance();
  } else {
    mcpManager.checkIdleConnections(userId);
  }
  return mcpManager;
}

/**
 * @param {(key: string) => Keyv} getLogStores
 * @returns {Promise<FlowStateManager>}
 * @param {Keyv} flowsCache
 * @returns {FlowStateManager}
 */
async function getFlowStateManager(getLogStores) {
function getFlowStateManager(flowsCache) {
  if (!flowManager) {
    const { FlowStateManager } = await import('librechat-mcp');
    flowManager = new FlowStateManager(getLogStores(CacheKeys.FLOWS), {
    flowManager = new FlowStateManager(flowsCache, {
      ttl: Time.ONE_MINUTE * 3,
      logger,
    });
  }
  return flowManager;
}

/**
 * Sends message data in Server Sent Events format.
 * @param {ServerResponse} res - The server response.
 * @param {{ data: string | Record<string, unknown>, event?: string }} event - The message event.
 * @param {string} event.event - The type of event.
 * @param {string} event.data - The message to be sent.
 */
const sendEvent = (res, event) => {
  if (typeof event.data === 'string' && event.data.length === 0) {
    return;
  }
  res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`);
};

/**
 * Creates and configures an Axios instance with optional proxy settings.
 *
 * @typedef {import('axios').AxiosInstance} AxiosInstance
 * @typedef {import('axios').AxiosProxyConfig} AxiosProxyConfig
 *
 * @returns {AxiosInstance} A configured Axios instance
 * @throws {Error} If there's an issue creating the Axios instance or parsing the proxy URL
 */
function createAxiosInstance() {
  const instance = axios.create();

  if (process.env.proxy) {
    try {
      const url = new URL(process.env.proxy);

      /** @type {AxiosProxyConfig} */
      const proxyConfig = {
        host: url.hostname.replace(/^\[|\]$/g, ''),
        protocol: url.protocol.replace(':', ''),
      };

      if (url.port) {
        proxyConfig.port = parseInt(url.port, 10);
      }

      instance.defaults.proxy = proxyConfig;
    } catch (error) {
      console.error('Error parsing proxy URL:', error);
      throw new Error(`Invalid proxy URL: ${process.env.proxy}`);
    }
  }

  return instance;
}

module.exports = {
  logger,
  sendEvent,
  getMCPManager,
  createAxiosInstance,
  getFlowStateManager,
};
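For clarity, `sendEvent` above writes exactly one Server-Sent-Events frame per call; a worked example of the bytes produced:

  // sendEvent(res, { data: { text: 'hi' } }) results in:
  res.write('event: message\ndata: {"data":{"text":"hi"}}\n\n');
  // i.e. an `event: message` line, one `data:` line carrying the JSON-encoded
  // event object, and a blank line terminating the frame.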
@@ -39,7 +39,10 @@ async function connectDb() {
    });
  }
  cached.conn = await cached.promise;

  return cached.conn;
}

module.exports = connectDb;
module.exports = {
  connectDb,
};
api/db/index.js (new file, 8 lines)
@@ -0,0 +1,8 @@
const mongoose = require('mongoose');
const { createModels } = require('@librechat/data-schemas');
const { connectDb } = require('./connect');
const indexSync = require('./indexSync');

createModels(mongoose);

module.exports = { connectDb, indexSync };
@@ -1,8 +1,11 @@
const mongoose = require('mongoose');
const { MeiliSearch } = require('meilisearch');
const { Conversation } = require('~/models/Conversation');
const { Message } = require('~/models/Message');
const { logger } = require('@librechat/data-schemas');

const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');

const Conversation = mongoose.models.Conversation;
const Message = mongoose.models.Message;

const searchEnabled = isEnabled(process.env.SEARCH);
const indexingDisabled = isEnabled(process.env.MEILI_NO_SYNC);
@@ -29,7 +32,6 @@ async function indexSync() {
  if (!searchEnabled) {
    return;
  }

  try {
    const client = MeiliSearchClient.getInstance();
api/db/models.js (new file, 5 lines)
@@ -0,0 +1,5 @@
const mongoose = require('mongoose');
const { createModels } = require('@librechat/data-schemas');
const models = createModels(mongoose);

module.exports = { ...models };
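The effect of this registry is visible throughout the model diffs below: files stop registering mongoose schemas themselves and destructure ready-made models instead, e.g.:

  // Before: const Agent = mongoose.model('agent', agentSchema);
  // After:
  const { Agent, Action, Assistant, Banner } = require('~/db/models');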
@@ -5,12 +5,14 @@ module.exports = {
  coverageDirectory: 'coverage',
  setupFiles: [
    './test/jestSetup.js',
    './test/__mocks__/KeyvMongo.js',
    './test/__mocks__/logger.js',
    './test/__mocks__/fetchEventSource.js',
  ],
  moduleNameMapper: {
    '~/(.*)': '<rootDir>/$1',
    '~/data/auth.json': '<rootDir>/__mocks__/auth.mock.json',
    '^openid-client/passport$': '<rootDir>/test/__mocks__/openid-client-passport.js', // Mock for the passport strategy part
    '^openid-client$': '<rootDir>/test/__mocks__/openid-client.js',
  },
  transformIgnorePatterns: ['/node_modules/(?!(openid-client|oauth4webapi|jose)/).*/'],
};
@@ -1,4 +0,0 @@
const connectDb = require('./connectDb');
const indexSync = require('./indexSync');

module.exports = { connectDb, indexSync };
@@ -1,59 +0,0 @@
const mergeSort = require('./mergeSort');
const { cleanUpPrimaryKeyValue } = require('./misc');

function reduceMessages(hits) {
  const counts = {};

  for (const hit of hits) {
    if (!counts[hit.conversationId]) {
      counts[hit.conversationId] = 1;
    } else {
      counts[hit.conversationId]++;
    }
  }

  const result = [];

  for (const [conversationId, count] of Object.entries(counts)) {
    result.push({
      conversationId,
      count,
    });
  }

  return mergeSort(result, (a, b) => b.count - a.count);
}

function reduceHits(hits, titles = []) {
  const counts = {};
  const titleMap = {};
  const convos = [...hits, ...titles];

  for (const convo of convos) {
    const currentId = cleanUpPrimaryKeyValue(convo.conversationId);
    if (!counts[currentId]) {
      counts[currentId] = 1;
    } else {
      counts[currentId]++;
    }

    if (convo.title) {
      // titleMap[currentId] = convo._formatted.title;
      titleMap[currentId] = convo.title;
    }
  }

  const result = [];

  for (const [conversationId, count] of Object.entries(counts)) {
    result.push({
      conversationId,
      count,
      title: titleMap[conversationId] ? titleMap[conversationId] : null,
    });
  }

  return mergeSort(result, (a, b) => b.count - a.count);
}

module.exports = { reduceMessages, reduceHits };
@@ -1,7 +1,4 @@
const mongoose = require('mongoose');
const { actionSchema } = require('@librechat/data-schemas');

const Action = mongoose.model('action', actionSchema);
const { Action } = require('~/db/models');

/**
 * Update an action with new data without overwriting existing properties,
@@ -1,17 +1,18 @@
|
||||
const mongoose = require('mongoose');
|
||||
const { SystemRoles } = require('librechat-data-provider');
|
||||
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
|
||||
const { CONFIG_STORE, STARTUP_CONFIG } = require('librechat-data-provider').CacheKeys;
|
||||
const crypto = require('node:crypto');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider');
|
||||
const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_delimiter } =
|
||||
require('librechat-data-provider').Constants;
|
||||
const {
|
||||
getProjectByName,
|
||||
addAgentIdsToProject,
|
||||
removeAgentIdsFromProject,
|
||||
removeAgentFromAllProjects,
|
||||
} = require('./Project');
|
||||
const getLogStores = require('~/cache/getLogStores');
|
||||
const { agentSchema } = require('@librechat/data-schemas');
|
||||
|
||||
const Agent = mongoose.model('agent', agentSchema);
|
||||
const { getCachedTools } = require('~/server/services/Config');
|
||||
const { getActions } = require('./Action');
|
||||
const { Agent } = require('~/db/models');
|
||||
|
||||
/**
|
||||
* Create an agent with the provided data.
|
||||
@@ -20,7 +21,19 @@ const Agent = mongoose.model('agent', agentSchema);
|
||||
* @throws {Error} If the agent creation fails.
|
||||
*/
|
||||
const createAgent = async (agentData) => {
|
||||
return (await Agent.create(agentData)).toObject();
|
||||
const { author: _author, ...versionData } = agentData;
|
||||
const timestamp = new Date();
|
||||
const initialAgentData = {
|
||||
...agentData,
|
||||
versions: [
|
||||
{
|
||||
...versionData,
|
||||
createdAt: timestamp,
|
||||
updatedAt: timestamp,
|
||||
},
|
||||
],
|
||||
};
|
||||
return (await Agent.create(initialAgentData)).toObject();
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -39,9 +52,66 @@ const getAgent = async (searchParameter) => await Agent.findOne(searchParameter)
|
||||
* @param {Object} params
|
||||
* @param {ServerRequest} params.req
|
||||
* @param {string} params.agent_id
|
||||
* @param {string} params.endpoint
|
||||
* @param {import('@librechat/agents').ClientOptions} [params.model_parameters]
|
||||
* @returns {Promise<Agent|null>} The agent document as a plain object, or null if not found.
|
||||
*/
|
||||
const loadAgent = async ({ req, agent_id }) => {
|
||||
const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _m }) => {
|
||||
const { model, ...model_parameters } = _m;
|
||||
/** @type {Record<string, FunctionTool>} */
|
||||
const availableTools = await getCachedTools({ includeGlobal: true });
|
||||
/** @type {TEphemeralAgent | null} */
|
||||
const ephemeralAgent = req.body.ephemeralAgent;
|
||||
const mcpServers = new Set(ephemeralAgent?.mcp);
|
||||
/** @type {string[]} */
|
||||
const tools = [];
|
||||
if (ephemeralAgent?.execute_code === true) {
|
||||
tools.push(Tools.execute_code);
|
||||
}
|
||||
if (ephemeralAgent?.web_search === true) {
|
||||
tools.push(Tools.web_search);
|
||||
}
|
||||
|
||||
if (mcpServers.size > 0) {
|
||||
for (const toolName of Object.keys(availableTools)) {
|
||||
if (!toolName.includes(mcp_delimiter)) {
|
||||
continue;
|
||||
}
|
||||
const mcpServer = toolName.split(mcp_delimiter)?.[1];
|
||||
if (mcpServer && mcpServers.has(mcpServer)) {
|
||||
tools.push(toolName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const instructions = req.body.promptPrefix;
|
||||
return {
|
||||
id: agent_id,
|
||||
instructions,
|
||||
provider: endpoint,
|
||||
model_parameters,
|
||||
model,
|
||||
tools,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Load an agent based on the provided ID
|
||||
*
|
||||
* @param {Object} params
|
||||
* @param {ServerRequest} params.req
|
||||
* @param {string} params.agent_id
|
||||
* @param {string} params.endpoint
|
||||
* @param {import('@librechat/agents').ClientOptions} [params.model_parameters]
|
||||
* @returns {Promise<Agent|null>} The agent document as a plain object, or null if not found.
|
||||
*/
|
||||
const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
|
||||
if (!agent_id) {
|
||||
return null;
|
||||
}
|
||||
if (agent_id === EPHEMERAL_AGENT_ID) {
|
||||
return await loadEphemeralAgent({ req, agent_id, endpoint, model_parameters });
|
||||
}
|
||||
const agent = await getAgent({
|
||||
id: agent_id,
|
||||
});
|
||||
@@ -50,43 +120,216 @@ const loadAgent = async ({ req, agent_id }) => {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (agent.author.toString() === req.user.id) {
|
||||
return agent;
|
||||
}
|
||||
agent.version = agent.versions ? agent.versions.length : 0;
|
||||
return agent;
|
||||
};
|
||||
|
||||
if (!agent.projectIds) {
|
||||
/**
|
||||
* Check if a version already exists in the versions array, excluding timestamp and author fields
|
||||
* @param {Object} updateData - The update data to compare
|
||||
* @param {Object} currentData - The current agent data
|
||||
* @param {Array} versions - The existing versions array
|
||||
* @param {string} [actionsHash] - Hash of current action metadata
|
||||
* @returns {Object|null} - The matching version if found, null otherwise
|
||||
*/
|
||||
const isDuplicateVersion = (updateData, currentData, versions, actionsHash = null) => {
|
||||
if (!versions || versions.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const cache = getLogStores(CONFIG_STORE);
|
||||
/** @type {TStartupConfig} */
|
||||
const cachedStartupConfig = await cache.get(STARTUP_CONFIG);
|
||||
let { instanceProjectId } = cachedStartupConfig ?? {};
|
||||
if (!instanceProjectId) {
|
||||
instanceProjectId = (await getProjectByName(GLOBAL_PROJECT_NAME, '_id'))._id.toString();
|
||||
const excludeFields = [
|
||||
'_id',
|
||||
'id',
|
||||
'createdAt',
|
||||
'updatedAt',
|
||||
'author',
|
||||
'updatedBy',
|
||||
'created_at',
|
||||
'updated_at',
|
||||
'__v',
|
||||
'versions',
|
||||
'actionsHash', // Exclude actionsHash from direct comparison
|
||||
];
|
||||
|
||||
const { $push: _$push, $pull: _$pull, $addToSet: _$addToSet, ...directUpdates } = updateData;
|
||||
|
||||
if (Object.keys(directUpdates).length === 0 && !actionsHash) {
|
||||
return null;
|
||||
}
|
||||
|
||||
for (const projectObjectId of agent.projectIds) {
|
||||
const projectId = projectObjectId.toString();
|
||||
if (projectId === instanceProjectId) {
|
||||
return agent;
|
||||
const wouldBeVersion = { ...currentData, ...directUpdates };
|
||||
const lastVersion = versions[versions.length - 1];
|
||||
|
||||
if (actionsHash && lastVersion.actionsHash !== actionsHash) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const allFields = new Set([...Object.keys(wouldBeVersion), ...Object.keys(lastVersion)]);
|
||||
|
||||
const importantFields = Array.from(allFields).filter((field) => !excludeFields.includes(field));
|
||||
|
||||
let isMatch = true;
|
||||
for (const field of importantFields) {
|
||||
if (!wouldBeVersion[field] && !lastVersion[field]) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (Array.isArray(wouldBeVersion[field]) && Array.isArray(lastVersion[field])) {
|
||||
if (wouldBeVersion[field].length !== lastVersion[field].length) {
|
||||
isMatch = false;
|
||||
break;
|
||||
}
|
||||
|
||||
// Special handling for projectIds (MongoDB ObjectIds)
|
||||
if (field === 'projectIds') {
|
||||
const wouldBeIds = wouldBeVersion[field].map((id) => id.toString()).sort();
|
||||
const versionIds = lastVersion[field].map((id) => id.toString()).sort();
|
||||
|
||||
if (!wouldBeIds.every((id, i) => id === versionIds[i])) {
|
||||
isMatch = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// Handle arrays of objects like tool_kwargs
|
||||
else if (typeof wouldBeVersion[field][0] === 'object' && wouldBeVersion[field][0] !== null) {
|
||||
const sortedWouldBe = [...wouldBeVersion[field]].map((item) => JSON.stringify(item)).sort();
|
||||
const sortedVersion = [...lastVersion[field]].map((item) => JSON.stringify(item)).sort();
|
||||
|
||||
if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
|
||||
isMatch = false;
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
const sortedWouldBe = [...wouldBeVersion[field]].sort();
|
||||
const sortedVersion = [...lastVersion[field]].sort();
|
||||
|
||||
if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
|
||||
isMatch = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else if (field === 'model_parameters') {
|
||||
const wouldBeParams = wouldBeVersion[field] || {};
|
||||
const lastVersionParams = lastVersion[field] || {};
|
||||
if (JSON.stringify(wouldBeParams) !== JSON.stringify(lastVersionParams)) {
|
||||
isMatch = false;
|
||||
break;
|
||||
}
|
||||
} else if (wouldBeVersion[field] !== lastVersion[field]) {
|
||||
isMatch = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return isMatch ? lastVersion : null;
|
||||
};
|
||||
|
||||
/**
|
||||
* Update an agent with new data without overwriting existing
|
||||
* properties, or create a new agent if it doesn't exist.
|
||||
* When an agent is updated, a copy of the current state will be saved to the versions array.
|
||||
*
|
||||
* @param {Object} searchParameter - The search parameters to find the agent to update.
|
||||
* @param {string} searchParameter.id - The ID of the agent to update.
|
||||
* @param {string} [searchParameter.author] - The user ID of the agent's author.
|
||||
* @param {Object} updateData - An object containing the properties to update.
|
||||
* @param {Object} [options] - Optional configuration object.
|
||||
* @param {string} [options.updatingUserId] - The ID of the user performing the update (used for tracking non-author updates).
|
||||
* @param {boolean} [options.forceVersion] - Force creation of a new version even if no fields changed.
|
||||
* @param {boolean} [options.skipVersioning] - Skip version creation entirely (useful for isolated operations like sharing).
|
||||
* @returns {Promise<Agent>} The updated or newly created agent document as a plain object.
|
||||
* @throws {Error} If the update would create a duplicate version
|
||||
*/
|
||||
const updateAgent = async (searchParameter, updateData) => {
|
||||
const options = { new: true, upsert: false };
|
||||
return Agent.findOneAndUpdate(searchParameter, updateData, options).lean();
|
||||
const updateAgent = async (searchParameter, updateData, options = {}) => {
|
||||
const { updatingUserId = null, forceVersion = false, skipVersioning = false } = options;
|
||||
const mongoOptions = { new: true, upsert: false };
|
||||
|
||||
const currentAgent = await Agent.findOne(searchParameter);
|
||||
if (currentAgent) {
|
||||
const {
|
||||
__v,
|
||||
_id,
|
||||
id: __id,
|
||||
versions,
|
||||
author: _author,
|
||||
...versionData
|
||||
} = currentAgent.toObject();
|
||||
const { $push, $pull, $addToSet, ...directUpdates } = updateData;
|
||||
|
||||
let actionsHash = null;
|
||||
|
||||
// Generate actions hash if agent has actions
|
||||
if (currentAgent.actions && currentAgent.actions.length > 0) {
|
||||
// Extract action IDs from the format "domain_action_id"
|
||||
const actionIds = currentAgent.actions
|
||||
.map((action) => {
|
||||
const parts = action.split(actionDelimiter);
|
||||
return parts[1]; // Get just the action ID part
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
if (actionIds.length > 0) {
|
||||
try {
|
||||
const actions = await getActions(
|
||||
{
|
||||
action_id: { $in: actionIds },
|
||||
},
|
||||
true,
|
||||
); // Include sensitive data for hash
|
||||
|
||||
actionsHash = await generateActionMetadataHash(currentAgent.actions, actions);
|
||||
} catch (error) {
|
||||
logger.error('Error fetching actions for hash generation:', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const shouldCreateVersion =
|
||||
!skipVersioning &&
|
||||
(forceVersion || Object.keys(directUpdates).length > 0 || $push || $pull || $addToSet);
|
||||
|
||||
if (shouldCreateVersion) {
|
||||
const duplicateVersion = isDuplicateVersion(updateData, versionData, versions, actionsHash);
|
||||
if (duplicateVersion && !forceVersion) {
|
||||
const error = new Error(
|
||||
'Duplicate version: This would create a version identical to an existing one',
|
||||
);
|
||||
error.statusCode = 409;
|
||||
error.details = {
|
||||
duplicateVersion,
|
||||
versionIndex: versions.findIndex(
|
||||
(v) => JSON.stringify(duplicateVersion) === JSON.stringify(v),
|
||||
),
|
||||
};
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
const versionEntry = {
|
||||
...versionData,
|
||||
...directUpdates,
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
// Include actions hash in version if available
|
||||
if (actionsHash) {
|
||||
versionEntry.actionsHash = actionsHash;
|
||||
}
|
||||
|
||||
// Always store updatedBy field to track who made the change
|
||||
if (updatingUserId) {
|
||||
versionEntry.updatedBy = new mongoose.Types.ObjectId(updatingUserId);
|
||||
}
|
||||
|
||||
if (shouldCreateVersion) {
|
||||
updateData.$push = {
|
||||
...($push || {}),
|
||||
versions: versionEntry,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return Agent.findOneAndUpdate(searchParameter, updateData, mongoOptions).lean();
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -98,11 +341,13 @@ const updateAgent = async (searchParameter, updateData) => {
|
||||
* @param {string} params.file_id
|
||||
* @returns {Promise<Agent>} The updated agent.
|
||||
*/
|
||||
const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
|
||||
const addAgentResourceFile = async ({ req, agent_id, tool_resource, file_id }) => {
|
||||
const searchParameter = { id: agent_id };
|
||||
|
||||
let agent = await getAgent(searchParameter);
|
||||
if (!agent) {
|
||||
throw new Error('Agent not found for adding resource file');
|
||||
}
|
||||
const fileIdsPath = `tool_resources.${tool_resource}.file_ids`;
|
||||
|
||||
await Agent.updateOne(
|
||||
{
|
||||
id: agent_id,
|
||||
@@ -115,9 +360,16 @@ const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
|
||||
},
|
||||
);
|
||||
|
||||
const updateData = { $addToSet: { [fileIdsPath]: file_id } };
|
||||
const updateData = {
|
||||
$addToSet: {
|
||||
tools: tool_resource,
|
||||
[fileIdsPath]: file_id,
|
||||
},
|
||||
};
|
||||
|
||||
const updatedAgent = await updateAgent(searchParameter, updateData);
|
||||
const updatedAgent = await updateAgent(searchParameter, updateData, {
|
||||
updatingUserId: req?.user?.id,
|
||||
});
|
||||
if (updatedAgent) {
|
||||
return updatedAgent;
|
||||
} else {
|
||||
@@ -126,16 +378,17 @@ const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
|
||||
};
|
||||
|
||||
/**
|
||||
* Removes multiple resource files from an agent in a single update.
|
||||
* Removes multiple resource files from an agent using atomic operations.
|
||||
* @param {object} params
|
||||
* @param {string} params.agent_id
|
||||
* @param {Array<{tool_resource: string, file_id: string}>} params.files
|
||||
* @returns {Promise<Agent>} The updated agent.
|
||||
* @throws {Error} If the agent is not found or update fails.
|
||||
*/
|
||||
const removeAgentResourceFiles = async ({ agent_id, files }) => {
|
||||
const searchParameter = { id: agent_id };
|
||||
|
||||
// associate each tool resource with the respective file ids array
|
||||
// Group files to remove by resource
|
||||
const filesByResource = files.reduce((acc, { tool_resource, file_id }) => {
|
||||
if (!acc[tool_resource]) {
|
||||
acc[tool_resource] = [];
|
||||
@@ -144,42 +397,35 @@ const removeAgentResourceFiles = async ({ agent_id, files }) => {
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
// build the update aggregation pipeline wich removes file ids from tool resources array
|
||||
// and eventually deletes empty tool resources
|
||||
const updateData = [];
|
||||
Object.entries(filesByResource).forEach(([resource, fileIds]) => {
|
||||
const toolResourcePath = `tool_resources.${resource}`;
|
||||
const fileIdsPath = `${toolResourcePath}.file_ids`;
|
||||
|
||||
// file ids removal stage
|
||||
updateData.push({
|
||||
$set: {
|
||||
[fileIdsPath]: {
|
||||
$filter: {
|
||||
input: `$${fileIdsPath}`,
|
||||
cond: { $not: [{ $in: ['$$this', fileIds] }] },
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// empty tool resource deletion stage
|
||||
updateData.push({
|
||||
$set: {
|
||||
[toolResourcePath]: {
|
||||
$cond: [{ $eq: [`$${fileIdsPath}`, []] }, '$$REMOVE', `$${toolResourcePath}`],
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
// return the updated agent or throw if no agent matches
|
||||
const updatedAgent = await updateAgent(searchParameter, updateData);
|
||||
if (updatedAgent) {
|
||||
return updatedAgent;
|
||||
} else {
|
||||
throw new Error('Agent not found for removing resource files');
|
||||
// Step 1: Atomically remove file IDs using $pull
|
||||
const pullOps = {};
|
||||
const resourcesToCheck = new Set();
|
||||
for (const [resource, fileIds] of Object.entries(filesByResource)) {
|
||||
const fileIdsPath = `tool_resources.${resource}.file_ids`;
|
||||
pullOps[fileIdsPath] = { $in: fileIds };
|
||||
resourcesToCheck.add(resource);
|
||||
}
|
||||
|
||||
const updatePullData = { $pull: pullOps };
|
||||
const agentAfterPull = await Agent.findOneAndUpdate(searchParameter, updatePullData, {
|
||||
new: true,
|
||||
}).lean();
|
||||
|
||||
if (!agentAfterPull) {
|
||||
// Agent might have been deleted concurrently, or never existed.
|
||||
// Check if it existed before trying to throw.
|
||||
const agentExists = await getAgent(searchParameter);
|
||||
if (!agentExists) {
|
||||
throw new Error('Agent not found for removing resource files');
|
||||
}
|
||||
// If it existed but findOneAndUpdate returned null, something else went wrong.
|
||||
throw new Error('Failed to update agent during file removal (pull step)');
|
||||
}
|
||||
|
||||
// Return the agent state directly after the $pull operation.
|
||||
// Skipping the $unset step for now to simplify and test core $pull atomicity.
|
||||
// Empty arrays might remain, but the removal itself should be correct.
|
||||
return agentAfterPull;
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -198,8 +444,110 @@ const deleteAgent = async (searchParameter) => {
|
||||
return agent;
|
||||
};
|
||||
|
||||
/**
|
||||
* Get agents by accessible IDs with optional cursor-based pagination.
|
||||
* @param {Object} params - The parameters for getting accessible agents.
|
||||
* @param {Array} [params.accessibleIds] - Array of agent ObjectIds the user has ACL access to.
|
||||
* @param {Object} [params.otherParams] - Additional query parameters (including author filter).
|
||||
* @param {number} [params.limit] - Number of agents to return (max 100). If not provided, returns all agents.
|
||||
* @param {string} [params.after] - Cursor for pagination - get agents after this cursor. // base64 encoded JSON string with updatedAt and _id.
|
||||
* @returns {Promise<Object>} A promise that resolves to an object containing the agents data and pagination info.
|
||||
*/
|
||||
const getListAgentsByAccess = async ({
|
||||
accessibleIds = [],
|
||||
otherParams = {},
|
||||
limit = null,
|
||||
after = null,
|
||||
}) => {
|
||||
const isPaginated = limit !== null && limit !== undefined;
|
||||
const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;
|
||||
|
||||
// Build base query combining ACL accessible agents with other filters
|
||||
const baseQuery = { ...otherParams };
|
||||
|
||||
if (accessibleIds.length > 0) {
|
||||
baseQuery._id = { $in: accessibleIds };
|
||||
}
|
||||
|
||||
// Add cursor condition
|
||||
if (after) {
|
||||
try {
|
||||
const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
|
||||
const { updatedAt, _id } = cursor;
|
||||
|
||||
const cursorCondition = {
|
||||
$or: [
|
||||
{ updatedAt: { $lt: new Date(updatedAt) } },
|
||||
{ updatedAt: new Date(updatedAt), _id: { $gt: mongoose.Types.ObjectId(_id) } },
|
||||
],
|
||||
};
|
||||
|
||||
// Merge cursor condition with base query
|
||||
if (Object.keys(baseQuery).length > 0) {
|
||||
baseQuery.$and = [{ ...baseQuery }, cursorCondition];
|
||||
// Remove the original conditions from baseQuery to avoid duplication
|
||||
Object.keys(baseQuery).forEach((key) => {
|
||||
if (key !== '$and') delete baseQuery[key];
|
||||
});
|
||||
} else {
|
||||
Object.assign(baseQuery, cursorCondition);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn('Invalid cursor:', error.message);
|
||||
}
|
||||
}
|
||||
|
||||
let query = Agent.find(baseQuery, {
|
||||
id: 1,
|
||||
_id: 1,
|
||||
name: 1,
|
||||
avatar: 1,
|
||||
author: 1,
|
||||
projectIds: 1,
|
||||
description: 1,
|
||||
updatedAt: 1,
|
||||
}).sort({ updatedAt: -1, _id: 1 });
|
||||
|
||||
// Only apply limit if pagination is requested
|
||||
if (isPaginated) {
|
||||
query = query.limit(normalizedLimit + 1);
|
||||
}
|
||||
|
||||
const agents = await query.lean();
|
||||
|
||||
const hasMore = isPaginated ? agents.length > normalizedLimit : false;
|
||||
const data = (isPaginated ? agents.slice(0, normalizedLimit) : agents).map((agent) => {
|
||||
if (agent.author) {
|
||||
agent.author = agent.author.toString();
|
||||
}
|
||||
return agent;
|
||||
});
|
||||
|
||||
// Generate next cursor only if paginated
|
||||
let nextCursor = null;
|
||||
if (isPaginated && hasMore && data.length > 0) {
|
||||
const lastAgent = agents[normalizedLimit - 1];
|
||||
nextCursor = Buffer.from(
|
||||
JSON.stringify({
|
||||
updatedAt: lastAgent.updatedAt.toISOString(),
|
||||
_id: lastAgent._id.toString(),
|
||||
}),
|
||||
).toString('base64');
|
||||
}
|
||||
|
||||
return {
|
||||
object: 'list',
|
||||
data,
|
||||
first_id: data.length > 0 ? data[0].id : null,
|
||||
last_id: data.length > 0 ? data[data.length - 1].id : null,
|
||||
has_more: hasMore,
|
||||
after: nextCursor,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Get all agents.
|
||||
* @deprecated Use getListAgentsByAccess for ACL-aware agent listing
|
||||
* @param {Object} searchParameter - The search parameters to find matching agents.
|
||||
* @param {string} searchParameter.author - The user ID of the agent's author.
|
||||
* @returns {Promise<Object>} A promise that resolves to an object containing the agents data and pagination info.
|
||||
@@ -215,16 +563,16 @@ const getListAgents = async (searchParameter) => {
|
||||
delete globalQuery.author;
|
||||
query = { $or: [globalQuery, query] };
|
||||
}
|
||||
|
||||
const agents = (
|
||||
await Agent.find(query, {
|
||||
id: 1,
|
||||
_id: 0,
|
||||
_id: 1,
|
||||
name: 1,
|
||||
avatar: 1,
|
||||
author: 1,
|
||||
projectIds: 1,
|
||||
description: 1,
|
||||
// @deprecated - isCollaborative replaced by ACL permissions
|
||||
isCollaborative: 1,
|
||||
}).lean()
|
||||
).map((agent) => {
|
||||
@@ -254,7 +602,7 @@ const getListAgents = async (searchParameter) => {
|
||||
* This function also updates the corresponding projects to include or exclude the agent ID.
|
||||
*
|
||||
* @param {Object} params - Parameters for updating the agent's projects.
|
||||
* @param {import('librechat-data-provider').TUser} params.user - Parameters for updating the agent's projects.
|
||||
* @param {MongoUser} params.user - Parameters for updating the agent's projects.
|
||||
* @param {string} params.agentId - The ID of the agent to update.
|
||||
* @param {string[]} [params.projectIds] - Array of project IDs to add to the agent.
|
||||
* @param {string[]} [params.removeProjectIds] - Array of project IDs to remove from the agent.
|
||||
@@ -287,7 +635,10 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
|
||||
delete updateQuery.author;
|
||||
}
|
||||
|
||||
const updatedAgent = await updateAgent(updateQuery, updateOps);
|
||||
const updatedAgent = await updateAgent(updateQuery, updateOps, {
|
||||
updatingUserId: user.id,
|
||||
skipVersioning: true,
|
||||
});
|
||||
if (updatedAgent) {
|
||||
return updatedAgent;
|
||||
}
|
||||
@@ -304,15 +655,108 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
|
||||
return await getAgent({ id: agentId });
|
};

/**
 * Reverts an agent to a specific version in its version history.
 * @param {Object} searchParameter - The search parameters to find the agent to revert.
 * @param {string} searchParameter.id - The ID of the agent to revert.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {number} versionIndex - The index of the version to revert to in the versions array.
 * @returns {Promise<MongoAgent>} The updated agent document after reverting.
 * @throws {Error} If the agent is not found or the specified version does not exist.
 */
const revertAgentVersion = async (searchParameter, versionIndex) => {
  const agent = await Agent.findOne(searchParameter);
  if (!agent) {
    throw new Error('Agent not found');
  }

  if (!agent.versions || !agent.versions[versionIndex]) {
    throw new Error(`Version ${versionIndex} not found`);
  }

  const revertToVersion = agent.versions[versionIndex];

  const updateData = {
    ...revertToVersion,
  };

  delete updateData._id;
  delete updateData.id;
  delete updateData.versions;
  delete updateData.author;
  delete updateData.updatedBy;

  return Agent.findOneAndUpdate(searchParameter, updateData, { new: true }).lean();
};

/**
 * Generates a hash of action metadata for version comparison
 * @param {string[]} actionIds - Array of action IDs in format "domain_action_id"
 * @param {Action[]} actions - Array of action documents
 * @returns {Promise<string>} - SHA256 hash of the action metadata
 */
const generateActionMetadataHash = async (actionIds, actions) => {
  if (!actionIds || actionIds.length === 0) {
    return '';
  }

  // Create a map of action_id to metadata for quick lookup
  const actionMap = new Map();
  actions.forEach((action) => {
    actionMap.set(action.action_id, action.metadata);
  });

  // Sort action IDs for consistent hashing
  const sortedActionIds = [...actionIds].sort();

  // Build a deterministic string representation of all action metadata
  const metadataString = sortedActionIds
    .map((actionFullId) => {
      // Extract just the action_id part (after the delimiter)
      const parts = actionFullId.split(actionDelimiter);
      const actionId = parts[1];

      const metadata = actionMap.get(actionId);
      if (!metadata) {
        return `${actionId}:null`;
      }

      // Sort metadata keys for deterministic output
      const sortedKeys = Object.keys(metadata).sort();
      const metadataStr = sortedKeys
        .map((key) => `${key}:${JSON.stringify(metadata[key])}`)
        .join(',');
      return `${actionId}:{${metadataStr}}`;
    })
    .join(';');

  // Use Web Crypto API to generate hash
  const encoder = new TextEncoder();
  const data = encoder.encode(metadataString);
  const hashBuffer = await crypto.webcrypto.subtle.digest('SHA-256', data);
  const hashArray = Array.from(new Uint8Array(hashBuffer));
  const hashHex = hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');

  return hashHex;
};

/**
 * Load a default agent based on the endpoint
 * @param {string} endpoint
 * @returns {Agent | null}
 */

module.exports = {
  Agent,
  getAgent,
  loadAgent,
  createAgent,
  updateAgent,
  deleteAgent,
  getListAgents,
  revertAgentVersion,
  updateAgentProjects,
  addAgentResourceFile,
  getListAgentsByAccess,
  removeAgentResourceFiles,
  generateActionMetadataHash,
};
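Aside: generateActionMetadataHash above uses the Web Crypto API (crypto.webcrypto.subtle.digest), available in Node 15+. As an illustrative sketch only — not part of the diff, and with a made-up metadata string — the same hex digest can be produced synchronously with Node's built-in createHash:

// Sketch: equivalent SHA-256 hex digest via node:crypto (sample input is hypothetical).
const { createHash } = require('node:crypto');
const metadataString = 'weather_api:{url:"https://api.example.com"}'; // hypothetical serialization
const hashHex = createHash('sha256').update(metadataString, 'utf8').digest('hex');
console.log(hashHex); // same 64-character hex string subtle.digest would yield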
File diff suppressed because it is too large
@@ -1,7 +1,4 @@
const mongoose = require('mongoose');
const { assistantSchema } = require('@librechat/data-schemas');

const Assistant = mongoose.model('assistant', assistantSchema);
const { Assistant } = require('~/db/models');

/**
 * Update an assistant with new data without overwriting existing properties,
@@ -1,4 +0,0 @@
const mongoose = require('mongoose');
const { balanceSchema } = require('@librechat/data-schemas');

module.exports = mongoose.model('Balance', balanceSchema);
@@ -1,8 +1,5 @@
const mongoose = require('mongoose');
const logger = require('~/config/winston');
const { bannerSchema } = require('@librechat/data-schemas');

const Banner = mongoose.model('Banner', bannerSchema);
const { logger } = require('@librechat/data-schemas');
const { Banner } = require('~/db/models');

/**
 * Retrieves the current active banner.
@@ -28,4 +25,4 @@ const getBanner = async (user) => {
  }
};

module.exports = { Banner, getBanner };
module.exports = { getBanner };
@@ -1,86 +0,0 @@
const mongoose = require('mongoose');
const { logger } = require('~/config');

const major = [0, 0];
const minor = [0, 0];
const patch = [0, 5];

const configSchema = mongoose.Schema(
  {
    tag: {
      type: String,
      required: true,
      validate: {
        validator: function (tag) {
          const [part1, part2, part3] = tag.replace('v', '').split('.').map(Number);

          // Check if all parts are numbers
          if (isNaN(part1) || isNaN(part2) || isNaN(part3)) {
            return false;
          }

          // Check if all parts are within their respective ranges
          if (part1 < major[0] || part1 > major[1]) {
            return false;
          }
          if (part2 < minor[0] || part2 > minor[1]) {
            return false;
          }
          if (part3 < patch[0] || part3 > patch[1]) {
            return false;
          }
          return true;
        },
        message: 'Invalid tag value',
      },
    },
    searchEnabled: {
      type: Boolean,
      default: false,
    },
    usersEnabled: {
      type: Boolean,
      default: false,
    },
    startupCounts: {
      type: Number,
      default: 0,
    },
  },
  { timestamps: true },
);

// Instance method
configSchema.methods.incrementCount = function () {
  this.startupCounts += 1;
};

// Static methods
configSchema.statics.findByTag = async function (tag) {
  return await this.findOne({ tag }).lean();
};

configSchema.statics.updateByTag = async function (tag, update) {
  return await this.findOneAndUpdate({ tag }, update, { new: true });
};

const Config = mongoose.models.Config || mongoose.model('Config', configSchema);

module.exports = {
  getConfigs: async (filter) => {
    try {
      return await Config.find(filter).lean();
    } catch (error) {
      logger.error('Error getting configs', error);
      return { config: 'Error getting configs' };
    }
  },
  deleteConfigs: async (filter) => {
    try {
      return await Config.deleteMany(filter);
    } catch (error) {
      logger.error('Error deleting configs', error);
      return { config: 'Error deleting configs' };
    }
  },
};
@@ -1,6 +1,6 @@
const Conversation = require('./schema/convoSchema');
const { logger } = require('@librechat/data-schemas');
const { getMessages, deleteMessages } = require('./Message');
const logger = require('~/config/winston');
const { Conversation } = require('~/db/models');

/**
 * Searches for a conversation by conversationId and returns a lean document with only conversationId and user.
@@ -75,7 +75,6 @@ const getConvoFiles = async (conversationId) => {
};

module.exports = {
  Conversation,
  getConvoFiles,
  searchConversation,
  deleteNullOrEmptyConversations,
@@ -88,11 +87,13 @@ module.exports = {
   */
  saveConvo: async (req, { conversationId, newConversationId, ...convo }, metadata) => {
    try {
      if (metadata && metadata?.context) {
      if (metadata?.context) {
        logger.debug(`[saveConvo] ${metadata.context}`);
      }

      const messages = await getMessages({ conversationId }, '_id');
      const update = { ...convo, messages, user: req.user.id };

      if (newConversationId) {
        update.conversationId = newConversationId;
      }
@@ -148,75 +149,101 @@ module.exports = {
      throw new Error('Failed to save conversations in bulk.');
    }
  },
  getConvosByPage: async (user, pageNumber = 1, pageSize = 25, isArchived = false, tags) => {
    const query = { user };
  getConvosByCursor: async (
    user,
    { cursor, limit = 25, isArchived = false, tags, search, order = 'desc' } = {},
  ) => {
    const filters = [{ user }];
    if (isArchived) {
      query.isArchived = true;
      filters.push({ isArchived: true });
    } else {
      query.$or = [{ isArchived: false }, { isArchived: { $exists: false } }];
    }
    if (Array.isArray(tags) && tags.length > 0) {
      query.tags = { $in: tags };
      filters.push({ $or: [{ isArchived: false }, { isArchived: { $exists: false } }] });
    }

    query.$and = [{ $or: [{ expiredAt: null }, { expiredAt: { $exists: false } }] }];
    if (Array.isArray(tags) && tags.length > 0) {
      filters.push({ tags: { $in: tags } });
    }

    filters.push({ $or: [{ expiredAt: null }, { expiredAt: { $exists: false } }] });

    if (search) {
      try {
        const meiliResults = await Conversation.meiliSearch(search);
        const matchingIds = Array.isArray(meiliResults.hits)
          ? meiliResults.hits.map((result) => result.conversationId)
          : [];
        if (!matchingIds.length) {
          return { conversations: [], nextCursor: null };
        }
        filters.push({ conversationId: { $in: matchingIds } });
      } catch (error) {
        logger.error('[getConvosByCursor] Error during meiliSearch', error);
        return { message: 'Error during meiliSearch' };
      }
    }

    if (cursor) {
      filters.push({ updatedAt: { $lt: new Date(cursor) } });
    }

    const query = filters.length === 1 ? filters[0] : { $and: filters };

    try {
      const totalConvos = (await Conversation.countDocuments(query)) || 1;
      const totalPages = Math.ceil(totalConvos / pageSize);
      const convos = await Conversation.find(query)
        .sort({ updatedAt: -1 })
        .skip((pageNumber - 1) * pageSize)
        .limit(pageSize)
        .select(
          'conversationId endpoint title createdAt updatedAt user model agent_id assistant_id spec iconURL',
        )
        .sort({ updatedAt: order === 'asc' ? 1 : -1 })
        .limit(limit + 1)
        .lean();
      return { conversations: convos, pages: totalPages, pageNumber, pageSize };

      let nextCursor = null;
      if (convos.length > limit) {
        const lastConvo = convos.pop();
        nextCursor = lastConvo.updatedAt.toISOString();
      }

      return { conversations: convos, nextCursor };
    } catch (error) {
      logger.error('[getConvosByPage] Error getting conversations', error);
      logger.error('[getConvosByCursor] Error getting conversations', error);
      return { message: 'Error getting conversations' };
    }
  },
  getConvosQueried: async (user, convoIds, pageNumber = 1, pageSize = 25) => {
  getConvosQueried: async (user, convoIds, cursor = null, limit = 25) => {
    try {
      if (!convoIds || convoIds.length === 0) {
        return { conversations: [], pages: 1, pageNumber, pageSize };
      if (!convoIds?.length) {
        return { conversations: [], nextCursor: null, convoMap: {} };
      }

      const conversationIds = convoIds.map((convo) => convo.conversationId);

      const results = await Conversation.find({
        user,
        conversationId: { $in: conversationIds },
        $or: [{ expiredAt: { $exists: false } }, { expiredAt: null }],
      }).lean();

      results.sort((a, b) => new Date(b.updatedAt) - new Date(a.updatedAt));

      let filtered = results;
      if (cursor && cursor !== 'start') {
        const cursorDate = new Date(cursor);
        filtered = results.filter((convo) => new Date(convo.updatedAt) < cursorDate);
      }

      const limited = filtered.slice(0, limit + 1);
      let nextCursor = null;
      if (limited.length > limit) {
        const lastConvo = limited.pop();
        nextCursor = lastConvo.updatedAt.toISOString();
      }

      const cache = {};
      const convoMap = {};
      const promises = [];

      convoIds.forEach((convo) =>
        promises.push(
          Conversation.findOne({
            user,
            conversationId: convo.conversationId,
            $or: [{ expiredAt: { $exists: false } }, { expiredAt: null }],
          }).lean(),
        ),
      );

      const results = (await Promise.all(promises)).filter(Boolean);

      results.forEach((convo, i) => {
        const page = Math.floor(i / pageSize) + 1;
        if (!cache[page]) {
          cache[page] = [];
        }
        cache[page].push(convo);
      limited.forEach((convo) => {
        convoMap[convo.conversationId] = convo;
      });

      const totalPages = Math.ceil(results.length / pageSize);
      cache.pages = totalPages;
      cache.pageSize = pageSize;
      return {
        cache,
        conversations: cache[pageNumber] || [],
        pages: totalPages || 1,
        pageNumber,
        pageSize,
        convoMap,
      };
      return { conversations: limited, nextCursor, convoMap };
    } catch (error) {
      logger.error('[getConvosQueried] Error getting conversations', error);
      return { message: 'Error fetching conversations' };
@@ -257,10 +284,25 @@ module.exports = {
   * logger.error(result); // { n: 5, ok: 1, deletedCount: 5, messages: { n: 10, ok: 1, deletedCount: 10 } }
   */
  deleteConvos: async (user, filter) => {
    let toRemove = await Conversation.find({ ...filter, user }).select('conversationId');
    const ids = toRemove.map((instance) => instance.conversationId);
    let deleteCount = await Conversation.deleteMany({ ...filter, user });
    deleteCount.messages = await deleteMessages({ conversationId: { $in: ids } });
    return deleteCount;
    try {
      const userFilter = { ...filter, user };
      const conversations = await Conversation.find(userFilter).select('conversationId');
      const conversationIds = conversations.map((c) => c.conversationId);

      if (!conversationIds.length) {
        throw new Error('Conversation not found or already deleted.');
      }

      const deleteConvoResult = await Conversation.deleteMany(userFilter);

      const deleteMessagesResult = await deleteMessages({
        conversationId: { $in: conversationIds },
      });

      return { ...deleteConvoResult, messages: deleteMessagesResult };
    } catch (error) {
      logger.error('[deleteConvos] Error deleting conversations and messages', error);
      throw error;
    }
  },
};
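Aside: the getConvosByCursor change above swaps page/skip pagination for keyset (cursor) pagination — query limit + 1 rows sorted by updatedAt, and if the extra row comes back, pop it and use its updatedAt as the next cursor. A minimal standalone sketch of the pattern, with illustrative names (not the project's API):

// Sketch: keyset pagination over an in-memory array, mirroring the limit + 1 trick.
function paginateByCursor(items, { cursor = null, limit = 25 } = {}) {
  // Assumes items are sorted by updatedAt descending, as the Mongo query does.
  const filtered = cursor ? items.filter((item) => item.updatedAt < new Date(cursor)) : items;
  const page = filtered.slice(0, limit + 1);
  let nextCursor = null;
  if (page.length > limit) {
    // The extra row proves another page exists; its timestamp becomes the cursor.
    nextCursor = page.pop().updatedAt.toISOString();
  }
  return { page, nextCursor };
}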
@@ -1,10 +1,5 @@
const mongoose = require('mongoose');
const Conversation = require('./schema/convoSchema');
const logger = require('~/config/winston');

const { conversationTagSchema } = require('@librechat/data-schemas');

const ConversationTag = mongoose.model('ConversationTag', conversationTagSchema);
const { logger } = require('@librechat/data-schemas');
const { ConversationTag, Conversation } = require('~/db/models');

/**
 * Retrieves all conversation tags for a user.
@@ -140,13 +135,13 @@ const adjustPositions = async (user, oldPosition, newPosition) => {
  const position =
    oldPosition < newPosition
      ? {
        $gt: Math.min(oldPosition, newPosition),
        $lte: Math.max(oldPosition, newPosition),
      }
          $gt: Math.min(oldPosition, newPosition),
          $lte: Math.max(oldPosition, newPosition),
        }
      : {
        $gte: Math.min(oldPosition, newPosition),
        $lt: Math.max(oldPosition, newPosition),
      };
          $gte: Math.min(oldPosition, newPosition),
          $lt: Math.max(oldPosition, newPosition),
        };

  await ConversationTag.updateMany(
    {
@@ -1,14 +1,12 @@
const mongoose = require('mongoose');
const { fileSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');

const File = mongoose.model('File', fileSchema);
const { logger } = require('@librechat/data-schemas');
const { EToolResources } = require('librechat-data-provider');
const { File } = require('~/db/models');

/**
 * Finds a file by its file_id with additional query options.
 * @param {string} file_id - The unique identifier of the file.
 * @param {object} options - Query options for filtering, projection, etc.
 * @returns {Promise<IMongoFile>} A promise that resolves to the file document or null.
 * @returns {Promise<MongoFile>} A promise that resolves to the file document or null.
 */
const findFileById = async (file_id, options = {}) => {
  return await File.findOne({ file_id, ...options }).lean();
@@ -20,7 +18,7 @@ const findFileById = async (file_id, options = {}) => {
 * @param {Object} [_sortOptions] - Optional sort parameters.
 * @param {Object|String} [selectFields={ text: 0 }] - Fields to include/exclude in the query results.
 * Default excludes the 'text' field.
 * @returns {Promise<Array<IMongoFile>>} A promise that resolves to an array of file documents.
 * @returns {Promise<Array<MongoFile>>} A promise that resolves to an array of file documents.
 */
const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => {
  const sortOptions = { updatedAt: -1, ..._sortOptions };
@@ -30,9 +28,10 @@ const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => {
/**
 * Retrieves tool files (files that are embedded or have a fileIdentifier) from an array of file IDs
 * @param {string[]} fileIds - Array of file_id strings to search for
 * @returns {Promise<Array<IMongoFile>>} Files that match the criteria
 * @param {Set<EToolResources>} toolResourceSet - Optional filter for tool resources
 * @returns {Promise<Array<MongoFile>>} Files that match the criteria
 */
const getToolFilesByIds = async (fileIds) => {
const getToolFilesByIds = async (fileIds, toolResourceSet) => {
  if (!fileIds || !fileIds.length) {
    return [];
  }
@@ -40,9 +39,19 @@ const getToolFilesByIds = async (fileIds) => {
  try {
    const filter = {
      file_id: { $in: fileIds },
      $or: [{ embedded: true }, { 'metadata.fileIdentifier': { $exists: true } }],
    };

    if (toolResourceSet.size) {
      filter.$or = [];
    }

    if (toolResourceSet.has(EToolResources.file_search)) {
      filter.$or.push({ embedded: true });
    }
    if (toolResourceSet.has(EToolResources.execute_code)) {
      filter.$or.push({ 'metadata.fileIdentifier': { $exists: true } });
    }

    const selectFields = { text: 0 };
    const sortOptions = { updatedAt: -1 };

@@ -55,9 +64,9 @@ const getToolFilesByIds = async (fileIds) => {

/**
 * Creates a new file with a TTL of 1 hour.
 * @param {IMongoFile} data - The file data to be created, must contain file_id.
 * @param {MongoFile} data - The file data to be created, must contain file_id.
 * @param {boolean} disableTTL - Whether to disable the TTL.
 * @returns {Promise<IMongoFile>} A promise that resolves to the created file document.
 * @returns {Promise<MongoFile>} A promise that resolves to the created file document.
 */
const createFile = async (data, disableTTL) => {
  const fileData = {
@@ -77,8 +86,8 @@ const createFile = async (data, disableTTL) => {

/**
 * Updates a file identified by file_id with new data and removes the TTL.
 * @param {IMongoFile} data - The data to update, must contain file_id.
 * @returns {Promise<IMongoFile>} A promise that resolves to the updated file document.
 * @param {MongoFile} data - The data to update, must contain file_id.
 * @returns {Promise<MongoFile>} A promise that resolves to the updated file document.
 */
const updateFile = async (data) => {
  const { file_id, ...update } = data;
@@ -91,8 +100,8 @@ const updateFile = async (data) => {

/**
 * Increments the usage of a file identified by file_id.
 * @param {IMongoFile} data - The data to update, must contain file_id and the increment value for usage.
 * @returns {Promise<IMongoFile>} A promise that resolves to the updated file document.
 * @param {MongoFile} data - The data to update, must contain file_id and the increment value for usage.
 * @returns {Promise<MongoFile>} A promise that resolves to the updated file document.
 */
const updateFileUsage = async (data) => {
  const { file_id, inc = 1 } = data;
@@ -106,7 +115,7 @@ const updateFileUsage = async (data) => {
/**
 * Deletes a file identified by file_id.
 * @param {string} file_id - The unique identifier of the file to delete.
 * @returns {Promise<IMongoFile>} A promise that resolves to the deleted file document or null.
 * @returns {Promise<MongoFile>} A promise that resolves to the deleted file document or null.
 */
const deleteFile = async (file_id) => {
  return await File.findOneAndDelete({ file_id }).lean();
@@ -115,7 +124,7 @@ const deleteFile = async (file_id) => {
/**
 * Deletes a file identified by a filter.
 * @param {object} filter - The filter criteria to apply.
 * @returns {Promise<IMongoFile>} A promise that resolves to the deleted file document or null.
 * @returns {Promise<MongoFile>} A promise that resolves to the deleted file document or null.
 */
const deleteFileByFilter = async (filter) => {
  return await File.findOneAndDelete(filter).lean();
@@ -157,7 +166,6 @@ async function batchUpdateFiles(updates) {
}

module.exports = {
  File,
  findFileById,
  getFiles,
  getToolFilesByIds,
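Aside: the new getToolFilesByIds builds the $or clause from the requested tool resources instead of always matching both conditions; note that it reads toolResourceSet.size unconditionally, so callers are presumably expected to pass a Set (possibly empty) despite the "optional" JSDoc. An illustrative trace, assuming the enum values are the strings shown (hypothetical inputs):

// Sketch: filter produced for a file_search-only request.
const toolResourceSet = new Set(['file_search']);
const filter = { file_id: { $in: ['file-abc'] }, $or: [] };
if (toolResourceSet.has('file_search')) {
  filter.$or.push({ embedded: true });
}
if (toolResourceSet.has('execute_code')) {
  filter.$or.push({ 'metadata.fileIdentifier': { $exists: true } });
}
// filter => { file_id: { $in: ['file-abc'] }, $or: [{ embedded: true }] }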
@@ -1,4 +0,0 @@
const mongoose = require('mongoose');
const { keySchema } = require('@librechat/data-schemas');

module.exports = mongoose.model('Key', keySchema);
@@ -1,6 +1,6 @@
const { z } = require('zod');
const Message = require('./schema/messageSchema');
const { logger } = require('~/config');
const { logger } = require('@librechat/data-schemas');
const { Message } = require('~/db/models');

const idSchema = z.string().uuid();

@@ -61,6 +61,13 @@ async function saveMessage(req, params, metadata) {
    update.expiredAt = null;
  }

  if (update.tokenCount != null && isNaN(update.tokenCount)) {
    logger.warn(
      `Resetting invalid \`tokenCount\` for message \`${params.messageId}\`: ${update.tokenCount}`,
    );
    logger.info(`---\`saveMessage\` context: ${metadata?.context}`);
    update.tokenCount = 0;
  }
  const message = await Message.findOneAndUpdate(
    { messageId: params.messageId, user: req.user.id },
    update,
@@ -97,7 +104,9 @@ async function saveMessage(req, params, metadata) {
    };
  } catch (findError) {
    // If the findOne also fails, log it but don't crash
    logger.warn(`Could not retrieve existing message with ID ${params.messageId}: ${findError.message}`);
    logger.warn(
      `Could not retrieve existing message with ID ${params.messageId}: ${findError.message}`,
    );
    return {
      ...params,
      messageId: params.messageId,
@@ -130,7 +139,6 @@ async function bulkSaveMessages(messages, overrideTimestamp = false) {
        upsert: true,
      },
    }));

    const result = await Message.bulkWrite(bulkOps);
    return result;
  } catch (err) {
@@ -245,6 +253,7 @@ async function updateMessage(req, message, metadata) {
      text: updatedMessage.text,
      isCreatedByUser: updatedMessage.isCreatedByUser,
      tokenCount: updatedMessage.tokenCount,
      feedback: updatedMessage.feedback,
    };
  } catch (err) {
    logger.error('Error updating message:', err);
@@ -345,7 +354,6 @@ async function deleteMessages(filter) {
}

module.exports = {
  Message,
  saveMessage,
  bulkSaveMessages,
  recordMessage,
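Aside: the new tokenCount guard in saveMessage leaves null/undefined counts alone but resets NaN to 0 before the upsert. A standalone sketch of the guard pattern (illustrative, not the project's code):

// Sketch: null/undefined pass through untouched; only NaN is reset to 0.
function sanitizeTokenCount(tokenCount) {
  if (tokenCount != null && isNaN(tokenCount)) {
    return 0;
  }
  return tokenCount;
}
console.log(sanitizeTokenCount(undefined)); // undefined
console.log(sanitizeTokenCount(NaN)); // 0
console.log(sanitizeTokenCount(42)); // 42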
@@ -1,32 +1,7 @@
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { v4: uuidv4 } = require('uuid');

jest.mock('mongoose');

const mockFindQuery = {
  select: jest.fn().mockReturnThis(),
  sort: jest.fn().mockReturnThis(),
  lean: jest.fn().mockReturnThis(),
  deleteMany: jest.fn().mockResolvedValue({ deletedCount: 1 }),
};

const mockSchema = {
  findOneAndUpdate: jest.fn(),
  updateOne: jest.fn(),
  findOne: jest.fn(() => ({
    lean: jest.fn(),
  })),
  find: jest.fn(() => mockFindQuery),
  deleteMany: jest.fn(),
};

mongoose.model.mockReturnValue(mockSchema);

jest.mock('~/models/schema/messageSchema', () => mockSchema);

jest.mock('~/config/winston', () => ({
  error: jest.fn(),
}));
const { messageSchema } = require('@librechat/data-schemas');

const {
  saveMessage,
@@ -35,77 +10,102 @@ const {
  deleteMessages,
  updateMessageText,
  deleteMessagesSince,
} = require('~/models/Message');
} = require('./Message');

/**
 * @type {import('mongoose').Model<import('@librechat/data-schemas').IMessage>}
 */
let Message;

describe('Message Operations', () => {
  let mongoServer;
  let mockReq;
  let mockMessage;
  let mockMessageData;

  beforeEach(() => {
    jest.clearAllMocks();
  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    Message = mongoose.models.Message || mongoose.model('Message', messageSchema);
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    // Clear database
    await Message.deleteMany({});

    mockReq = {
      user: { id: 'user123' },
    };

    mockMessage = {
    mockMessageData = {
      messageId: 'msg123',
      conversationId: uuidv4(),
      text: 'Hello, world!',
      user: 'user123',
    };

    mockSchema.findOneAndUpdate.mockResolvedValue({
      toObject: () => mockMessage,
    });
  });

  describe('saveMessage', () => {
    it('should save a message for an authenticated user', async () => {
      const result = await saveMessage(mockReq, mockMessage);
      expect(result).toEqual(mockMessage);
      expect(mockSchema.findOneAndUpdate).toHaveBeenCalledWith(
        { messageId: 'msg123', user: 'user123' },
        expect.objectContaining({ user: 'user123' }),
        expect.any(Object),
      );
      const result = await saveMessage(mockReq, mockMessageData);

      expect(result.messageId).toBe('msg123');
      expect(result.user).toBe('user123');
      expect(result.text).toBe('Hello, world!');

      // Verify the message was actually saved to the database
      const savedMessage = await Message.findOne({ messageId: 'msg123', user: 'user123' });
      expect(savedMessage).toBeTruthy();
      expect(savedMessage.text).toBe('Hello, world!');
    });

    it('should throw an error for unauthenticated user', async () => {
      mockReq.user = null;
      await expect(saveMessage(mockReq, mockMessage)).rejects.toThrow('User not authenticated');
      await expect(saveMessage(mockReq, mockMessageData)).rejects.toThrow('User not authenticated');
    });

    it('should throw an error for invalid conversation ID', async () => {
      mockMessage.conversationId = 'invalid-id';
      await expect(saveMessage(mockReq, mockMessage)).resolves.toBeUndefined();
    it('should handle invalid conversation ID gracefully', async () => {
      mockMessageData.conversationId = 'invalid-id';
      const result = await saveMessage(mockReq, mockMessageData);
      expect(result).toBeUndefined();
    });
  });

  describe('updateMessageText', () => {
    it('should update message text for the authenticated user', async () => {
      // First save a message
      await saveMessage(mockReq, mockMessageData);

      // Then update it
      await updateMessageText(mockReq, { messageId: 'msg123', text: 'Updated text' });
      expect(mockSchema.updateOne).toHaveBeenCalledWith(
        { messageId: 'msg123', user: 'user123' },
        { text: 'Updated text' },
      );

      // Verify the update
      const updatedMessage = await Message.findOne({ messageId: 'msg123', user: 'user123' });
      expect(updatedMessage.text).toBe('Updated text');
    });
  });

  describe('updateMessage', () => {
    it('should update a message for the authenticated user', async () => {
      mockSchema.findOneAndUpdate.mockResolvedValue(mockMessage);
      // First save a message
      await saveMessage(mockReq, mockMessageData);

      const result = await updateMessage(mockReq, { messageId: 'msg123', text: 'Updated text' });
      expect(result).toEqual(
        expect.objectContaining({
          messageId: 'msg123',
          text: 'Hello, world!',
        }),
      );

      expect(result.messageId).toBe('msg123');
      expect(result.text).toBe('Updated text');

      // Verify in database
      const updatedMessage = await Message.findOne({ messageId: 'msg123', user: 'user123' });
      expect(updatedMessage.text).toBe('Updated text');
    });

    it('should throw an error if message is not found', async () => {
      mockSchema.findOneAndUpdate.mockResolvedValue(null);
      await expect(
        updateMessage(mockReq, { messageId: 'nonexistent', text: 'Test' }),
      ).rejects.toThrow('Message not found or user not authorized.');
@@ -114,19 +114,45 @@ describe('Message Operations', () => {

  describe('deleteMessagesSince', () => {
    it('should delete messages only for the authenticated user', async () => {
      mockSchema.findOne().lean.mockResolvedValueOnce({ createdAt: new Date() });
      mockFindQuery.deleteMany.mockResolvedValueOnce({ deletedCount: 1 });
      const result = await deleteMessagesSince(mockReq, {
        messageId: 'msg123',
        conversationId: 'convo123',
      const conversationId = uuidv4();

      // Create multiple messages in the same conversation
      const message1 = await saveMessage(mockReq, {
        messageId: 'msg1',
        conversationId,
        text: 'First message',
        user: 'user123',
      });
      expect(mockSchema.findOne).toHaveBeenCalledWith({ messageId: 'msg123', user: 'user123' });
      expect(mockSchema.find).not.toHaveBeenCalled();
      expect(result).toBeUndefined();

      const message2 = await saveMessage(mockReq, {
        messageId: 'msg2',
        conversationId,
        text: 'Second message',
        user: 'user123',
      });

      const message3 = await saveMessage(mockReq, {
        messageId: 'msg3',
        conversationId,
        text: 'Third message',
        user: 'user123',
      });

      // Delete messages since message2 (this should only delete messages created AFTER msg2)
      await deleteMessagesSince(mockReq, {
        messageId: 'msg2',
        conversationId,
      });

      // Verify msg1 and msg2 remain, msg3 is deleted
      const remainingMessages = await Message.find({ conversationId, user: 'user123' });
      expect(remainingMessages).toHaveLength(2);
      expect(remainingMessages.map((m) => m.messageId)).toContain('msg1');
      expect(remainingMessages.map((m) => m.messageId)).toContain('msg2');
      expect(remainingMessages.map((m) => m.messageId)).not.toContain('msg3');
    });

    it('should return undefined if no message is found', async () => {
      mockSchema.findOne().lean.mockResolvedValueOnce(null);
      const result = await deleteMessagesSince(mockReq, {
        messageId: 'nonexistent',
        conversationId: 'convo123',
@@ -137,29 +163,71 @@ describe('Message Operations', () => {

  describe('getMessages', () => {
    it('should retrieve messages with the correct filter', async () => {
      const filter = { conversationId: 'convo123' };
      await getMessages(filter);
      expect(mockSchema.find).toHaveBeenCalledWith(filter);
      expect(mockFindQuery.sort).toHaveBeenCalledWith({ createdAt: 1 });
      expect(mockFindQuery.lean).toHaveBeenCalled();
      const conversationId = uuidv4();

      // Save some messages
      await saveMessage(mockReq, {
        messageId: 'msg1',
        conversationId,
        text: 'First message',
        user: 'user123',
      });

      await saveMessage(mockReq, {
        messageId: 'msg2',
        conversationId,
        text: 'Second message',
        user: 'user123',
      });

      const messages = await getMessages({ conversationId });
      expect(messages).toHaveLength(2);
      expect(messages[0].text).toBe('First message');
      expect(messages[1].text).toBe('Second message');
    });
  });

  describe('deleteMessages', () => {
    it('should delete messages with the correct filter', async () => {
      // Save some messages for different users
      await saveMessage(mockReq, mockMessageData);
      await saveMessage(
        { user: { id: 'user456' } },
        {
          messageId: 'msg456',
          conversationId: uuidv4(),
          text: 'Other user message',
          user: 'user456',
        },
      );

      await deleteMessages({ user: 'user123' });
      expect(mockSchema.deleteMany).toHaveBeenCalledWith({ user: 'user123' });

      // Verify only user123's messages were deleted
      const user123Messages = await Message.find({ user: 'user123' });
      const user456Messages = await Message.find({ user: 'user456' });

      expect(user123Messages).toHaveLength(0);
      expect(user456Messages).toHaveLength(1);
    });
  });

  describe('Conversation Hijacking Prevention', () => {
    it('should not allow editing a message in another user\'s conversation', async () => {
    it("should not allow editing a message in another user's conversation", async () => {
      const attackerReq = { user: { id: 'attacker123' } };
      const victimConversationId = 'victim-convo-123';
      const victimConversationId = uuidv4();
      const victimMessageId = 'victim-msg-123';

      mockSchema.findOneAndUpdate.mockResolvedValue(null);
      // First, save a message as the victim (but we'll try to edit as attacker)
      const victimReq = { user: { id: 'victim123' } };
      await saveMessage(victimReq, {
        messageId: victimMessageId,
        conversationId: victimConversationId,
        text: 'Victim message',
        user: 'victim123',
      });

      // Attacker tries to edit the victim's message
      await expect(
        updateMessage(attackerReq, {
          messageId: victimMessageId,
@@ -168,71 +236,82 @@ describe('Message Operations', () => {
        }),
      ).rejects.toThrow('Message not found or user not authorized.');

      expect(mockSchema.findOneAndUpdate).toHaveBeenCalledWith(
        { messageId: victimMessageId, user: 'attacker123' },
        expect.anything(),
        expect.anything(),
      );
      // Verify the original message is unchanged
      const originalMessage = await Message.findOne({
        messageId: victimMessageId,
        user: 'victim123',
      });
      expect(originalMessage.text).toBe('Victim message');
    });

    it('should not allow deleting messages from another user\'s conversation', async () => {
    it("should not allow deleting messages from another user's conversation", async () => {
      const attackerReq = { user: { id: 'attacker123' } };
      const victimConversationId = 'victim-convo-123';
      const victimConversationId = uuidv4();
      const victimMessageId = 'victim-msg-123';

      mockSchema.findOne().lean.mockResolvedValueOnce(null); // Simulating message not found for this user
      // Save a message as the victim
      const victimReq = { user: { id: 'victim123' } };
      await saveMessage(victimReq, {
        messageId: victimMessageId,
        conversationId: victimConversationId,
        text: 'Victim message',
        user: 'victim123',
      });

      // Attacker tries to delete from victim's conversation
      const result = await deleteMessagesSince(attackerReq, {
        messageId: victimMessageId,
        conversationId: victimConversationId,
      });

      expect(result).toBeUndefined();
      expect(mockSchema.findOne).toHaveBeenCalledWith({

      // Verify the victim's message still exists
      const victimMessage = await Message.findOne({
        messageId: victimMessageId,
        user: 'attacker123',
        user: 'victim123',
      });
      expect(victimMessage).toBeTruthy();
      expect(victimMessage.text).toBe('Victim message');
    });

    it('should not allow inserting a new message into another user\'s conversation', async () => {
    it("should not allow inserting a new message into another user's conversation", async () => {
      const attackerReq = { user: { id: 'attacker123' } };
      const victimConversationId = uuidv4(); // Use a valid UUID
      const victimConversationId = uuidv4();

      await expect(
        saveMessage(attackerReq, {
          conversationId: victimConversationId,
          text: 'Inserted malicious message',
          messageId: 'new-msg-123',
        }),
      ).resolves.not.toThrow(); // It should not throw an error
      // Attacker tries to save a message - this should succeed but with attacker's user ID
      const result = await saveMessage(attackerReq, {
        conversationId: victimConversationId,
        text: 'Inserted malicious message',
        messageId: 'new-msg-123',
        user: 'attacker123',
      });

      // Check that the message was saved with the attacker's user ID
      expect(mockSchema.findOneAndUpdate).toHaveBeenCalledWith(
        { messageId: 'new-msg-123', user: 'attacker123' },
        expect.objectContaining({
          user: 'attacker123',
          conversationId: victimConversationId,
        }),
        expect.anything(),
      );
      expect(result).toBeTruthy();
      expect(result.user).toBe('attacker123');

      // Verify the message was saved with the attacker's user ID, not as an anonymous message
      const savedMessage = await Message.findOne({ messageId: 'new-msg-123' });
      expect(savedMessage.user).toBe('attacker123');
      expect(savedMessage.conversationId).toBe(victimConversationId);
    });

    it('should allow retrieving messages from any conversation', async () => {
      const victimConversationId = 'victim-convo-123';
      const victimConversationId = uuidv4();

      await getMessages({ conversationId: victimConversationId });

      expect(mockSchema.find).toHaveBeenCalledWith({
      // Save a message in the victim's conversation
      const victimReq = { user: { id: 'victim123' } };
      await saveMessage(victimReq, {
        messageId: 'victim-msg',
        conversationId: victimConversationId,
        text: 'Victim message',
        user: 'victim123',
      });

      mockSchema.find.mockReturnValueOnce({
        select: jest.fn().mockReturnThis(),
        sort: jest.fn().mockReturnThis(),
        lean: jest.fn().mockResolvedValue([{ text: 'Test message' }]),
      });

      const result = await getMessages({ conversationId: victimConversationId });
      expect(result).toEqual([{ text: 'Test message' }]);
      // Anyone should be able to retrieve messages by conversation ID
      const messages = await getMessages({ conversationId: victimConversationId });
      expect(messages).toHaveLength(1);
      expect(messages[0].text).toBe('Victim message');
    });
  });
});
@@ -1,5 +1,5 @@
const Preset = require('./schema/presetSchema');
const { logger } = require('~/config');
const { logger } = require('@librechat/data-schemas');
const { Preset } = require('~/db/models');

const getPreset = async (user, presetId) => {
  try {
@@ -11,7 +11,6 @@ const getPreset = async (user, presetId) => {
};

module.exports = {
  Preset,
  getPreset,
  getPresets: async (user, filter) => {
    try {
@@ -1,8 +1,5 @@
const { model } = require('mongoose');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
const { projectSchema } = require('@librechat/data-schemas');

const Project = model('Project', projectSchema);
const { Project } = require('~/db/models');

/**
 * Retrieve a project by ID and convert the found project document to a plain object.
@@ -1,5 +1,5 @@
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { logger } = require('@librechat/data-schemas');
const { SystemRoles, SystemCategories, Constants } = require('librechat-data-provider');
const {
  getProjectByName,
@@ -7,12 +7,8 @@
  removeGroupIdsFromProject,
  removeGroupFromAllProjects,
} = require('./Project');
const { promptGroupSchema, promptSchema } = require('@librechat/data-schemas');
const { PromptGroup, Prompt } = require('~/db/models');
const { escapeRegExp } = require('~/server/utils');
const { logger } = require('~/config');

const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);
const Prompt = mongoose.model('Prompt', promptSchema);

/**
 * Create a pipeline for the aggregation to get prompt groups
@@ -1,34 +1,27 @@
const mongoose = require('mongoose');
const {
  CacheKeys,
  SystemRoles,
  roleDefaults,
  PermissionTypes,
  permissionsSchema,
  removeNullishValues,
  agentPermissionsSchema,
  promptPermissionsSchema,
  runCodePermissionsSchema,
  bookmarkPermissionsSchema,
  multiConvoPermissionsSchema,
  temporaryChatPermissionsSchema,
} = require('librechat-data-provider');
const { logger } = require('@librechat/data-schemas');
const getLogStores = require('~/cache/getLogStores');
const { roleSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');

const Role = mongoose.model('Role', roleSchema);
const { Role } = require('~/db/models');

/**
 * Retrieve a role by name and convert the found role document to a plain object.
 * If the role with the given name doesn't exist and the name is a system defined role, create it and return the lean version.
 * If the role with the given name doesn't exist and the name is a system defined role,
 * create it and return the lean version.
 *
 * @param {string} roleName - The name of the role to find or create.
 * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
 * @returns {Promise<Object>} A plain object representing the role document.
 */
const getRoleByName = async function (roleName, fieldsToSelect = null) {
  const cache = getLogStores(CacheKeys.ROLES);
  try {
    const cache = getLogStores(CacheKeys.ROLES);
    const cachedRole = await cache.get(roleName);
    if (cachedRole) {
      return cachedRole;
@@ -40,8 +33,7 @@ const getRoleByName = async function (roleName, fieldsToSelect = null) {
    let role = await query.lean().exec();

    if (!role && SystemRoles[roleName]) {
      role = roleDefaults[roleName];
      role = await new Role(role).save();
      role = await new Role(roleDefaults[roleName]).save();
      await cache.set(roleName, role);
      return role.toObject();
    }
@@ -60,8 +52,8 @@ const getRoleByName = async function (roleName, fieldsToSelect = null) {
 * @returns {Promise<TRole>} Updated role document.
 */
const updateRoleByName = async function (roleName, updates) {
  const cache = getLogStores(CacheKeys.ROLES);
  try {
    const cache = getLogStores(CacheKeys.ROLES);
    const role = await Role.findOneAndUpdate(
      { name: roleName },
      { $set: updates },
@@ -77,29 +69,20 @@ const updateRoleByName = async function (roleName, updates) {
  }
};

const permissionSchemas = {
  [PermissionTypes.AGENTS]: agentPermissionsSchema,
  [PermissionTypes.PROMPTS]: promptPermissionsSchema,
  [PermissionTypes.BOOKMARKS]: bookmarkPermissionsSchema,
  [PermissionTypes.MULTI_CONVO]: multiConvoPermissionsSchema,
  [PermissionTypes.TEMPORARY_CHAT]: temporaryChatPermissionsSchema,
  [PermissionTypes.RUN_CODE]: runCodePermissionsSchema,
};

/**
 * Updates access permissions for a specific role and multiple permission types.
 * @param {SystemRoles} roleName - The role to update.
 * @param {string} roleName - The role to update.
 * @param {Object.<PermissionTypes, Object.<Permissions, boolean>>} permissionsUpdate - Permissions to update and their values.
 */
async function updateAccessPermissions(roleName, permissionsUpdate) {
  // Filter and clean the permission updates based on our schema definition.
  const updates = {};
  for (const [permissionType, permissions] of Object.entries(permissionsUpdate)) {
    if (permissionSchemas[permissionType]) {
    if (permissionsSchema.shape && permissionsSchema.shape[permissionType]) {
      updates[permissionType] = removeNullishValues(permissions);
    }
  }

  if (Object.keys(updates).length === 0) {
  if (!Object.keys(updates).length) {
    return;
  }

@@ -109,26 +92,75 @@ async function updateAccessPermissions(roleName, permissionsUpdate) {
    return;
  }

  const updatedPermissions = {};
  const currentPermissions = role.permissions || {};
  const updatedPermissions = { ...currentPermissions };
  let hasChanges = false;

  const unsetFields = {};
  const permissionTypes = Object.keys(permissionsSchema.shape || {});
  for (const permType of permissionTypes) {
    if (role[permType] && typeof role[permType] === 'object') {
      logger.info(
        `Migrating '${roleName}' role from old schema: found '${permType}' at top level`,
      );

      updatedPermissions[permType] = {
        ...updatedPermissions[permType],
        ...role[permType],
      };

      unsetFields[permType] = 1;
      hasChanges = true;
    }
  }

  // Process the current updates
  for (const [permissionType, permissions] of Object.entries(updates)) {
    const currentPermissions = role[permissionType] || {};
    updatedPermissions[permissionType] = { ...currentPermissions };
    const currentTypePermissions = currentPermissions[permissionType] || {};
    updatedPermissions[permissionType] = { ...currentTypePermissions };

    for (const [permission, value] of Object.entries(permissions)) {
      if (currentPermissions[permission] !== value) {
      if (currentTypePermissions[permission] !== value) {
        updatedPermissions[permissionType][permission] = value;
        hasChanges = true;
        logger.info(
          `Updating '${roleName}' role ${permissionType} '${permission}' permission from ${currentPermissions[permission]} to: ${value}`,
          `Updating '${roleName}' role permission '${permissionType}' '${permission}' from ${currentTypePermissions[permission]} to: ${value}`,
        );
      }
    }
  }

  if (hasChanges) {
    await updateRoleByName(roleName, updatedPermissions);
    const updateObj = { permissions: updatedPermissions };

    if (Object.keys(unsetFields).length > 0) {
      logger.info(
        `Unsetting old schema fields for '${roleName}' role: ${Object.keys(unsetFields).join(', ')}`,
      );

      try {
        await Role.updateOne(
          { name: roleName },
          {
            $set: updateObj,
            $unset: unsetFields,
          },
        );

        const cache = getLogStores(CacheKeys.ROLES);
        const updatedRole = await Role.findOne({ name: roleName }).select('-__v').lean().exec();
        await cache.set(roleName, updatedRole);

        logger.info(`Updated role '${roleName}' and removed old schema fields`);
      } catch (updateError) {
        logger.error(`Error during role migration update: ${updateError.message}`);
        throw updateError;
      }
    } else {
      // Standard update if no migration needed
      await updateRoleByName(roleName, updateObj);
    }

    logger.info(`Updated '${roleName}' role permissions`);
  } else {
    logger.info(`No changes needed for '${roleName}' role permissions`);
@@ -139,41 +171,87 @@ async function updateAccessPermissions(roleName, permissionsUpdate) {
}

/**
 * Initialize default roles in the system.
 * Creates the default roles (ADMIN, USER) if they don't exist in the database.
 * Updates existing roles with new permission types if they're missing.
 * Migrates roles from old schema to new schema structure.
 * This can be called directly to fix existing roles.
 *
 * @returns {Promise<void>}
 * @param {string} [roleName] - Optional specific role to migrate. If not provided, migrates all roles.
 * @returns {Promise<number>} Number of roles migrated.
 */
const initializeRoles = async function () {
  const defaultRoles = [SystemRoles.ADMIN, SystemRoles.USER];

  for (const roleName of defaultRoles) {
    let role = await Role.findOne({ name: roleName });

    if (!role) {
      // Create new role if it doesn't exist
      role = new Role(roleDefaults[roleName]);
const migrateRoleSchema = async function (roleName) {
  try {
    // Get roles to migrate
    let roles;
    if (roleName) {
      const role = await Role.findOne({ name: roleName });
      roles = role ? [role] : [];
    } else {
      // Add missing permission types
      let isUpdated = false;
      for (const permType of Object.values(PermissionTypes)) {
        if (!role[permType]) {
          role[permType] = roleDefaults[roleName][permType];
          isUpdated = true;
      roles = await Role.find({});
    }

    logger.info(`Migrating ${roles.length} roles to new schema structure`);
    let migratedCount = 0;

    for (const role of roles) {
      const permissionTypes = Object.keys(permissionsSchema.shape || {});
      const unsetFields = {};
      let hasOldSchema = false;

      // Check for old schema fields
      for (const permType of permissionTypes) {
        if (role[permType] && typeof role[permType] === 'object') {
          hasOldSchema = true;

          // Ensure permissions object exists
          role.permissions = role.permissions || {};

          // Migrate permissions from old location to new
          role.permissions[permType] = {
            ...role.permissions[permType],
            ...role[permType],
          };

          // Mark field for removal
          unsetFields[permType] = 1;
        }
      }
      if (isUpdated) {
        await role.save();

      if (hasOldSchema) {
        try {
          logger.info(`Migrating role '${role.name}' from old schema structure`);

          // Simple update operation
          await Role.updateOne(
            { _id: role._id },
            {
              $set: { permissions: role.permissions },
              $unset: unsetFields,
            },
          );

          // Refresh cache
          const cache = getLogStores(CacheKeys.ROLES);
          const updatedRole = await Role.findById(role._id).lean().exec();
          await cache.set(role.name, updatedRole);

          migratedCount++;
          logger.info(`Migrated role '${role.name}'`);
        } catch (error) {
          logger.error(`Failed to migrate role '${role.name}': ${error.message}`);
        }
      }
    }
    await role.save();

    logger.info(`Migration complete: ${migratedCount} roles migrated`);
    return migratedCount;
  } catch (error) {
    logger.error(`Role schema migration failed: ${error.message}`);
    throw error;
  }
};

module.exports = {
  Role,
  getRoleByName,
  initializeRoles,
  updateRoleByName,
  migrateRoleSchema,
  updateAccessPermissions,
};
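Aside: the Role changes above move per-type permission flags from top-level document fields into a nested permissions object, migrating old documents in place and $unset-ting the legacy fields. The before/after document shapes, for orientation (keys come from PermissionTypes; values here are illustrative):

// Old shape (pre-migration): permission types live at the top level.
const oldRole = {
  name: 'USER',
  PROMPTS: { CREATE: true, USE: true, SHARED_GLOBAL: false },
  BOOKMARKS: { USE: true },
};

// New shape (post-migration): permission types nest under `permissions`.
const newRole = {
  name: 'USER',
  permissions: {
    PROMPTS: { CREATE: true, USE: true, SHARED_GLOBAL: false },
    BOOKMARKS: { USE: true },
  },
};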
@@ -2,22 +2,23 @@ const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
  SystemRoles,
  PermissionTypes,
  roleDefaults,
  Permissions,
  roleDefaults,
  PermissionTypes,
} = require('librechat-data-provider');
const { updateAccessPermissions, initializeRoles } = require('~/models/Role');
const { getRoleByName, updateAccessPermissions } = require('~/models/Role');
const getLogStores = require('~/cache/getLogStores');
const { Role } = require('~/models/Role');
const { initializeRoles } = require('~/models');
const { Role } = require('~/db/models');

// Mock the cache
jest.mock('~/cache/getLogStores', () => {
  return jest.fn().mockReturnValue({
jest.mock('~/cache/getLogStores', () =>
  jest.fn().mockReturnValue({
    get: jest.fn(),
    set: jest.fn(),
    del: jest.fn(),
  });
});
  }),
);

let mongoServer;

@@ -41,10 +42,12 @@ describe('updateAccessPermissions', () => {
  it('should update permissions when changes are needed', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      permissions: {
        [PermissionTypes.PROMPTS]: {
          CREATE: true,
          USE: true,
          SHARED_GLOBAL: false,
        },
      },
    }).save();

@@ -56,8 +59,8 @@ describe('updateAccessPermissions', () => {
      },
    });

    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
    const updatedRole = await getRoleByName(SystemRoles.USER);
    expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
      CREATE: true,
      USE: true,
      SHARED_GLOBAL: true,
@@ -67,10 +70,12 @@ describe('updateAccessPermissions', () => {
  it('should not update permissions when no changes are needed', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      permissions: {
        [PermissionTypes.PROMPTS]: {
          CREATE: true,
          USE: true,
          SHARED_GLOBAL: false,
        },
      },
    }).save();

@@ -82,8 +87,8 @@ describe('updateAccessPermissions', () => {
      },
    });

    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
    const updatedRole = await getRoleByName(SystemRoles.USER);
    expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
      CREATE: true,
      USE: true,
      SHARED_GLOBAL: false,
@@ -92,11 +97,8 @@ describe('updateAccessPermissions', () => {

  it('should handle non-existent roles', async () => {
    await updateAccessPermissions('NON_EXISTENT_ROLE', {
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
      },
      [PermissionTypes.PROMPTS]: { CREATE: true },
    });

    const role = await Role.findOne({ name: 'NON_EXISTENT_ROLE' });
    expect(role).toBeNull();
  });
@@ -104,21 +106,21 @@ describe('updateAccessPermissions', () => {
  it('should update only specified permissions', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      permissions: {
        [PermissionTypes.PROMPTS]: {
          CREATE: true,
          USE: true,
          SHARED_GLOBAL: false,
        },
      },
    }).save();

    await updateAccessPermissions(SystemRoles.USER, {
      [PermissionTypes.PROMPTS]: {
        SHARED_GLOBAL: true,
      },
      [PermissionTypes.PROMPTS]: { SHARED_GLOBAL: true },
    });

    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
    const updatedRole = await getRoleByName(SystemRoles.USER);
    expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
      CREATE: true,
      USE: true,
      SHARED_GLOBAL: true,
@@ -128,21 +130,21 @@ describe('updateAccessPermissions', () => {
  it('should handle partial updates', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      permissions: {
        [PermissionTypes.PROMPTS]: {
          CREATE: true,
          USE: true,
          SHARED_GLOBAL: false,
        },
      },
    }).save();

    await updateAccessPermissions(SystemRoles.USER, {
      [PermissionTypes.PROMPTS]: {
        USE: false,
      },
      [PermissionTypes.PROMPTS]: { USE: false },
    });

    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
    const updatedRole = await getRoleByName(SystemRoles.USER);
    expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
      CREATE: true,
      USE: false,
      SHARED_GLOBAL: false,
@@ -152,13 +154,9 @@ describe('updateAccessPermissions', () => {
  it('should update multiple permission types at once', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      },
      [PermissionTypes.BOOKMARKS]: {
        USE: true,
      permissions: {
        [PermissionTypes.PROMPTS]: { CREATE: true, USE: true, SHARED_GLOBAL: false },
        [PermissionTypes.BOOKMARKS]: { USE: true },
      },
    }).save();

@@ -167,24 +165,20 @@ describe('updateAccessPermissions', () => {
      [PermissionTypes.BOOKMARKS]: { USE: false },
    });

    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
    const updatedRole = await getRoleByName(SystemRoles.USER);
    expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
      CREATE: true,
      USE: false,
      SHARED_GLOBAL: true,
    });
    expect(updatedRole[PermissionTypes.BOOKMARKS]).toEqual({
      USE: false,
    });
    expect(updatedRole.permissions[PermissionTypes.BOOKMARKS]).toEqual({ USE: false });
  });

  it('should handle updates for a single permission type', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      permissions: {
        [PermissionTypes.PROMPTS]: { CREATE: true, USE: true, SHARED_GLOBAL: false },
      },
    }).save();

@@ -192,8 +186,8 @@ describe('updateAccessPermissions', () => {
      [PermissionTypes.PROMPTS]: { USE: false, SHARED_GLOBAL: true },
    });

    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
    const updatedRole = await getRoleByName(SystemRoles.USER);
    expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
      CREATE: true,
      USE: false,
      SHARED_GLOBAL: true,
@@ -203,33 +197,25 @@ describe('updateAccessPermissions', () => {
  it('should update MULTI_CONVO permissions', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.MULTI_CONVO]: {
        USE: false,
      permissions: {
        [PermissionTypes.MULTI_CONVO]: { USE: false },
      },
    }).save();

    await updateAccessPermissions(SystemRoles.USER, {
      [PermissionTypes.MULTI_CONVO]: {
        USE: true,
      },
      [PermissionTypes.MULTI_CONVO]: { USE: true },
    });

    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.MULTI_CONVO]).toEqual({
      USE: true,
    });
    const updatedRole = await getRoleByName(SystemRoles.USER);
    expect(updatedRole.permissions[PermissionTypes.MULTI_CONVO]).toEqual({ USE: true });
  });

  it('should update MULTI_CONVO permissions along with other permission types', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      },
      [PermissionTypes.MULTI_CONVO]: {
        USE: false,
      permissions: {
        [PermissionTypes.PROMPTS]: { CREATE: true, USE: true, SHARED_GLOBAL: false },
        [PermissionTypes.MULTI_CONVO]: { USE: false },
      },
    }).save();

@@ -238,35 +224,29 @@ describe('updateAccessPermissions', () => {
      [PermissionTypes.MULTI_CONVO]: { USE: true },
    });

    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
    const updatedRole = await getRoleByName(SystemRoles.USER);
|
||||
expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
|
||||
CREATE: true,
|
||||
USE: true,
|
||||
SHARED_GLOBAL: true,
|
||||
});
|
||||
expect(updatedRole[PermissionTypes.MULTI_CONVO]).toEqual({
|
||||
USE: true,
|
||||
});
|
||||
expect(updatedRole.permissions[PermissionTypes.MULTI_CONVO]).toEqual({ USE: true });
|
||||
});
|
||||
|
||||
it('should not update MULTI_CONVO permissions when no changes are needed', async () => {
|
||||
await new Role({
|
||||
name: SystemRoles.USER,
|
||||
[PermissionTypes.MULTI_CONVO]: {
|
||||
USE: true,
|
||||
permissions: {
|
||||
[PermissionTypes.MULTI_CONVO]: { USE: true },
|
||||
},
|
||||
}).save();
|
||||
|
||||
await updateAccessPermissions(SystemRoles.USER, {
|
||||
[PermissionTypes.MULTI_CONVO]: {
|
||||
USE: true,
|
||||
},
|
||||
[PermissionTypes.MULTI_CONVO]: { USE: true },
|
||||
});
|
||||
|
||||
const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
|
||||
expect(updatedRole[PermissionTypes.MULTI_CONVO]).toEqual({
|
||||
USE: true,
|
||||
});
|
||||
const updatedRole = await getRoleByName(SystemRoles.USER);
|
||||
expect(updatedRole.permissions[PermissionTypes.MULTI_CONVO]).toEqual({ USE: true });
|
||||
});
|
||||
});
|
||||
|
||||
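The pattern running through this suite: permission flags move off the top level of the role document into a nested `permissions` field, and reads switch from raw `Role.findOne(...).lean()` queries to `getRoleByName`. A minimal sketch of the shape change, assuming the same `PermissionTypes` enum from `librechat-data-provider` (the `legacyRole` and `updatedRole` names are illustrative only):

// Illustrative sketch of the schema change these tests exercise.
const { PermissionTypes } = require('librechat-data-provider');

// Old (flat) shape: each permission type sits at the top level of the role.
const legacyRole = {
  name: 'USER',
  [PermissionTypes.PROMPTS]: { CREATE: true, USE: true, SHARED_GLOBAL: false },
};

// New (nested) shape: the same flags live under a `permissions` sub-document.
const updatedRole = {
  name: 'USER',
  permissions: {
    [PermissionTypes.PROMPTS]: { CREATE: true, USE: true, SHARED_GLOBAL: false },
  },
};

// Reads change accordingly, from role[type] to role.permissions[type].
console.log(updatedRole.permissions[PermissionTypes.PROMPTS].USE); // true
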
@@ -278,65 +258,69 @@ describe('initializeRoles', () => {
  it('should create default roles if they do not exist', async () => {
    await initializeRoles();

    const adminRole = await Role.findOne({ name: SystemRoles.ADMIN }).lean();
    const userRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    const adminRole = await getRoleByName(SystemRoles.ADMIN);
    const userRole = await getRoleByName(SystemRoles.USER);

    expect(adminRole).toBeTruthy();
    expect(userRole).toBeTruthy();

    // Check if all permission types exist
    // Check if all permission types exist in the permissions field
    Object.values(PermissionTypes).forEach((permType) => {
      expect(adminRole[permType]).toBeDefined();
      expect(userRole[permType]).toBeDefined();
      expect(adminRole.permissions[permType]).toBeDefined();
      expect(userRole.permissions[permType]).toBeDefined();
    });

    // Check if permissions match defaults (example for ADMIN role)
    expect(adminRole[PermissionTypes.PROMPTS].SHARED_GLOBAL).toBe(true);
    expect(adminRole[PermissionTypes.BOOKMARKS].USE).toBe(true);
    expect(adminRole[PermissionTypes.AGENTS].CREATE).toBe(true);
    // Example: Check default values for ADMIN role
    expect(adminRole.permissions[PermissionTypes.PROMPTS].SHARED_GLOBAL).toBe(true);
    expect(adminRole.permissions[PermissionTypes.BOOKMARKS].USE).toBe(true);
    expect(adminRole.permissions[PermissionTypes.AGENTS].CREATE).toBe(true);
  });

  it('should not modify existing permissions for existing roles', async () => {
    const customUserRole = {
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        [Permissions.USE]: false,
        [Permissions.CREATE]: true,
        [Permissions.SHARED_GLOBAL]: true,
      },
      [PermissionTypes.BOOKMARKS]: {
        [Permissions.USE]: false,
      permissions: {
        [PermissionTypes.PROMPTS]: {
          [Permissions.USE]: false,
          [Permissions.CREATE]: true,
          [Permissions.SHARED_GLOBAL]: true,
        },
        [PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
      },
    };

    await new Role(customUserRole).save();

    await initializeRoles();

    const userRole = await Role.findOne({ name: SystemRoles.USER }).lean();

    expect(userRole[PermissionTypes.PROMPTS]).toEqual(customUserRole[PermissionTypes.PROMPTS]);
    expect(userRole[PermissionTypes.BOOKMARKS]).toEqual(customUserRole[PermissionTypes.BOOKMARKS]);
    expect(userRole[PermissionTypes.AGENTS]).toBeDefined();
    const userRole = await getRoleByName(SystemRoles.USER);
    expect(userRole.permissions[PermissionTypes.PROMPTS]).toEqual(
      customUserRole.permissions[PermissionTypes.PROMPTS],
    );
    expect(userRole.permissions[PermissionTypes.BOOKMARKS]).toEqual(
      customUserRole.permissions[PermissionTypes.BOOKMARKS],
    );
    expect(userRole.permissions[PermissionTypes.AGENTS]).toBeDefined();
  });

  it('should add new permission types to existing roles', async () => {
    const partialUserRole = {
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: roleDefaults[SystemRoles.USER][PermissionTypes.PROMPTS],
      [PermissionTypes.BOOKMARKS]: roleDefaults[SystemRoles.USER][PermissionTypes.BOOKMARKS],
      permissions: {
        [PermissionTypes.PROMPTS]:
          roleDefaults[SystemRoles.USER].permissions[PermissionTypes.PROMPTS],
        [PermissionTypes.BOOKMARKS]:
          roleDefaults[SystemRoles.USER].permissions[PermissionTypes.BOOKMARKS],
      },
    };

    await new Role(partialUserRole).save();

    await initializeRoles();

    const userRole = await Role.findOne({ name: SystemRoles.USER }).lean();

    expect(userRole[PermissionTypes.AGENTS]).toBeDefined();
    expect(userRole[PermissionTypes.AGENTS].CREATE).toBeDefined();
    expect(userRole[PermissionTypes.AGENTS].USE).toBeDefined();
    expect(userRole[PermissionTypes.AGENTS].SHARED_GLOBAL).toBeDefined();
    const userRole = await getRoleByName(SystemRoles.USER);
    expect(userRole.permissions[PermissionTypes.AGENTS]).toBeDefined();
    expect(userRole.permissions[PermissionTypes.AGENTS].CREATE).toBeDefined();
    expect(userRole.permissions[PermissionTypes.AGENTS].USE).toBeDefined();
    expect(userRole.permissions[PermissionTypes.AGENTS].SHARED_GLOBAL).toBeDefined();
  });

  it('should handle multiple runs without duplicating or modifying data', async () => {
@@ -349,72 +333,73 @@ describe('initializeRoles', () => {
    expect(adminRoles).toHaveLength(1);
    expect(userRoles).toHaveLength(1);

    const adminRole = adminRoles[0].toObject();
    const userRole = userRoles[0].toObject();

    // Check if all permission types exist
    const adminPerms = adminRoles[0].toObject().permissions;
    const userPerms = userRoles[0].toObject().permissions;
    Object.values(PermissionTypes).forEach((permType) => {
      expect(adminRole[permType]).toBeDefined();
      expect(userRole[permType]).toBeDefined();
      expect(adminPerms[permType]).toBeDefined();
      expect(userPerms[permType]).toBeDefined();
    });
  });

  it('should update roles with missing permission types from roleDefaults', async () => {
    const partialAdminRole = {
      name: SystemRoles.ADMIN,
      [PermissionTypes.PROMPTS]: {
        [Permissions.USE]: false,
        [Permissions.CREATE]: false,
        [Permissions.SHARED_GLOBAL]: false,
      permissions: {
        [PermissionTypes.PROMPTS]: {
          [Permissions.USE]: false,
          [Permissions.CREATE]: false,
          [Permissions.SHARED_GLOBAL]: false,
        },
        [PermissionTypes.BOOKMARKS]:
          roleDefaults[SystemRoles.ADMIN].permissions[PermissionTypes.BOOKMARKS],
      },
      [PermissionTypes.BOOKMARKS]: roleDefaults[SystemRoles.ADMIN][PermissionTypes.BOOKMARKS],
    };

    await new Role(partialAdminRole).save();

    await initializeRoles();

    const adminRole = await Role.findOne({ name: SystemRoles.ADMIN }).lean();

    expect(adminRole[PermissionTypes.PROMPTS]).toEqual(partialAdminRole[PermissionTypes.PROMPTS]);
    expect(adminRole[PermissionTypes.AGENTS]).toBeDefined();
    expect(adminRole[PermissionTypes.AGENTS].CREATE).toBeDefined();
    expect(adminRole[PermissionTypes.AGENTS].USE).toBeDefined();
    expect(adminRole[PermissionTypes.AGENTS].SHARED_GLOBAL).toBeDefined();
    const adminRole = await getRoleByName(SystemRoles.ADMIN);
    expect(adminRole.permissions[PermissionTypes.PROMPTS]).toEqual(
      partialAdminRole.permissions[PermissionTypes.PROMPTS],
    );
    expect(adminRole.permissions[PermissionTypes.AGENTS]).toBeDefined();
    expect(adminRole.permissions[PermissionTypes.AGENTS].CREATE).toBeDefined();
    expect(adminRole.permissions[PermissionTypes.AGENTS].USE).toBeDefined();
    expect(adminRole.permissions[PermissionTypes.AGENTS].SHARED_GLOBAL).toBeDefined();
  });

  it('should include MULTI_CONVO permissions when creating default roles', async () => {
    await initializeRoles();

    const adminRole = await Role.findOne({ name: SystemRoles.ADMIN }).lean();
    const userRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    const adminRole = await getRoleByName(SystemRoles.ADMIN);
    const userRole = await getRoleByName(SystemRoles.USER);

    expect(adminRole[PermissionTypes.MULTI_CONVO]).toBeDefined();
    expect(userRole[PermissionTypes.MULTI_CONVO]).toBeDefined();

    // Check if MULTI_CONVO permissions match defaults
    expect(adminRole[PermissionTypes.MULTI_CONVO].USE).toBe(
      roleDefaults[SystemRoles.ADMIN][PermissionTypes.MULTI_CONVO].USE,
    expect(adminRole.permissions[PermissionTypes.MULTI_CONVO]).toBeDefined();
    expect(userRole.permissions[PermissionTypes.MULTI_CONVO]).toBeDefined();
    expect(adminRole.permissions[PermissionTypes.MULTI_CONVO].USE).toBe(
      roleDefaults[SystemRoles.ADMIN].permissions[PermissionTypes.MULTI_CONVO].USE,
    );
    expect(userRole[PermissionTypes.MULTI_CONVO].USE).toBe(
      roleDefaults[SystemRoles.USER][PermissionTypes.MULTI_CONVO].USE,
    expect(userRole.permissions[PermissionTypes.MULTI_CONVO].USE).toBe(
      roleDefaults[SystemRoles.USER].permissions[PermissionTypes.MULTI_CONVO].USE,
    );
  });

  it('should add MULTI_CONVO permissions to existing roles without them', async () => {
    const partialUserRole = {
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: roleDefaults[SystemRoles.USER][PermissionTypes.PROMPTS],
      [PermissionTypes.BOOKMARKS]: roleDefaults[SystemRoles.USER][PermissionTypes.BOOKMARKS],
      permissions: {
        [PermissionTypes.PROMPTS]:
          roleDefaults[SystemRoles.USER].permissions[PermissionTypes.PROMPTS],
        [PermissionTypes.BOOKMARKS]:
          roleDefaults[SystemRoles.USER].permissions[PermissionTypes.BOOKMARKS],
      },
    };

    await new Role(partialUserRole).save();

    await initializeRoles();

    const userRole = await Role.findOne({ name: SystemRoles.USER }).lean();

    expect(userRole[PermissionTypes.MULTI_CONVO]).toBeDefined();
    expect(userRole[PermissionTypes.MULTI_CONVO].USE).toBeDefined();
    const userRole = await getRoleByName(SystemRoles.USER);
    expect(userRole.permissions[PermissionTypes.MULTI_CONVO]).toBeDefined();
    expect(userRole.permissions[PermissionTypes.MULTI_CONVO].USE).toBeDefined();
  });
});

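These cases pin down the seeding contract: `initializeRoles` creates missing system roles, backfills newly added permission types, and never overwrites customized values, however many times it runs. A compact sketch of that contract, assuming the helpers are exported from `~/models/Role` as the suite implies and an in-memory MongoDB is connected:

// Sketch only: distills the idempotency contract verified above.
const { initializeRoles, getRoleByName } = require('~/models/Role');
const { SystemRoles, PermissionTypes } = require('librechat-data-provider');

async function demonstrateSeeding() {
  await initializeRoles();
  await initializeRoles(); // a second run must not duplicate or modify roles

  const userRole = await getRoleByName(SystemRoles.USER);
  // Every known permission type is present under the nested field.
  for (const permType of Object.values(PermissionTypes)) {
    console.assert(userRole.permissions[permType] !== undefined);
  }
}
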
@@ -1,275 +0,0 @@
const mongoose = require('mongoose');
const signPayload = require('~/server/services/signPayload');
const { hashToken } = require('~/server/utils/crypto');
const { sessionSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');

const Session = mongoose.model('Session', sessionSchema);

const { REFRESH_TOKEN_EXPIRY } = process.env ?? {};
const expires = eval(REFRESH_TOKEN_EXPIRY) ?? 1000 * 60 * 60 * 24 * 7; // 7 days default

/**
 * Error class for Session-related errors
 */
class SessionError extends Error {
  constructor(message, code = 'SESSION_ERROR') {
    super(message);
    this.name = 'SessionError';
    this.code = code;
  }
}

/**
 * Creates a new session for a user
 * @param {string} userId - The ID of the user
 * @param {Object} options - Additional options for session creation
 * @param {Date} options.expiration - Custom expiration date
 * @returns {Promise<{session: Session, refreshToken: string}>}
 * @throws {SessionError}
 */
const createSession = async (userId, options = {}) => {
  if (!userId) {
    throw new SessionError('User ID is required', 'INVALID_USER_ID');
  }

  try {
    const session = new Session({
      user: userId,
      expiration: options.expiration || new Date(Date.now() + expires),
    });
    const refreshToken = await generateRefreshToken(session);
    return { session, refreshToken };
  } catch (error) {
    logger.error('[createSession] Error creating session:', error);
    throw new SessionError('Failed to create session', 'CREATE_SESSION_FAILED');
  }
};

/**
 * Finds a session by various parameters
 * @param {Object} params - Search parameters
 * @param {string} [params.refreshToken] - The refresh token to search by
 * @param {string} [params.userId] - The user ID to search by
 * @param {string} [params.sessionId] - The session ID to search by
 * @param {Object} [options] - Additional options
 * @param {boolean} [options.lean=true] - Whether to return plain objects instead of documents
 * @returns {Promise<Session|null>}
 * @throws {SessionError}
 */
const findSession = async (params, options = { lean: true }) => {
  try {
    const query = {};

    if (!params.refreshToken && !params.userId && !params.sessionId) {
      throw new SessionError('At least one search parameter is required', 'INVALID_SEARCH_PARAMS');
    }

    if (params.refreshToken) {
      const tokenHash = await hashToken(params.refreshToken);
      query.refreshTokenHash = tokenHash;
    }

    if (params.userId) {
      query.user = params.userId;
    }

    if (params.sessionId) {
      const sessionId = params.sessionId.sessionId || params.sessionId;
      if (!mongoose.Types.ObjectId.isValid(sessionId)) {
        throw new SessionError('Invalid session ID format', 'INVALID_SESSION_ID');
      }
      query._id = sessionId;
    }

    // Add expiration check to only return valid sessions
    query.expiration = { $gt: new Date() };

    const sessionQuery = Session.findOne(query);

    if (options.lean) {
      return await sessionQuery.lean();
    }

    return await sessionQuery.exec();
  } catch (error) {
    logger.error('[findSession] Error finding session:', error);
    throw new SessionError('Failed to find session', 'FIND_SESSION_FAILED');
  }
};

/**
 * Updates session expiration
 * @param {Session|string} session - The session or session ID to update
 * @param {Date} [newExpiration] - Optional new expiration date
 * @returns {Promise<Session>}
 * @throws {SessionError}
 */
const updateExpiration = async (session, newExpiration) => {
  try {
    const sessionDoc = typeof session === 'string' ? await Session.findById(session) : session;

    if (!sessionDoc) {
      throw new SessionError('Session not found', 'SESSION_NOT_FOUND');
    }

    sessionDoc.expiration = newExpiration || new Date(Date.now() + expires);
    return await sessionDoc.save();
  } catch (error) {
    logger.error('[updateExpiration] Error updating session:', error);
    throw new SessionError('Failed to update session expiration', 'UPDATE_EXPIRATION_FAILED');
  }
};

/**
 * Deletes a session by refresh token or session ID
 * @param {Object} params - Delete parameters
 * @param {string} [params.refreshToken] - The refresh token of the session to delete
 * @param {string} [params.sessionId] - The ID of the session to delete
 * @returns {Promise<Object>}
 * @throws {SessionError}
 */
const deleteSession = async (params) => {
  try {
    if (!params.refreshToken && !params.sessionId) {
      throw new SessionError(
        'Either refreshToken or sessionId is required',
        'INVALID_DELETE_PARAMS',
      );
    }

    const query = {};

    if (params.refreshToken) {
      query.refreshTokenHash = await hashToken(params.refreshToken);
    }

    if (params.sessionId) {
      query._id = params.sessionId;
    }

    const result = await Session.deleteOne(query);

    if (result.deletedCount === 0) {
      logger.warn('[deleteSession] No session found to delete');
    }

    return result;
  } catch (error) {
    logger.error('[deleteSession] Error deleting session:', error);
    throw new SessionError('Failed to delete session', 'DELETE_SESSION_FAILED');
  }
};

/**
 * Deletes all sessions for a user
 * @param {string} userId - The ID of the user
 * @param {Object} [options] - Additional options
 * @param {boolean} [options.excludeCurrentSession] - Whether to exclude the current session
 * @param {string} [options.currentSessionId] - The ID of the current session to exclude
 * @returns {Promise<Object>}
 * @throws {SessionError}
 */
const deleteAllUserSessions = async (userId, options = {}) => {
  try {
    if (!userId) {
      throw new SessionError('User ID is required', 'INVALID_USER_ID');
    }

    // Extract userId if it's passed as an object
    const userIdString = userId.userId || userId;

    if (!mongoose.Types.ObjectId.isValid(userIdString)) {
      throw new SessionError('Invalid user ID format', 'INVALID_USER_ID_FORMAT');
    }

    const query = { user: userIdString };

    if (options.excludeCurrentSession && options.currentSessionId) {
      query._id = { $ne: options.currentSessionId };
    }

    const result = await Session.deleteMany(query);

    if (result.deletedCount > 0) {
      logger.debug(
        `[deleteAllUserSessions] Deleted ${result.deletedCount} sessions for user ${userIdString}.`,
      );
    }

    return result;
  } catch (error) {
    logger.error('[deleteAllUserSessions] Error deleting user sessions:', error);
    throw new SessionError('Failed to delete user sessions', 'DELETE_ALL_SESSIONS_FAILED');
  }
};

/**
 * Generates a refresh token for a session
 * @param {Session} session - The session to generate a token for
 * @returns {Promise<string>}
 * @throws {SessionError}
 */
const generateRefreshToken = async (session) => {
  if (!session || !session.user) {
    throw new SessionError('Invalid session object', 'INVALID_SESSION');
  }

  try {
    const expiresIn = session.expiration ? session.expiration.getTime() : Date.now() + expires;

    if (!session.expiration) {
      session.expiration = new Date(expiresIn);
    }

    const refreshToken = await signPayload({
      payload: {
        id: session.user,
        sessionId: session._id,
      },
      secret: process.env.JWT_REFRESH_SECRET,
      expirationTime: Math.floor((expiresIn - Date.now()) / 1000),
    });

    session.refreshTokenHash = await hashToken(refreshToken);
    await session.save();

    return refreshToken;
  } catch (error) {
    logger.error('[generateRefreshToken] Error generating refresh token:', error);
    throw new SessionError('Failed to generate refresh token', 'GENERATE_TOKEN_FAILED');
  }
};

/**
 * Counts active sessions for a user
 * @param {string} userId - The ID of the user
 * @returns {Promise<number>}
 * @throws {SessionError}
 */
const countActiveSessions = async (userId) => {
  try {
    if (!userId) {
      throw new SessionError('User ID is required', 'INVALID_USER_ID');
    }

    return await Session.countDocuments({
      user: userId,
      expiration: { $gt: new Date() },
    });
  } catch (error) {
    logger.error('[countActiveSessions] Error counting active sessions:', error);
    throw new SessionError('Failed to count active sessions', 'COUNT_SESSIONS_FAILED');
  }
};

module.exports = {
  createSession,
  findSession,
  updateExpiration,
  deleteSession,
  deleteAllUserSessions,
  generateRefreshToken,
  countActiveSessions,
  SessionError,
};
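Taken together, the removed module covered the whole refresh-token lifecycle: `createSession` builds the document, `generateRefreshToken` signs a JWT bound to it and persists only the token's hash, and lookups always filter out expired sessions. A rough usage sketch, assuming a connected mongoose instance and a `JWT_REFRESH_SECRET` in the environment (the `userId` argument is a placeholder):

// Sketch only: how callers used the removed Session helpers.
const { createSession, findSession, deleteSession } = require('~/models/Session');

async function rotateSession(userId) {
  // The raw refresh token is returned exactly once; only its hash is stored.
  const { session, refreshToken } = await createSession(userId);

  // Later, the client's raw token is hashed and matched against the store.
  const found = await findSession({ refreshToken });

  // On logout, the session is removed by that same token.
  await deleteSession({ refreshToken });
  return { sessionId: session._id, stillValid: Boolean(found) };
}
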
@@ -1,342 +0,0 @@
const mongoose = require('mongoose');
const { nanoid } = require('nanoid');
const { Constants } = require('librechat-data-provider');
const { Conversation } = require('~/models/Conversation');
const { shareSchema } = require('@librechat/data-schemas');
const SharedLink = mongoose.model('SharedLink', shareSchema);
const { getMessages } = require('./Message');
const logger = require('~/config/winston');

class ShareServiceError extends Error {
  constructor(message, code) {
    super(message);
    this.name = 'ShareServiceError';
    this.code = code;
  }
}

const memoizedAnonymizeId = (prefix) => {
  const memo = new Map();
  return (id) => {
    if (!memo.has(id)) {
      memo.set(id, `${prefix}_${nanoid()}`);
    }
    return memo.get(id);
  };
};

const anonymizeConvoId = memoizedAnonymizeId('convo');
const anonymizeAssistantId = memoizedAnonymizeId('a');
const anonymizeMessageId = (id) =>
  id === Constants.NO_PARENT ? id : memoizedAnonymizeId('msg')(id);

function anonymizeConvo(conversation) {
  if (!conversation) {
    return null;
  }

  const newConvo = { ...conversation };
  if (newConvo.assistant_id) {
    newConvo.assistant_id = anonymizeAssistantId(newConvo.assistant_id);
  }
  return newConvo;
}

function anonymizeMessages(messages, newConvoId) {
  if (!Array.isArray(messages)) {
    return [];
  }

  const idMap = new Map();
  return messages.map((message) => {
    const newMessageId = anonymizeMessageId(message.messageId);
    idMap.set(message.messageId, newMessageId);

    return {
      ...message,
      messageId: newMessageId,
      parentMessageId:
        idMap.get(message.parentMessageId) || anonymizeMessageId(message.parentMessageId),
      conversationId: newConvoId,
      model: message.model?.startsWith('asst_')
        ? anonymizeAssistantId(message.model)
        : message.model,
    };
  });
}

async function getSharedMessages(shareId) {
  try {
    const share = await SharedLink.findOne({ shareId, isPublic: true })
      .populate({
        path: 'messages',
        select: '-_id -__v -user',
      })
      .select('-_id -__v -user')
      .lean();

    if (!share?.conversationId || !share.isPublic) {
      return null;
    }

    const newConvoId = anonymizeConvoId(share.conversationId);
    const result = {
      ...share,
      conversationId: newConvoId,
      messages: anonymizeMessages(share.messages, newConvoId),
    };

    return result;
  } catch (error) {
    logger.error('[getShare] Error getting share link', {
      error: error.message,
      shareId,
    });
    throw new ShareServiceError('Error getting share link', 'SHARE_FETCH_ERROR');
  }
}

async function getSharedLinks(user, pageParam, pageSize, isPublic, sortBy, sortDirection, search) {
  try {
    const query = { user, isPublic };

    if (pageParam) {
      if (sortDirection === 'desc') {
        query[sortBy] = { $lt: pageParam };
      } else {
        query[sortBy] = { $gt: pageParam };
      }
    }

    if (search && search.trim()) {
      try {
        const searchResults = await Conversation.meiliSearch(search);

        if (!searchResults?.hits?.length) {
          return {
            links: [],
            nextCursor: undefined,
            hasNextPage: false,
          };
        }

        const conversationIds = searchResults.hits.map((hit) => hit.conversationId);
        query['conversationId'] = { $in: conversationIds };
      } catch (searchError) {
        logger.error('[getSharedLinks] Meilisearch error', {
          error: searchError.message,
          user,
        });
        return {
          links: [],
          nextCursor: undefined,
          hasNextPage: false,
        };
      }
    }

    const sort = {};
    sort[sortBy] = sortDirection === 'desc' ? -1 : 1;

    if (Array.isArray(query.conversationId)) {
      query.conversationId = { $in: query.conversationId };
    }

    const sharedLinks = await SharedLink.find(query)
      .sort(sort)
      .limit(pageSize + 1)
      .select('-__v -user')
      .lean();

    const hasNextPage = sharedLinks.length > pageSize;
    const links = sharedLinks.slice(0, pageSize);

    const nextCursor = hasNextPage ? links[links.length - 1][sortBy] : undefined;

    return {
      links: links.map((link) => ({
        shareId: link.shareId,
        title: link?.title || 'Untitled',
        isPublic: link.isPublic,
        createdAt: link.createdAt,
        conversationId: link.conversationId,
      })),
      nextCursor,
      hasNextPage,
    };
  } catch (error) {
    logger.error('[getSharedLinks] Error getting shares', {
      error: error.message,
      user,
    });
    throw new ShareServiceError('Error getting shares', 'SHARES_FETCH_ERROR');
  }
}

async function deleteAllSharedLinks(user) {
  try {
    const result = await SharedLink.deleteMany({ user });
    return {
      message: 'All shared links deleted successfully',
      deletedCount: result.deletedCount,
    };
  } catch (error) {
    logger.error('[deleteAllSharedLinks] Error deleting shared links', {
      error: error.message,
      user,
    });
    throw new ShareServiceError('Error deleting shared links', 'BULK_DELETE_ERROR');
  }
}

async function createSharedLink(user, conversationId) {
  if (!user || !conversationId) {
    throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
  }

  try {
    const [existingShare, conversationMessages] = await Promise.all([
      SharedLink.findOne({ conversationId, isPublic: true }).select('-_id -__v -user').lean(),
      getMessages({ conversationId }),
    ]);

    if (existingShare && existingShare.isPublic) {
      throw new ShareServiceError('Share already exists', 'SHARE_EXISTS');
    } else if (existingShare) {
      await SharedLink.deleteOne({ conversationId });
    }

    const conversation = await Conversation.findOne({ conversationId }).lean();
    const title = conversation?.title || 'Untitled';

    const shareId = nanoid();
    await SharedLink.create({
      shareId,
      conversationId,
      messages: conversationMessages,
      title,
      user,
    });

    return { shareId, conversationId };
  } catch (error) {
    logger.error('[createSharedLink] Error creating shared link', {
      error: error.message,
      user,
      conversationId,
    });
    throw new ShareServiceError('Error creating shared link', 'SHARE_CREATE_ERROR');
  }
}

async function getSharedLink(user, conversationId) {
  if (!user || !conversationId) {
    throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
  }

  try {
    const share = await SharedLink.findOne({ conversationId, user, isPublic: true })
      .select('shareId -_id')
      .lean();

    if (!share) {
      return { shareId: null, success: false };
    }

    return { shareId: share.shareId, success: true };
  } catch (error) {
    logger.error('[getSharedLink] Error getting shared link', {
      error: error.message,
      user,
      conversationId,
    });
    throw new ShareServiceError('Error getting shared link', 'SHARE_FETCH_ERROR');
  }
}

async function updateSharedLink(user, shareId) {
  if (!user || !shareId) {
    throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
  }

  try {
    const share = await SharedLink.findOne({ shareId }).select('-_id -__v -user').lean();

    if (!share) {
      throw new ShareServiceError('Share not found', 'SHARE_NOT_FOUND');
    }

    const [updatedMessages] = await Promise.all([
      getMessages({ conversationId: share.conversationId }),
    ]);

    const newShareId = nanoid();
    const update = {
      messages: updatedMessages,
      user,
      shareId: newShareId,
    };

    const updatedShare = await SharedLink.findOneAndUpdate({ shareId, user }, update, {
      new: true,
      upsert: false,
      runValidators: true,
    }).lean();

    if (!updatedShare) {
      throw new ShareServiceError('Share update failed', 'SHARE_UPDATE_ERROR');
    }

    anonymizeConvo(updatedShare);

    return { shareId: newShareId, conversationId: updatedShare.conversationId };
  } catch (error) {
    logger.error('[updateSharedLink] Error updating shared link', {
      error: error.message,
      user,
      shareId,
    });
    throw new ShareServiceError(
      error.code === 'SHARE_UPDATE_ERROR' ? error.message : 'Error updating shared link',
      error.code || 'SHARE_UPDATE_ERROR',
    );
  }
}

async function deleteSharedLink(user, shareId) {
  if (!user || !shareId) {
    throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
  }

  try {
    const result = await SharedLink.findOneAndDelete({ shareId, user }).lean();

    if (!result) {
      return null;
    }

    return {
      success: true,
      shareId,
      message: 'Share deleted successfully',
    };
  } catch (error) {
    logger.error('[deleteSharedLink] Error deleting shared link', {
      error: error.message,
      user,
      shareId,
    });
    throw new ShareServiceError('Error deleting shared link', 'SHARE_DELETE_ERROR');
  }
}

module.exports = {
  SharedLink,
  getSharedLink,
  getSharedLinks,
  createSharedLink,
  updateSharedLink,
  deleteSharedLink,
  getSharedMessages,
  deleteAllSharedLinks,
};
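The removed share service pairs every public link with an anonymized view of the conversation: `createSharedLink` snapshots the messages under a fresh `nanoid` share ID, and `getSharedMessages` rewrites conversation, message, and assistant IDs (the `convo_`, `msg_`, and `a_` prefixes above) before anything leaves the server. A sketch of the round trip, assuming an existing conversation ID (the argument values are placeholders):

// Sketch only: round trip through the removed share service.
const { createSharedLink, getSharedMessages, deleteSharedLink } = require('~/models/Share');

async function shareAndPreview(user, conversationId) {
  // Snapshot the conversation's messages under a new public share ID.
  const { shareId } = await createSharedLink(user, conversationId);

  // Public fetch: all identifiers come back anonymized.
  const shared = await getSharedMessages(shareId);

  // Revoke when finished; resolves to null if the share no longer exists.
  await deleteSharedLink(user, shareId);
  return shared?.messages?.length ?? 0;
}
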
@@ -1,199 +0,0 @@
const mongoose = require('mongoose');
const { encryptV2 } = require('~/server/utils/crypto');
const { tokenSchema } = require('@librechat/data-schemas');
const { logger } = require('~/config');

/**
 * Token model.
 * @type {mongoose.Model}
 */
const Token = mongoose.model('Token', tokenSchema);
/**
 * Fixes the indexes for the Token collection from legacy TTL indexes to the new expiresAt index.
 */
async function fixIndexes() {
  try {
    if (
      process.env.NODE_ENV === 'CI' ||
      process.env.NODE_ENV === 'development' ||
      process.env.NODE_ENV === 'test'
    ) {
      return;
    }
    const indexes = await Token.collection.indexes();
    logger.debug('Existing Token Indexes:', JSON.stringify(indexes, null, 2));
    const unwantedTTLIndexes = indexes.filter(
      (index) => index.key.createdAt === 1 && index.expireAfterSeconds !== undefined,
    );
    if (unwantedTTLIndexes.length === 0) {
      logger.debug('No unwanted Token indexes found.');
      return;
    }
    for (const index of unwantedTTLIndexes) {
      logger.debug(`Dropping unwanted Token index: ${index.name}`);
      await Token.collection.dropIndex(index.name);
      logger.debug(`Dropped Token index: ${index.name}`);
    }
    logger.debug('Token index cleanup completed successfully.');
  } catch (error) {
    logger.error('An error occurred while fixing Token indexes:', error);
  }
}

fixIndexes();

/**
 * Creates a new Token instance.
 * @param {Object} tokenData - The data for the new Token.
 * @param {mongoose.Types.ObjectId} tokenData.userId - The user's ID. It is required.
 * @param {String} tokenData.email - The user's email.
 * @param {String} tokenData.token - The token. It is required.
 * @param {Number} tokenData.expiresIn - The number of seconds until the token expires.
 * @returns {Promise<mongoose.Document>} The new Token instance.
 * @throws Will throw an error if token creation fails.
 */
async function createToken(tokenData) {
  try {
    const currentTime = new Date();
    const expiresAt = new Date(currentTime.getTime() + tokenData.expiresIn * 1000);

    const newTokenData = {
      ...tokenData,
      createdAt: currentTime,
      expiresAt,
    };

    return await Token.create(newTokenData);
  } catch (error) {
    logger.debug('An error occurred while creating token:', error);
    throw error;
  }
}

/**
 * Finds a Token document that matches the provided query.
 * @param {Object} query - The query to match against.
 * @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
 * @param {String} query.token - The token value.
 * @param {String} [query.email] - The email of the user.
 * @param {String} [query.identifier] - Unique, alternative identifier for the token.
 * @returns {Promise<Object|null>} The matched Token document, or null if not found.
 * @throws Will throw an error if the find operation fails.
 */
async function findToken(query) {
  try {
    const conditions = [];

    if (query.userId) {
      conditions.push({ userId: query.userId });
    }
    if (query.token) {
      conditions.push({ token: query.token });
    }
    if (query.email) {
      conditions.push({ email: query.email });
    }
    if (query.identifier) {
      conditions.push({ identifier: query.identifier });
    }

    const token = await Token.findOne({
      $and: conditions,
    }).lean();

    return token;
  } catch (error) {
    logger.debug('An error occurred while finding token:', error);
    throw error;
  }
}

/**
 * Updates a Token document that matches the provided query.
 * @param {Object} query - The query to match against.
 * @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
 * @param {String} query.token - The token value.
 * @param {String} [query.email] - The email of the user.
 * @param {String} [query.identifier] - Unique, alternative identifier for the token.
 * @param {Object} updateData - The data to update the Token with.
 * @returns {Promise<mongoose.Document|null>} The updated Token document, or null if not found.
 * @throws Will throw an error if the update operation fails.
 */
async function updateToken(query, updateData) {
  try {
    return await Token.findOneAndUpdate(query, updateData, { new: true });
  } catch (error) {
    logger.debug('An error occurred while updating token:', error);
    throw error;
  }
}

/**
 * Deletes all Token documents that match the provided token, user ID, or email.
 * @param {Object} query - The query to match against.
 * @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
 * @param {String} query.token - The token value.
 * @param {String} [query.email] - The email of the user.
 * @param {String} [query.identifier] - Unique, alternative identifier for the token.
 * @returns {Promise<Object>} The result of the delete operation.
 * @throws Will throw an error if the delete operation fails.
 */
async function deleteTokens(query) {
  try {
    return await Token.deleteMany({
      $or: [
        { userId: query.userId },
        { token: query.token },
        { email: query.email },
        { identifier: query.identifier },
      ],
    });
  } catch (error) {
    logger.debug('An error occurred while deleting tokens:', error);
    throw error;
  }
}

/**
 * Handles the OAuth token by creating or updating the token.
 * @param {object} fields
 * @param {string} fields.userId - The user's ID.
 * @param {string} fields.token - The full token to store.
 * @param {string} fields.identifier - Unique, alternative identifier for the token.
 * @param {number} fields.expiresIn - The number of seconds until the token expires.
 * @param {object} fields.metadata - Additional metadata to store with the token.
 * @param {string} [fields.type="oauth"] - The type of token. Default is 'oauth'.
 */
async function handleOAuthToken({
  token,
  userId,
  identifier,
  expiresIn,
  metadata,
  type = 'oauth',
}) {
  const encrypedToken = await encryptV2(token);
  const tokenData = {
    type,
    userId,
    metadata,
    identifier,
    token: encrypedToken,
    expiresIn: parseInt(expiresIn, 10) || 3600,
  };

  const existingToken = await findToken({ userId, identifier });
  if (existingToken) {
    return await updateToken({ identifier }, tokenData);
  } else {
    return await createToken(tokenData);
  }
}

module.exports = {
  findToken,
  createToken,
  updateToken,
  deleteTokens,
  handleOAuthToken,
};
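`handleOAuthToken` is effectively encrypt-then-upsert: the raw token is encrypted with `encryptV2`, and an existing row matching the `(userId, identifier)` pair is updated in place instead of duplicated. A sketch of a caller, assuming values obtained from an OAuth exchange (the identifier and metadata literals are placeholders):

// Sketch only: storing a provider token via the removed helpers.
const { handleOAuthToken, findToken, deleteTokens } = require('~/models/Token');

async function storeProviderToken(userId, accessToken) {
  // Upserts by (userId, identifier); expiresIn falls back to 3600 seconds.
  await handleOAuthToken({
    userId,
    token: accessToken,
    identifier: 'github:oauth', // placeholder identifier
    expiresIn: 7200,
    metadata: { provider: 'github' }, // placeholder metadata
  });

  // Lookups combine the provided fields with $and.
  const stored = await findToken({ userId, identifier: 'github:oauth' });

  // Cleanup matches any of userId/token/email/identifier via $or.
  if (!stored) {
    await deleteTokens({ userId });
  }
}
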
@@ -1,6 +1,4 @@
const mongoose = require('mongoose');
const { toolCallSchema } = require('@librechat/data-schemas');
const ToolCall = mongoose.model('ToolCall', toolCallSchema);
const { ToolCall } = require('~/db/models');

/**
 * Create a new tool call

@@ -1,9 +1,7 @@
const mongoose = require('mongoose');
const { transactionSchema } = require('@librechat/data-schemas');
const { logger } = require('@librechat/data-schemas');
const { getBalanceConfig } = require('~/server/services/Config');
const { getMultiplier, getCacheMultiplier } = require('./tx');
const { logger } = require('~/config');
const Balance = require('./Balance');
const { Transaction, Balance } = require('~/db/models');

const cancelRate = 1.15;

@@ -140,19 +138,19 @@ const updateBalance = async ({ user, incrementValue, setValues }) => {
};

/** Method to calculate and set the tokenValue for a transaction */
transactionSchema.methods.calculateTokenValue = function () {
  if (!this.valueKey || !this.tokenType) {
    this.tokenValue = this.rawAmount;
function calculateTokenValue(txn) {
  if (!txn.valueKey || !txn.tokenType) {
    txn.tokenValue = txn.rawAmount;
  }
  const { valueKey, tokenType, model, endpointTokenConfig } = this;
  const { valueKey, tokenType, model, endpointTokenConfig } = txn;
  const multiplier = Math.abs(getMultiplier({ valueKey, tokenType, model, endpointTokenConfig }));
  this.rate = multiplier;
  this.tokenValue = this.rawAmount * multiplier;
  if (this.context && this.tokenType === 'completion' && this.context === 'incomplete') {
    this.tokenValue = Math.ceil(this.tokenValue * cancelRate);
    this.rate *= cancelRate;
  txn.rate = multiplier;
  txn.tokenValue = txn.rawAmount * multiplier;
  if (txn.context && txn.tokenType === 'completion' && txn.context === 'incomplete') {
    txn.tokenValue = Math.ceil(txn.tokenValue * cancelRate);
    txn.rate *= cancelRate;
  }
};
}

/**
 * New static method to create an auto-refill transaction that does NOT trigger a balance update.
@@ -163,13 +161,13 @@ transactionSchema.methods.calculateTokenValue = function () {
 * @param {number} txData.rawAmount - The raw amount of tokens.
 * @returns {Promise<object>} - The created transaction.
 */
transactionSchema.statics.createAutoRefillTransaction = async function (txData) {
async function createAutoRefillTransaction(txData) {
  if (txData.rawAmount != null && isNaN(txData.rawAmount)) {
    return;
  }
  const transaction = new this(txData);
  const transaction = new Transaction(txData);
  transaction.endpointTokenConfig = txData.endpointTokenConfig;
  transaction.calculateTokenValue();
  calculateTokenValue(transaction);
  await transaction.save();

  const balanceResponse = await updateBalance({
@@ -185,21 +183,20 @@ transactionSchema.statics.createAutoRefillTransaction = async function (txData)
  logger.debug('[Balance.check] Auto-refill performed', result);
  result.transaction = transaction;
  return result;
};
}

/**
 * Static method to create a transaction and update the balance
 * @param {txData} txData - Transaction data.
 */
transactionSchema.statics.create = async function (txData) {
  const Transaction = this;
async function createTransaction(txData) {
  if (txData.rawAmount != null && isNaN(txData.rawAmount)) {
    return;
  }

  const transaction = new Transaction(txData);
  transaction.endpointTokenConfig = txData.endpointTokenConfig;
  transaction.calculateTokenValue();
  calculateTokenValue(transaction);

  await transaction.save();

@@ -209,7 +206,6 @@ transactionSchema.statics.create = async function (txData) {
  }

  let incrementValue = transaction.tokenValue;

  const balanceResponse = await updateBalance({
    user: transaction.user,
    incrementValue,
@@ -221,21 +217,19 @@ transactionSchema.statics.create = async function (txData) {
    balance: balanceResponse.tokenCredits,
    [transaction.tokenType]: incrementValue,
  };
};
}

/**
 * Static method to create a structured transaction and update the balance
 * @param {txData} txData - Transaction data.
 */
transactionSchema.statics.createStructured = async function (txData) {
  const Transaction = this;

async function createStructuredTransaction(txData) {
  const transaction = new Transaction({
    ...txData,
    endpointTokenConfig: txData.endpointTokenConfig,
  });

  transaction.calculateStructuredTokenValue();
  calculateStructuredTokenValue(transaction);

  await transaction.save();

@@ -257,71 +251,69 @@ transactionSchema.statics.createStructured = async function (txData) {
    balance: balanceResponse.tokenCredits,
    [transaction.tokenType]: incrementValue,
  };
};
}

/** Method to calculate token value for structured tokens */
transactionSchema.methods.calculateStructuredTokenValue = function () {
  if (!this.tokenType) {
    this.tokenValue = this.rawAmount;
function calculateStructuredTokenValue(txn) {
  if (!txn.tokenType) {
    txn.tokenValue = txn.rawAmount;
    return;
  }

  const { model, endpointTokenConfig } = this;
  const { model, endpointTokenConfig } = txn;

  if (this.tokenType === 'prompt') {
  if (txn.tokenType === 'prompt') {
    const inputMultiplier = getMultiplier({ tokenType: 'prompt', model, endpointTokenConfig });
    const writeMultiplier =
      getCacheMultiplier({ cacheType: 'write', model, endpointTokenConfig }) ?? inputMultiplier;
    const readMultiplier =
      getCacheMultiplier({ cacheType: 'read', model, endpointTokenConfig }) ?? inputMultiplier;

    this.rateDetail = {
    txn.rateDetail = {
      input: inputMultiplier,
      write: writeMultiplier,
      read: readMultiplier,
    };

    const totalPromptTokens =
      Math.abs(this.inputTokens || 0) +
      Math.abs(this.writeTokens || 0) +
      Math.abs(this.readTokens || 0);
      Math.abs(txn.inputTokens || 0) +
      Math.abs(txn.writeTokens || 0) +
      Math.abs(txn.readTokens || 0);

    if (totalPromptTokens > 0) {
      this.rate =
        (Math.abs(inputMultiplier * (this.inputTokens || 0)) +
          Math.abs(writeMultiplier * (this.writeTokens || 0)) +
          Math.abs(readMultiplier * (this.readTokens || 0))) /
      txn.rate =
        (Math.abs(inputMultiplier * (txn.inputTokens || 0)) +
          Math.abs(writeMultiplier * (txn.writeTokens || 0)) +
          Math.abs(readMultiplier * (txn.readTokens || 0))) /
        totalPromptTokens;
    } else {
      this.rate = Math.abs(inputMultiplier); // Default to input rate if no tokens
      txn.rate = Math.abs(inputMultiplier); // Default to input rate if no tokens
    }

    this.tokenValue = -(
      Math.abs(this.inputTokens || 0) * inputMultiplier +
      Math.abs(this.writeTokens || 0) * writeMultiplier +
      Math.abs(this.readTokens || 0) * readMultiplier
    txn.tokenValue = -(
      Math.abs(txn.inputTokens || 0) * inputMultiplier +
      Math.abs(txn.writeTokens || 0) * writeMultiplier +
      Math.abs(txn.readTokens || 0) * readMultiplier
    );

    this.rawAmount = -totalPromptTokens;
  } else if (this.tokenType === 'completion') {
    const multiplier = getMultiplier({ tokenType: this.tokenType, model, endpointTokenConfig });
    this.rate = Math.abs(multiplier);
    this.tokenValue = -Math.abs(this.rawAmount) * multiplier;
    this.rawAmount = -Math.abs(this.rawAmount);
    txn.rawAmount = -totalPromptTokens;
  } else if (txn.tokenType === 'completion') {
    const multiplier = getMultiplier({ tokenType: txn.tokenType, model, endpointTokenConfig });
    txn.rate = Math.abs(multiplier);
    txn.tokenValue = -Math.abs(txn.rawAmount) * multiplier;
    txn.rawAmount = -Math.abs(txn.rawAmount);
  }

  if (this.context && this.tokenType === 'completion' && this.context === 'incomplete') {
    this.tokenValue = Math.ceil(this.tokenValue * cancelRate);
    this.rate *= cancelRate;
    if (this.rateDetail) {
      this.rateDetail = Object.fromEntries(
        Object.entries(this.rateDetail).map(([k, v]) => [k, v * cancelRate]),
  if (txn.context && txn.tokenType === 'completion' && txn.context === 'incomplete') {
    txn.tokenValue = Math.ceil(txn.tokenValue * cancelRate);
    txn.rate *= cancelRate;
    if (txn.rateDetail) {
      txn.rateDetail = Object.fromEntries(
        Object.entries(txn.rateDetail).map(([k, v]) => [k, v * cancelRate]),
      );
    }
  }
};

const Transaction = mongoose.model('Transaction', transactionSchema);
}

/**
 * Queries and retrieves transactions based on a given filter.
@@ -340,4 +332,9 @@ async function getTransactions(filter) {
  }
}

module.exports = { Transaction, getTransactions };
module.exports = {
  getTransactions,
  createTransaction,
  createAutoRefillTransaction,
  createStructuredTransaction,
};

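The net effect of this hunk set: transaction logic moves off mongoose statics and methods onto plain functions that receive the document as an argument, so callers import `createTransaction` and friends rather than reaching through the model. A sketch of the new call pattern under that assumption (the model name and token counts are illustrative):

// Sketch only: the refactored call pattern replacing Transaction.create.
const { createTransaction, createStructuredTransaction } = require('~/models/Transaction');

async function recordUsage(user) {
  // Plain completion spend; formerly the Transaction.create static.
  await createTransaction({
    user,
    tokenType: 'completion',
    rawAmount: -250, // illustrative token count
    model: 'gpt-4o', // illustrative model key
    valueKey: 'gpt-4o',
  });

  // Structured prompt spend with a cache read/write breakdown.
  await createStructuredTransaction({
    user,
    tokenType: 'prompt',
    model: 'gpt-4o',
    inputTokens: -1200,
    writeTokens: -300,
    readTokens: -150,
  });
}
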
@@ -3,14 +3,13 @@ const { MongoMemoryServer } = require('mongodb-memory-server');
const { spendTokens, spendStructuredTokens } = require('./spendTokens');
const { getBalanceConfig } = require('~/server/services/Config');
const { getMultiplier, getCacheMultiplier } = require('./tx');
const { Transaction } = require('./Transaction');
const Balance = require('./Balance');
const { createTransaction } = require('./Transaction');
const { Balance } = require('~/db/models');

// Mock the custom config module so we can control the balance flag.
jest.mock('~/server/services/Config');

let mongoServer;

beforeAll(async () => {
  mongoServer = await MongoMemoryServer.create();
  const mongoUri = mongoServer.getUri();
@@ -368,7 +367,7 @@ describe('NaN Handling Tests', () => {
  };

  // Act
  const result = await Transaction.create(txData);
  const result = await createTransaction(txData);

  // Assert: No transaction should be created and balance remains unchanged.
  expect(result).toBeUndefined();

@@ -1,6 +0,0 @@
const mongoose = require('mongoose');
const { userSchema } = require('@librechat/data-schemas');

const User = mongoose.model('User', userSchema);

module.exports = User;
@@ -1,9 +1,9 @@
const { logger } = require('@librechat/data-schemas');
const { ViolationTypes } = require('librechat-data-provider');
const { Transaction } = require('./Transaction');
const { createAutoRefillTransaction } = require('./Transaction');
const { logViolation } = require('~/cache');
const { getMultiplier } = require('./tx');
const { logger } = require('~/config');
const Balance = require('./Balance');
const { Balance } = require('~/db/models');

function isInvalidDate(date) {
  return isNaN(date);
@@ -60,7 +60,7 @@ const checkBalanceRecord = async function ({
) {
  try {
    /** @type {{ rate: number, user: string, balance: number, transaction: import('@librechat/data-schemas').ITransaction}} */
    const result = await Transaction.createAutoRefillTransaction({
    const result = await createAutoRefillTransaction({
      user: user,
      tokenType: 'credits',
      context: 'autoRefill',

@@ -1,6 +1,7 @@
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { Message, getMessages, bulkSaveMessages } = require('./Message');
const { getMessages, bulkSaveMessages } = require('./Message');
const { Message } = require('~/db/models');

// Original version of buildTree function
function buildTree({ messages, fileMap }) {
@@ -42,7 +43,6 @@ function buildTree({ messages, fileMap }) {
}

let mongod;

beforeAll(async () => {
  mongod = await MongoMemoryServer.create();
  const uri = mongod.getUri();

@@ -1,13 +1,7 @@
-const {
-  comparePassword,
-  deleteUserById,
-  generateToken,
-  getUserById,
-  updateUser,
-  createUser,
-  countUsers,
-  findUser,
-} = require('./userMethods');
+const mongoose = require('mongoose');
+const { createMethods } = require('@librechat/data-schemas');
+const methods = createMethods(mongoose);
+const { comparePassword } = require('./userMethods');
const {
  findFileById,
  createFile,
@@ -26,32 +20,12 @@ const {
  deleteMessagesSince,
  deleteMessages,
} = require('./Message');
-const {
-  createSession,
-  findSession,
-  updateExpiration,
-  deleteSession,
-  deleteAllUserSessions,
-  generateRefreshToken,
-  countActiveSessions,
-} = require('./Session');
-const { getConvoTitle, getConvo, saveConvo, deleteConvos } = require('./Conversation');
const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset');
-const { createToken, findToken, updateToken, deleteTokens } = require('./Token');
-const Balance = require('./Balance');
-const User = require('./User');
-const Key = require('./Key');

module.exports = {
+  ...methods,
  comparePassword,
-  deleteUserById,
-  generateToken,
-  getUserById,
-  updateUser,
-  createUser,
-  countUsers,
-  findUser,

  findFileById,
  createFile,
  updateFile,
@@ -77,21 +51,4 @@ module.exports = {
  getPresets,
  savePreset,
  deletePresets,
-
-  createToken,
-  findToken,
-  updateToken,
-  deleteTokens,
-
-  createSession,
-  findSession,
-  updateExpiration,
-  deleteSession,
-  deleteAllUserSessions,
-  generateRefreshToken,
-  countActiveSessions,
-
-  User,
-  Key,
-  Balance,
};
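
A detail worth noting in the rewritten index file: object spread is order-sensitive, so the explicitly listed keys after ...methods shadow any same-named methods produced by createMethods(mongoose). A small self-contained sketch of that precedence (the function bodies here are illustrative, not from the diff):

// Later properties win, so the local comparePassword overrides the generated one.
const methods = { comparePassword: () => 'generated', findUser: () => 'generated' };
const comparePassword = () => 'local override';

const exported = { ...methods, comparePassword };

console.log(exported.comparePassword()); // 'local override'
console.log(exported.findUser()); // 'generated'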
@@ -1,7 +1,7 @@
const mongoose = require('mongoose');
-const { getRandomValues, hashToken } = require('~/server/utils/crypto');
-const { createToken, findToken } = require('./Token');
-const logger = require('~/config/winston');
+const { getRandomValues } = require('@librechat/api');
+const { logger, hashToken } = require('@librechat/data-schemas');
+const { createToken, findToken } = require('~/models');

/**
 * @module inviteUser
@@ -1,475 +0,0 @@
const _ = require('lodash');
const mongoose = require('mongoose');
const { MeiliSearch } = require('meilisearch');
const { parseTextParts, ContentTypes } = require('librechat-data-provider');
const { cleanUpPrimaryKeyValue } = require('~/lib/utils/misc');
const logger = require('~/config/meiliLogger');

// Environment flags
/**
 * Flag to indicate if search is enabled based on environment variables.
 * @type {boolean}
 */
const searchEnabled = process.env.SEARCH && process.env.SEARCH.toLowerCase() === 'true';

/**
 * Flag to indicate if MeiliSearch is enabled based on required environment variables.
 * @type {boolean}
 */
const meiliEnabled = process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY && searchEnabled;

/**
 * Validates the required options for configuring the mongoMeili plugin.
 *
 * @param {Object} options - The configuration options.
 * @param {string} options.host - The MeiliSearch host.
 * @param {string} options.apiKey - The MeiliSearch API key.
 * @param {string} options.indexName - The name of the index.
 * @throws {Error} Throws an error if any required option is missing.
 */
const validateOptions = function (options) {
  const requiredKeys = ['host', 'apiKey', 'indexName'];
  requiredKeys.forEach((key) => {
    if (!options[key]) {
      throw new Error(`Missing mongoMeili Option: ${key}`);
    }
  });
};

/**
 * Factory function to create a MeiliMongooseModel class which extends a Mongoose model.
 * This class contains static and instance methods to synchronize and manage the MeiliSearch index
 * corresponding to the MongoDB collection.
 *
 * @param {Object} config - Configuration object.
 * @param {Object} config.index - The MeiliSearch index object.
 * @param {Array<string>} config.attributesToIndex - List of attributes to index.
 * @returns {Function} A class definition that will be loaded into the Mongoose schema.
 */
const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
  // The primary key is assumed to be the first attribute in the attributesToIndex array.
  const primaryKey = attributesToIndex[0];

  class MeiliMongooseModel {
    /**
     * Synchronizes the data between the MongoDB collection and the MeiliSearch index.
     *
     * The synchronization process involves:
     * 1. Fetching all documents from the MongoDB collection and MeiliSearch index.
     * 2. Comparing documents from both sources.
     * 3. Deleting documents from MeiliSearch that no longer exist in MongoDB.
     * 4. Adding documents to MeiliSearch that exist in MongoDB but not in the index.
     * 5. Updating documents in MeiliSearch if key fields (such as `text` or `title`) differ.
     * 6. Updating the `_meiliIndex` field in MongoDB to indicate the indexing status.
     *
     * Note: The function processes documents in batches because MeiliSearch's
     * `index.getDocuments` requires an exact limit and `index.addDocuments` does not handle
     * partial failures in a batch.
     *
     * @returns {Promise<void>} Resolves when the synchronization is complete.
     */
    static async syncWithMeili() {
      try {
        let moreDocuments = true;
        // Retrieve all MongoDB documents from the collection as plain JavaScript objects.
        const mongoDocuments = await this.find().lean();

        // Helper function to format a document by selecting only the attributes to index
        // and omitting keys starting with '$'.
        const format = (doc) =>
          _.omitBy(_.pick(doc, attributesToIndex), (v, k) => k.startsWith('$'));

        // Build a map of MongoDB documents for quick lookup based on the primary key.
        const mongoMap = new Map(mongoDocuments.map((doc) => [doc[primaryKey], format(doc)]));
        const indexMap = new Map();
        let offset = 0;
        const batchSize = 1000;

        // Fetch documents from the MeiliSearch index in batches.
        while (moreDocuments) {
          const batch = await index.getDocuments({ limit: batchSize, offset });
          if (batch.results.length === 0) {
            moreDocuments = false;
          }
          for (const doc of batch.results) {
            indexMap.set(doc[primaryKey], format(doc));
          }
          offset += batchSize;
        }

        logger.debug('[syncWithMeili]', { indexMap: indexMap.size, mongoMap: mongoMap.size });

        const updateOps = [];

        // Process documents present in the MeiliSearch index.
        for (const [id, doc] of indexMap) {
          const update = {};
          update[primaryKey] = id;
          if (mongoMap.has(id)) {
            // If document exists in MongoDB, check for discrepancies in key fields.
            if (
              (doc.text && doc.text !== mongoMap.get(id).text) ||
              (doc.title && doc.title !== mongoMap.get(id).title)
            ) {
              logger.debug(
                `[syncWithMeili] ${id} had document discrepancy in ${
                  doc.text ? 'text' : 'title'
                } field`,
              );
              updateOps.push({
                updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
              });
              await index.addDocuments([doc]);
            }
          } else {
            // If the document does not exist in MongoDB, delete it from MeiliSearch.
            await index.deleteDocument(id);
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: false } } },
            });
          }
        }

        // Process documents present in MongoDB.
        for (const [id, doc] of mongoMap) {
          const update = {};
          update[primaryKey] = id;
          // If the document is missing in the Meili index, add it.
          if (!indexMap.has(id)) {
            await index.addDocuments([doc]);
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
            });
          } else if (doc._meiliIndex === false) {
            // If the document exists but is marked as not indexed, update the flag.
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
            });
          }
        }

        // Execute bulk update operations in MongoDB to update the _meiliIndex flags.
        if (updateOps.length > 0) {
          await this.collection.bulkWrite(updateOps);
          logger.debug(
            `[syncWithMeili] Finished indexing ${
              primaryKey === 'messageId' ? 'messages' : 'conversations'
            }`,
          );
        }
      } catch (error) {
        logger.error('[syncWithMeili] Error adding document to Meili', error);
      }
    }

    /**
     * Updates settings for the MeiliSearch index.
     *
     * @param {Object} settings - The settings to update on the MeiliSearch index.
     * @returns {Promise<Object>} Promise resolving to the update result.
     */
    static async setMeiliIndexSettings(settings) {
      return await index.updateSettings(settings);
    }

    /**
     * Searches the MeiliSearch index and optionally populates the results with data from MongoDB.
     *
     * @param {string} q - The search query.
     * @param {Object} params - Additional search parameters for MeiliSearch.
     * @param {boolean} populate - Whether to populate search hits with full MongoDB documents.
     * @returns {Promise<Object>} The search results with populated hits if requested.
     */
    static async meiliSearch(q, params, populate) {
      const data = await index.search(q, params);

      if (populate) {
        // Build a query using the primary key values from the search hits.
        const query = {};
        query[primaryKey] = _.map(data.hits, (hit) => cleanUpPrimaryKeyValue(hit[primaryKey]));

        // Build a projection object, including only keys that do not start with '$'.
        const projection = Object.keys(this.schema.obj).reduce(
          (results, key) => {
            if (!key.startsWith('$')) {
              results[key] = 1;
            }
            return results;
          },
          { _id: 1, __v: 1 },
        );

        // Retrieve the full documents from MongoDB.
        const hitsFromMongoose = await this.find(query, projection).lean();

        // Merge the MongoDB documents with the search hits.
        const populatedHits = data.hits.map(function (hit) {
          const query = {};
          query[primaryKey] = hit[primaryKey];
          const originalHit = _.find(hitsFromMongoose, query);

          return {
            ...(originalHit ?? {}),
            ...hit,
          };
        });
        data.hits = populatedHits;
      }

      return data;
    }

    /**
     * Preprocesses the current document for indexing.
     *
     * This method:
     * - Picks only the defined attributes to index.
     * - Omits any keys starting with '$'.
     * - Replaces pipe characters ('|') in `conversationId` with '--'.
     * - Extracts and concatenates text from an array of content items.
     *
     * @returns {Object} The preprocessed object ready for indexing.
     */
    preprocessObjectForIndex() {
      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
        k.startsWith('$'),
      );
      if (object.conversationId && object.conversationId.includes('|')) {
        object.conversationId = object.conversationId.replace(/\|/g, '--');
      }

      if (object.content && Array.isArray(object.content)) {
        object.text = parseTextParts(object.content);
        delete object.content;
      }

      return object;
    }

    /**
     * Adds the current document to the MeiliSearch index.
     *
     * The method preprocesses the document, adds it to MeiliSearch, and then updates
     * the MongoDB document's `_meiliIndex` flag to true.
     *
     * @returns {Promise<void>}
     */
    async addObjectToMeili() {
      const object = this.preprocessObjectForIndex();
      try {
        await index.addDocuments([object]);
      } catch (error) {
        // Error handling can be enhanced as needed.
        logger.error('[addObjectToMeili] Error adding document to Meili', error);
      }

      await this.collection.updateMany({ _id: this._id }, { $set: { _meiliIndex: true } });
    }

    /**
     * Updates the current document in the MeiliSearch index.
     *
     * @returns {Promise<void>}
     */
    async updateObjectToMeili() {
      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
        k.startsWith('$'),
      );
      await index.updateDocuments([object]);
    }

    /**
     * Deletes the current document from the MeiliSearch index.
     *
     * @returns {Promise<void>}
     */
    async deleteObjectFromMeili() {
      await index.deleteDocument(this._id);
    }

    /**
     * Post-save hook to synchronize the document with MeiliSearch.
     *
     * If the document is already indexed (i.e. `_meiliIndex` is true), it updates it;
     * otherwise, it adds the document to the index.
     */
    postSaveHook() {
      if (this._meiliIndex) {
        this.updateObjectToMeili();
      } else {
        this.addObjectToMeili();
      }
    }

    /**
     * Post-update hook to update the document in MeiliSearch.
     *
     * This hook is triggered after a document update, ensuring that changes are
     * propagated to the MeiliSearch index if the document is indexed.
     */
    postUpdateHook() {
      if (this._meiliIndex) {
        this.updateObjectToMeili();
      }
    }

    /**
     * Post-remove hook to delete the document from MeiliSearch.
     *
     * This hook is triggered after a document is removed, ensuring that the document
     * is also removed from the MeiliSearch index if it was previously indexed.
     */
    postRemoveHook() {
      if (this._meiliIndex) {
        this.deleteObjectFromMeili();
      }
    }
  }

  return MeiliMongooseModel;
};

/**
 * Mongoose plugin to synchronize MongoDB collections with a MeiliSearch index.
 *
 * This plugin:
 * - Validates the provided options.
 * - Adds a `_meiliIndex` field to the schema to track indexing status.
 * - Sets up a MeiliSearch client and creates an index if it doesn't already exist.
 * - Loads class methods for syncing, searching, and managing documents in MeiliSearch.
 * - Registers Mongoose hooks (post-save, post-update, post-remove, etc.) to maintain index consistency.
 *
 * @param {mongoose.Schema} schema - The Mongoose schema to which the plugin is applied.
 * @param {Object} options - Configuration options.
 * @param {string} options.host - The MeiliSearch host.
 * @param {string} options.apiKey - The MeiliSearch API key.
 * @param {string} options.indexName - The name of the MeiliSearch index.
 * @param {string} options.primaryKey - The primary key field for indexing.
 */
module.exports = function mongoMeili(schema, options) {
  validateOptions(options);

  // Add _meiliIndex field to the schema to track if a document has been indexed in MeiliSearch.
  schema.add({
    _meiliIndex: {
      type: Boolean,
      required: false,
      select: false,
      default: false,
    },
  });

  const { host, apiKey, indexName, primaryKey } = options;

  // Setup the MeiliSearch client.
  const client = new MeiliSearch({ host, apiKey });

  // Create the index asynchronously if it doesn't exist.
  client.createIndex(indexName, { primaryKey });

  // Setup the MeiliSearch index for this schema.
  const index = client.index(indexName);

  // Collect attributes from the schema that should be indexed.
  const attributesToIndex = [
    ..._.reduce(
      schema.obj,
      function (results, value, key) {
        return value.meiliIndex ? [...results, key] : results;
      },
      [],
    ),
  ];

  // Load the class methods into the schema.
  schema.loadClass(createMeiliMongooseModel({ index, indexName, client, attributesToIndex }));

  // Register Mongoose hooks to synchronize with MeiliSearch.

  // Post-save: synchronize after a document is saved.
  schema.post('save', function (doc) {
    doc.postSaveHook();
  });

  // Post-update: synchronize after a document is updated.
  schema.post('update', function (doc) {
    doc.postUpdateHook();
  });

  // Post-remove: synchronize after a document is removed.
  schema.post('remove', function (doc) {
    doc.postRemoveHook();
  });

  // Pre-deleteMany hook: remove corresponding documents from MeiliSearch when multiple documents are deleted.
  schema.pre('deleteMany', async function (next) {
    if (!meiliEnabled) {
      return next();
    }

    try {
      // Check if the schema has a "messages" field to determine if it's a conversation schema.
      if (Object.prototype.hasOwnProperty.call(schema.obj, 'messages')) {
        const convoIndex = client.index('convos');
        const deletedConvos = await mongoose.model('Conversation').find(this._conditions).lean();
        const promises = deletedConvos.map((convo) =>
          convoIndex.deleteDocument(convo.conversationId),
        );
        await Promise.all(promises);
      }

      // Check if the schema has a "messageId" field to determine if it's a message schema.
      if (Object.prototype.hasOwnProperty.call(schema.obj, 'messageId')) {
        const messageIndex = client.index('messages');
        const deletedMessages = await mongoose.model('Message').find(this._conditions).lean();
        const promises = deletedMessages.map((message) =>
          messageIndex.deleteDocument(message.messageId),
        );
        await Promise.all(promises);
      }
      return next();
    } catch (error) {
      if (meiliEnabled) {
        logger.error(
          '[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion. Next startup may be slow due to syncing.',
          error,
        );
      }
      return next();
    }
  });

  // Post-findOneAndUpdate hook: update MeiliSearch index after a document is updated via findOneAndUpdate.
  schema.post('findOneAndUpdate', async function (doc) {
    if (!meiliEnabled) {
      return;
    }

    // If the document is unfinished, do not update the index.
    if (doc.unfinished) {
      return;
    }

    let meiliDoc;
    // For conversation documents, try to fetch the document from the "convos" index.
    if (doc.messages) {
      try {
        meiliDoc = await client.index('convos').getDocument(doc.conversationId);
      } catch (error) {
        logger.debug(
          '[MeiliMongooseModel.findOneAndUpdate] Convo not found in MeiliSearch and will index ' +
            doc.conversationId,
          error,
        );
      }
    }

    // If the MeiliSearch document exists and the title is unchanged, do nothing.
    if (meiliDoc && meiliDoc.title === doc.title) {
      return;
    }

    // Otherwise, trigger a post-save hook to synchronize the document.
    doc.postSaveHook();
  });
};
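
The syncWithMeili method in the deleted plugin pages through the MeiliSearch index with an offset loop because getDocuments returns at most `limit` results per call. The same pattern in isolation, assuming only the { limit, offset } / results contract used above (fetchAllDocuments is a hypothetical helper, not part of the diff):

// Sketch: drain an index in fixed-size batches via limit/offset paging.
async function fetchAllDocuments(index, batchSize = 1000) {
  const all = [];
  let offset = 0;
  let moreDocuments = true;
  while (moreDocuments) {
    const batch = await index.getDocuments({ limit: batchSize, offset });
    if (batch.results.length === 0) {
      moreDocuments = false; // an empty page means nothing is left to fetch
    }
    all.push(...batch.results);
    offset += batchSize;
  }
  return all;
}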
@@ -1,18 +0,0 @@
const mongoose = require('mongoose');
const mongoMeili = require('../plugins/mongoMeili');

const { convoSchema } = require('@librechat/data-schemas');

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  convoSchema.plugin(mongoMeili, {
    host: process.env.MEILI_HOST,
    apiKey: process.env.MEILI_MASTER_KEY,
    /** Note: Will get created automatically if it doesn't exist already */
    indexName: 'convos',
    primaryKey: 'conversationId',
  });
}

const Conversation = mongoose.models.Conversation || mongoose.model('Conversation', convoSchema);

module.exports = Conversation;
@@ -1,16 +0,0 @@
const mongoose = require('mongoose');
const mongoMeili = require('~/models/plugins/mongoMeili');
const { messageSchema } = require('@librechat/data-schemas');

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  messageSchema.plugin(mongoMeili, {
    host: process.env.MEILI_HOST,
    apiKey: process.env.MEILI_MASTER_KEY,
    indexName: 'messages',
    primaryKey: 'messageId',
  });
}

const Message = mongoose.models.Message || mongoose.model('Message', messageSchema);

module.exports = Message;
@@ -1,6 +0,0 @@
const mongoose = require('mongoose');
const { pluginAuthSchema } = require('@librechat/data-schemas');

const PluginAuth = mongoose.models.Plugin || mongoose.model('PluginAuth', pluginAuthSchema);

module.exports = PluginAuth;
@@ -1,6 +0,0 @@
const mongoose = require('mongoose');
const { presetSchema } = require('@librechat/data-schemas');

const Preset = mongoose.models.Preset || mongoose.model('Preset', presetSchema);

module.exports = Preset;
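
Each of the deleted model files registers its model through the same guard, which avoids Mongoose's OverwriteModelError when the module is evaluated more than once (for example, across Jest suites sharing a connection). The pattern in isolation, with a hypothetical Widget model:

const mongoose = require('mongoose');
const widgetSchema = new mongoose.Schema({ name: String });

// Reuse the already-compiled model if present; compile it otherwise.
const Widget = mongoose.models.Widget || mongoose.model('Widget', widgetSchema);

module.exports = Widget;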
@@ -1,6 +1,5 @@
-const { Transaction } = require('./Transaction');
+const { createTransaction, createStructuredTransaction } = require('./Transaction');
const { logger } = require('~/config');
-
/**
 * Creates up to two transactions to record the spending of tokens.
 *
@@ -33,7 +32,7 @@ const spendTokens = async (txData, tokenUsage) => {
  let prompt, completion;
  try {
    if (promptTokens !== undefined) {
-      prompt = await Transaction.create({
+      prompt = await createTransaction({
        ...txData,
        tokenType: 'prompt',
        rawAmount: promptTokens === 0 ? 0 : -Math.max(promptTokens, 0),
@@ -41,7 +40,7 @@ const spendTokens = async (txData, tokenUsage) => {
    }

    if (completionTokens !== undefined) {
-      completion = await Transaction.create({
+      completion = await createTransaction({
        ...txData,
        tokenType: 'completion',
        rawAmount: completionTokens === 0 ? 0 : -Math.max(completionTokens, 0),
@@ -101,7 +100,7 @@ const spendStructuredTokens = async (txData, tokenUsage) => {
  try {
    if (promptTokens) {
      const { input = 0, write = 0, read = 0 } = promptTokens;
-      prompt = await Transaction.createStructured({
+      prompt = await createStructuredTransaction({
        ...txData,
        tokenType: 'prompt',
        inputTokens: -input,
@@ -111,7 +110,7 @@ const spendStructuredTokens = async (txData, tokenUsage) => {
    }

    if (completionTokens) {
-      completion = await Transaction.create({
+      completion = await createTransaction({
        ...txData,
        tokenType: 'completion',
        rawAmount: -completionTokens,
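
Given the negation above, a spend of N tokens is always recorded as a non-positive rawAmount, and fields left undefined in tokenUsage are skipped entirely. A minimal sketch of a call site, assuming txData carries the user/conversation/model fields the tests below use (the context value is illustrative):

// Sketch: record prompt and completion spend for a single request.
const { spendTokens } = require('./spendTokens');

async function recordUsage(userId) {
  await spendTokens(
    { user: userId, conversationId: 'convo-1', model: 'gpt-4o', context: 'message' },
    { promptTokens: 100, completionTokens: 30 }, // either field may be omitted
  );
}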
@@ -1,8 +1,9 @@
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
-const { Transaction } = require('./Transaction');
-const Balance = require('./Balance');
const { spendTokens, spendStructuredTokens } = require('./spendTokens');
+const { createTransaction, createAutoRefillTransaction } = require('./Transaction');
+
+require('~/db/models');

// Mock the logger to prevent console output during tests
jest.mock('~/config', () => ({
@@ -19,11 +20,15 @@ jest.mock('~/server/services/Config');
describe('spendTokens', () => {
  let mongoServer;
  let userId;
+  let Transaction;
+  let Balance;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
-    const mongoUri = mongoServer.getUri();
-    await mongoose.connect(mongoUri);
+    await mongoose.connect(mongoServer.getUri());
+
+    Transaction = mongoose.model('Transaction');
+    Balance = mongoose.model('Balance');
  });

  afterAll(async () => {
@@ -197,7 +202,7 @@ describe('spendTokens', () => {
    // Check that the transaction records show the adjusted values
    const transactionResults = await Promise.all(
      transactions.map((t) =>
-        Transaction.create({
+        createTransaction({
          ...txData,
          tokenType: t.tokenType,
          rawAmount: t.rawAmount,
@@ -280,7 +285,7 @@ describe('spendTokens', () => {

    // Check the return values from Transaction.create directly
    // This is to verify that the incrementValue is not becoming positive
-    const directResult = await Transaction.create({
+    const directResult = await createTransaction({
      user: userId,
      conversationId: 'test-convo-3',
      model: 'gpt-4',
@@ -607,7 +612,7 @@ describe('spendTokens', () => {
    const promises = [];
    for (let i = 0; i < numberOfRefills; i++) {
      promises.push(
-        Transaction.createAutoRefillTransaction({
+        createAutoRefillTransaction({
          user: userId,
          tokenType: 'credits',
          context: 'concurrent-refill-test',
@@ -76,10 +76,15 @@ const tokenValues = Object.assign(
  '4k': { prompt: 1.5, completion: 2 },
  '16k': { prompt: 3, completion: 4 },
  'gpt-3.5-turbo-1106': { prompt: 1, completion: 2 },
+  'o4-mini': { prompt: 1.1, completion: 4.4 },
+  'o3-mini': { prompt: 1.1, completion: 4.4 },
+  o3: { prompt: 2, completion: 8 },
  'o1-mini': { prompt: 1.1, completion: 4.4 },
  'o1-preview': { prompt: 15, completion: 60 },
  o1: { prompt: 15, completion: 60 },
+  'gpt-4.1-nano': { prompt: 0.1, completion: 0.4 },
+  'gpt-4.1-mini': { prompt: 0.4, completion: 1.6 },
+  'gpt-4.1': { prompt: 2, completion: 8 },
  'gpt-4.5': { prompt: 75, completion: 150 },
  'gpt-4o-mini': { prompt: 0.15, completion: 0.6 },
  'gpt-4o': { prompt: 2.5, completion: 10 },
@@ -95,6 +100,8 @@ const tokenValues = Object.assign(
  'claude-3-5-haiku': { prompt: 0.8, completion: 4 },
  'claude-3.5-haiku': { prompt: 0.8, completion: 4 },
  'claude-3-haiku': { prompt: 0.25, completion: 1.25 },
+  'claude-sonnet-4': { prompt: 3, completion: 15 },
+  'claude-opus-4': { prompt: 15, completion: 75 },
  'claude-2.1': { prompt: 8, completion: 24 },
  'claude-2': { prompt: 8, completion: 24 },
  'claude-instant': { prompt: 0.8, completion: 2.4 },
@@ -106,9 +113,15 @@ const tokenValues = Object.assign(
  /* cohere doesn't have rates for the older command models,
  so this was from https://artificialanalysis.ai/models/command-light/providers */
  command: { prompt: 0.38, completion: 0.38 },
+  gemma: { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
+  'gemma-2': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
+  'gemma-3': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
+  'gemma-3-27b': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
  'gemini-2.0-flash-lite': { prompt: 0.075, completion: 0.3 },
-  'gemini-2.0-flash': { prompt: 0.1, completion: 0.7 },
+  'gemini-2.0-flash': { prompt: 0.1, completion: 0.4 },
+  'gemini-2.0': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
  'gemini-2.5-pro': { prompt: 1.25, completion: 10 },
  'gemini-2.5-flash': { prompt: 0.15, completion: 3.5 },
  'gemini-2.5': { prompt: 0, completion: 0 }, // Free for a period of time
  'gemini-1.5-flash-8b': { prompt: 0.075, completion: 0.3 },
  'gemini-1.5-flash': { prompt: 0.15, completion: 0.6 },
@@ -122,6 +135,10 @@ const tokenValues = Object.assign(
  'grok-2-1212': { prompt: 2.0, completion: 10.0 },
  'grok-2-latest': { prompt: 2.0, completion: 10.0 },
  'grok-2': { prompt: 2.0, completion: 10.0 },
+  'grok-3-mini-fast': { prompt: 0.4, completion: 4 },
+  'grok-3-mini': { prompt: 0.3, completion: 0.5 },
+  'grok-3-fast': { prompt: 5.0, completion: 25.0 },
+  'grok-3': { prompt: 3.0, completion: 15.0 },
  'grok-beta': { prompt: 5.0, completion: 15.0 },
  'mistral-large': { prompt: 2.0, completion: 6.0 },
  'pixtral-large': { prompt: 2.0, completion: 6.0 },
@@ -147,6 +164,8 @@ const cacheTokenValues = {
  'claude-3.5-haiku': { write: 1, read: 0.08 },
  'claude-3-5-haiku': { write: 1, read: 0.08 },
  'claude-3-haiku': { write: 0.3, read: 0.03 },
+  'claude-sonnet-4': { write: 3.75, read: 0.3 },
+  'claude-opus-4': { write: 18.75, read: 1.5 },
};

/**
@@ -170,6 +189,14 @@ const getValueKey = (model, endpoint) => {
    return 'gpt-3.5-turbo-1106';
  } else if (modelName.includes('gpt-3.5')) {
    return '4k';
+  } else if (modelName.includes('o4-mini')) {
+    return 'o4-mini';
+  } else if (modelName.includes('o4')) {
+    return 'o4';
+  } else if (modelName.includes('o3-mini')) {
+    return 'o3-mini';
+  } else if (modelName.includes('o3')) {
+    return 'o3';
  } else if (modelName.includes('o1-preview')) {
    return 'o1-preview';
  } else if (modelName.includes('o1-mini')) {
@@ -178,6 +205,12 @@ const getValueKey = (model, endpoint) => {
    return 'o1';
  } else if (modelName.includes('gpt-4.5')) {
    return 'gpt-4.5';
+  } else if (modelName.includes('gpt-4.1-nano')) {
+    return 'gpt-4.1-nano';
+  } else if (modelName.includes('gpt-4.1-mini')) {
+    return 'gpt-4.1-mini';
+  } else if (modelName.includes('gpt-4.1')) {
+    return 'gpt-4.1';
  } else if (modelName.includes('gpt-4o-2024-05-13')) {
    return 'gpt-4o-2024-05-13';
  } else if (modelName.includes('gpt-4o-mini')) {
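
To make the new rates concrete: with tokenValues['gpt-4.1'] at prompt 2 and completion 8, a request that consumed 1,000 prompt tokens and 500 completion tokens is weighted 1,000 × 2 + 500 × 8 = 6,000 units (the absolute unit, e.g. credits per million tokens, is the application's convention and is not established by this diff). A sketch of the lookup-then-multiply flow using a local subset of the table:

// Sketch: substring matching picks the value key, then the rates weight the usage.
const rates = { 'gpt-4.1': { prompt: 2, completion: 8 } }; // subset of tokenValues above
const model = 'gpt-4.1-2024-08-06';
const valueKey = model.includes('gpt-4.1') ? 'gpt-4.1' : undefined;

const weighted = 1000 * rates[valueKey].prompt + 500 * rates[valueKey].completion;
console.log(weighted); // 6000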
@@ -60,6 +60,30 @@ describe('getValueKey', () => {
    expect(getValueKey('gpt-4.5-0125')).toBe('gpt-4.5');
  });

+  it('should return "gpt-4.1" for model type of "gpt-4.1"', () => {
+    expect(getValueKey('gpt-4.1-preview')).toBe('gpt-4.1');
+    expect(getValueKey('gpt-4.1-2024-08-06')).toBe('gpt-4.1');
+    expect(getValueKey('gpt-4.1-2024-08-06-0718')).toBe('gpt-4.1');
+    expect(getValueKey('openai/gpt-4.1')).toBe('gpt-4.1');
+    expect(getValueKey('openai/gpt-4.1-2024-08-06')).toBe('gpt-4.1');
+    expect(getValueKey('gpt-4.1-turbo')).toBe('gpt-4.1');
+    expect(getValueKey('gpt-4.1-0125')).toBe('gpt-4.1');
+  });
+
+  it('should return "gpt-4.1-mini" for model type of "gpt-4.1-mini"', () => {
+    expect(getValueKey('gpt-4.1-mini-preview')).toBe('gpt-4.1-mini');
+    expect(getValueKey('gpt-4.1-mini-2024-08-06')).toBe('gpt-4.1-mini');
+    expect(getValueKey('openai/gpt-4.1-mini')).toBe('gpt-4.1-mini');
+    expect(getValueKey('gpt-4.1-mini-0125')).toBe('gpt-4.1-mini');
+  });
+
+  it('should return "gpt-4.1-nano" for model type of "gpt-4.1-nano"', () => {
+    expect(getValueKey('gpt-4.1-nano-preview')).toBe('gpt-4.1-nano');
+    expect(getValueKey('gpt-4.1-nano-2024-08-06')).toBe('gpt-4.1-nano');
+    expect(getValueKey('openai/gpt-4.1-nano')).toBe('gpt-4.1-nano');
+    expect(getValueKey('gpt-4.1-nano-0125')).toBe('gpt-4.1-nano');
+  });
+
  it('should return "gpt-4o" for model type of "gpt-4o"', () => {
    expect(getValueKey('gpt-4o-2024-08-06')).toBe('gpt-4o');
    expect(getValueKey('gpt-4o-2024-08-06-0718')).toBe('gpt-4o');
@@ -141,6 +165,15 @@ describe('getMultiplier', () => {
    );
  });

+  it('should return correct multipliers for o4-mini and o3', () => {
+    ['o4-mini', 'o3'].forEach((model) => {
+      const prompt = getMultiplier({ model, tokenType: 'prompt' });
+      const completion = getMultiplier({ model, tokenType: 'completion' });
+      expect(prompt).toBe(tokenValues[model].prompt);
+      expect(completion).toBe(tokenValues[model].completion);
+    });
+  });
+
  it('should return defaultRate if tokenType is provided but not found in tokenValues', () => {
    expect(getMultiplier({ valueKey: '8k', tokenType: 'unknownType' })).toBe(defaultRate);
  });
@@ -185,6 +218,52 @@ describe('getMultiplier', () => {
    );
  });

+  it('should return the correct multiplier for gpt-4.1', () => {
+    const valueKey = getValueKey('gpt-4.1-2024-08-06');
+    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-4.1'].prompt);
+    expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
+      tokenValues['gpt-4.1'].completion,
+    );
+    expect(getMultiplier({ model: 'gpt-4.1-preview', tokenType: 'prompt' })).toBe(
+      tokenValues['gpt-4.1'].prompt,
+    );
+    expect(getMultiplier({ model: 'openai/gpt-4.1', tokenType: 'completion' })).toBe(
+      tokenValues['gpt-4.1'].completion,
+    );
+  });
+
+  it('should return the correct multiplier for gpt-4.1-mini', () => {
+    const valueKey = getValueKey('gpt-4.1-mini-2024-08-06');
+    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(
+      tokenValues['gpt-4.1-mini'].prompt,
+    );
+    expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
+      tokenValues['gpt-4.1-mini'].completion,
+    );
+    expect(getMultiplier({ model: 'gpt-4.1-mini-preview', tokenType: 'prompt' })).toBe(
+      tokenValues['gpt-4.1-mini'].prompt,
+    );
+    expect(getMultiplier({ model: 'openai/gpt-4.1-mini', tokenType: 'completion' })).toBe(
+      tokenValues['gpt-4.1-mini'].completion,
+    );
+  });
+
+  it('should return the correct multiplier for gpt-4.1-nano', () => {
+    const valueKey = getValueKey('gpt-4.1-nano-2024-08-06');
+    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(
+      tokenValues['gpt-4.1-nano'].prompt,
+    );
+    expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
+      tokenValues['gpt-4.1-nano'].completion,
+    );
+    expect(getMultiplier({ model: 'gpt-4.1-nano-preview', tokenType: 'prompt' })).toBe(
+      tokenValues['gpt-4.1-nano'].prompt,
+    );
+    expect(getMultiplier({ model: 'openai/gpt-4.1-nano', tokenType: 'completion' })).toBe(
+      tokenValues['gpt-4.1-nano'].completion,
+    );
+  });
+
  it('should return the correct multiplier for gpt-4o-mini', () => {
    const valueKey = getValueKey('gpt-4o-mini-2024-07-18');
    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(
@@ -348,9 +427,11 @@ describe('getCacheMultiplier', () => {

  it('should derive the valueKey from the model if not provided', () => {
    expect(getCacheMultiplier({ cacheType: 'write', model: 'claude-3-5-sonnet-20240620' })).toBe(
-      3.75,
+      cacheTokenValues['claude-3-5-sonnet'].write,
    );
-    expect(getCacheMultiplier({ cacheType: 'read', model: 'claude-3-haiku-20240307' })).toBe(0.03);
+    expect(getCacheMultiplier({ cacheType: 'read', model: 'claude-3-haiku-20240307' })).toBe(
+      cacheTokenValues['claude-3-haiku'].read,
+    );
  });

  it('should return null if only model or cacheType is missing', () => {
@@ -371,10 +452,10 @@ describe('getCacheMultiplier', () => {
    };
    expect(
      getCacheMultiplier({ model: 'custom-model', cacheType: 'write', endpointTokenConfig }),
-    ).toBe(5);
+    ).toBe(endpointTokenConfig['custom-model'].write);
    expect(
      getCacheMultiplier({ model: 'custom-model', cacheType: 'read', endpointTokenConfig }),
-    ).toBe(1);
+    ).toBe(endpointTokenConfig['custom-model'].read);
  });

  it('should return null if model is not found in endpointTokenConfig', () => {
@@ -395,18 +476,21 @@ describe('getCacheMultiplier', () => {
        model: 'bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0',
        cacheType: 'write',
      }),
-    ).toBe(3.75);
+    ).toBe(cacheTokenValues['claude-3-5-sonnet'].write);
    expect(
      getCacheMultiplier({
        model: 'bedrock/anthropic.claude-3-haiku-20240307-v1:0',
        cacheType: 'read',
      }),
-    ).toBe(0.03);
+    ).toBe(cacheTokenValues['claude-3-haiku'].read);
  });
});

describe('Google Model Tests', () => {
  const googleModels = [
+    'gemini-2.5-pro-preview-05-06',
+    'gemini-2.5-flash-preview-04-17',
+    'gemini-2.5-exp',
    'gemini-2.0-flash-lite-preview-02-05',
    'gemini-2.0-flash-001',
    'gemini-2.0-flash-exp',
@@ -444,6 +528,9 @@ describe('Google Model Tests', () => {

  it('should map to the correct model keys', () => {
    const expected = {
+      'gemini-2.5-pro-preview-05-06': 'gemini-2.5-pro',
+      'gemini-2.5-flash-preview-04-17': 'gemini-2.5-flash',
+      'gemini-2.5-exp': 'gemini-2.5',
      'gemini-2.0-flash-lite-preview-02-05': 'gemini-2.0-flash-lite',
      'gemini-2.0-flash-001': 'gemini-2.0-flash',
      'gemini-2.0-flash-exp': 'gemini-2.0-flash',
@@ -488,24 +575,186 @@ describe('Grok Model Tests - Pricing', () => {
  test('should return correct prompt and completion rates for Grok vision models', () => {
    const models = ['grok-2-vision-1212', 'grok-2-vision', 'grok-2-vision-latest'];
    models.forEach((model) => {
-      expect(getMultiplier({ model, tokenType: 'prompt' })).toBe(2.0);
-      expect(getMultiplier({ model, tokenType: 'completion' })).toBe(10.0);
+      expect(getMultiplier({ model, tokenType: 'prompt' })).toBe(
+        tokenValues['grok-2-vision'].prompt,
+      );
+      expect(getMultiplier({ model, tokenType: 'completion' })).toBe(
+        tokenValues['grok-2-vision'].completion,
+      );
    });
  });

  test('should return correct prompt and completion rates for Grok text models', () => {
    const models = ['grok-2-1212', 'grok-2', 'grok-2-latest'];
    models.forEach((model) => {
-      expect(getMultiplier({ model, tokenType: 'prompt' })).toBe(2.0);
-      expect(getMultiplier({ model, tokenType: 'completion' })).toBe(10.0);
+      expect(getMultiplier({ model, tokenType: 'prompt' })).toBe(tokenValues['grok-2'].prompt);
+      expect(getMultiplier({ model, tokenType: 'completion' })).toBe(
+        tokenValues['grok-2'].completion,
+      );
    });
  });

  test('should return correct prompt and completion rates for Grok beta models', () => {
-    expect(getMultiplier({ model: 'grok-vision-beta', tokenType: 'prompt' })).toBe(5.0);
-    expect(getMultiplier({ model: 'grok-vision-beta', tokenType: 'completion' })).toBe(15.0);
-    expect(getMultiplier({ model: 'grok-beta', tokenType: 'prompt' })).toBe(5.0);
-    expect(getMultiplier({ model: 'grok-beta', tokenType: 'completion' })).toBe(15.0);
+    expect(getMultiplier({ model: 'grok-vision-beta', tokenType: 'prompt' })).toBe(
+      tokenValues['grok-vision-beta'].prompt,
+    );
+    expect(getMultiplier({ model: 'grok-vision-beta', tokenType: 'completion' })).toBe(
+      tokenValues['grok-vision-beta'].completion,
+    );
+    expect(getMultiplier({ model: 'grok-beta', tokenType: 'prompt' })).toBe(
+      tokenValues['grok-beta'].prompt,
+    );
+    expect(getMultiplier({ model: 'grok-beta', tokenType: 'completion' })).toBe(
+      tokenValues['grok-beta'].completion,
+    );
  });

+  test('should return correct prompt and completion rates for Grok 3 models', () => {
+    expect(getMultiplier({ model: 'grok-3', tokenType: 'prompt' })).toBe(
+      tokenValues['grok-3'].prompt,
+    );
+    expect(getMultiplier({ model: 'grok-3', tokenType: 'completion' })).toBe(
+      tokenValues['grok-3'].completion,
+    );
+    expect(getMultiplier({ model: 'grok-3-fast', tokenType: 'prompt' })).toBe(
+      tokenValues['grok-3-fast'].prompt,
+    );
+    expect(getMultiplier({ model: 'grok-3-fast', tokenType: 'completion' })).toBe(
+      tokenValues['grok-3-fast'].completion,
+    );
+    expect(getMultiplier({ model: 'grok-3-mini', tokenType: 'prompt' })).toBe(
+      tokenValues['grok-3-mini'].prompt,
+    );
+    expect(getMultiplier({ model: 'grok-3-mini', tokenType: 'completion' })).toBe(
+      tokenValues['grok-3-mini'].completion,
+    );
+    expect(getMultiplier({ model: 'grok-3-mini-fast', tokenType: 'prompt' })).toBe(
+      tokenValues['grok-3-mini-fast'].prompt,
+    );
+    expect(getMultiplier({ model: 'grok-3-mini-fast', tokenType: 'completion' })).toBe(
+      tokenValues['grok-3-mini-fast'].completion,
+    );
+  });
+
+  test('should return correct prompt and completion rates for Grok 3 models with prefixes', () => {
+    expect(getMultiplier({ model: 'xai/grok-3', tokenType: 'prompt' })).toBe(
+      tokenValues['grok-3'].prompt,
+    );
+    expect(getMultiplier({ model: 'xai/grok-3', tokenType: 'completion' })).toBe(
+      tokenValues['grok-3'].completion,
+    );
+    expect(getMultiplier({ model: 'xai/grok-3-fast', tokenType: 'prompt' })).toBe(
+      tokenValues['grok-3-fast'].prompt,
+    );
+    expect(getMultiplier({ model: 'xai/grok-3-fast', tokenType: 'completion' })).toBe(
+      tokenValues['grok-3-fast'].completion,
+    );
+    expect(getMultiplier({ model: 'xai/grok-3-mini', tokenType: 'prompt' })).toBe(
+      tokenValues['grok-3-mini'].prompt,
+    );
+    expect(getMultiplier({ model: 'xai/grok-3-mini', tokenType: 'completion' })).toBe(
+      tokenValues['grok-3-mini'].completion,
+    );
+    expect(getMultiplier({ model: 'xai/grok-3-mini-fast', tokenType: 'prompt' })).toBe(
+      tokenValues['grok-3-mini-fast'].prompt,
+    );
+    expect(getMultiplier({ model: 'xai/grok-3-mini-fast', tokenType: 'completion' })).toBe(
+      tokenValues['grok-3-mini-fast'].completion,
+    );
+  });
  });
});

+describe('Claude Model Tests', () => {
+  it('should return correct prompt and completion rates for Claude 4 models', () => {
+    expect(getMultiplier({ model: 'claude-sonnet-4', tokenType: 'prompt' })).toBe(
+      tokenValues['claude-sonnet-4'].prompt,
+    );
+    expect(getMultiplier({ model: 'claude-sonnet-4', tokenType: 'completion' })).toBe(
+      tokenValues['claude-sonnet-4'].completion,
+    );
+    expect(getMultiplier({ model: 'claude-opus-4', tokenType: 'prompt' })).toBe(
+      tokenValues['claude-opus-4'].prompt,
+    );
+    expect(getMultiplier({ model: 'claude-opus-4', tokenType: 'completion' })).toBe(
+      tokenValues['claude-opus-4'].completion,
+    );
+  });
+
+  it('should handle Claude 4 model name variations with different prefixes and suffixes', () => {
+    const modelVariations = [
+      'claude-sonnet-4',
+      'claude-sonnet-4-20240229',
+      'claude-sonnet-4-latest',
+      'anthropic/claude-sonnet-4',
+      'claude-sonnet-4/anthropic',
+      'claude-sonnet-4-preview',
+      'claude-sonnet-4-20240229-preview',
+      'claude-opus-4',
+      'claude-opus-4-20240229',
+      'claude-opus-4-latest',
+      'anthropic/claude-opus-4',
+      'claude-opus-4/anthropic',
+      'claude-opus-4-preview',
+      'claude-opus-4-20240229-preview',
+    ];
+
+    modelVariations.forEach((model) => {
+      const valueKey = getValueKey(model);
+      const isSonnet = model.includes('sonnet');
+      const expectedKey = isSonnet ? 'claude-sonnet-4' : 'claude-opus-4';
+
+      expect(valueKey).toBe(expectedKey);
+      expect(getMultiplier({ model, tokenType: 'prompt' })).toBe(tokenValues[expectedKey].prompt);
+      expect(getMultiplier({ model, tokenType: 'completion' })).toBe(
+        tokenValues[expectedKey].completion,
+      );
+    });
+  });
+
+  it('should return correct cache rates for Claude 4 models', () => {
+    expect(getCacheMultiplier({ model: 'claude-sonnet-4', cacheType: 'write' })).toBe(
+      cacheTokenValues['claude-sonnet-4'].write,
+    );
+    expect(getCacheMultiplier({ model: 'claude-sonnet-4', cacheType: 'read' })).toBe(
+      cacheTokenValues['claude-sonnet-4'].read,
+    );
+    expect(getCacheMultiplier({ model: 'claude-opus-4', cacheType: 'write' })).toBe(
+      cacheTokenValues['claude-opus-4'].write,
+    );
+    expect(getCacheMultiplier({ model: 'claude-opus-4', cacheType: 'read' })).toBe(
+      cacheTokenValues['claude-opus-4'].read,
+    );
+  });
+
+  it('should handle Claude 4 model cache rates with different prefixes and suffixes', () => {
+    const modelVariations = [
+      'claude-sonnet-4',
+      'claude-sonnet-4-20240229',
+      'claude-sonnet-4-latest',
+      'anthropic/claude-sonnet-4',
+      'claude-sonnet-4/anthropic',
+      'claude-sonnet-4-preview',
+      'claude-sonnet-4-20240229-preview',
+      'claude-opus-4',
+      'claude-opus-4-20240229',
+      'claude-opus-4-latest',
+      'anthropic/claude-opus-4',
+      'claude-opus-4/anthropic',
+      'claude-opus-4-preview',
+      'claude-opus-4-20240229-preview',
+    ];
+
+    modelVariations.forEach((model) => {
+      const isSonnet = model.includes('sonnet');
+      const expectedKey = isSonnet ? 'claude-sonnet-4' : 'claude-opus-4';
+
+      expect(getCacheMultiplier({ model, cacheType: 'write' })).toBe(
+        cacheTokenValues[expectedKey].write,
+      );
+      expect(getCacheMultiplier({ model, cacheType: 'read' })).toBe(
+        cacheTokenValues[expectedKey].read,
+      );
+    });
+  });
+});
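
The cache assertions above imply, for example, that writing 10,000 tokens to the prompt cache on claude-sonnet-4 is weighted 10,000 × 3.75 = 37,500 while reading the same span back is weighted 10,000 × 0.3 = 3,000, i.e. reads are 12.5× cheaper than writes. In sketch form, using the values from cacheTokenValues above:

// Sketch: compare cache write vs. read weight for claude-sonnet-4.
const cacheRates = { 'claude-sonnet-4': { write: 3.75, read: 0.3 } };
const tokens = 10000;

const writeCost = tokens * cacheRates['claude-sonnet-4'].write; // 37500
const readCost = tokens * cacheRates['claude-sonnet-4'].read; // 3000
console.log(writeCost / readCost); // 12.5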
@@ -1,159 +1,4 @@
const bcrypt = require('bcryptjs');
const { getBalanceConfig } = require('~/server/services/Config');
const signPayload = require('~/server/services/signPayload');
const Balance = require('./Balance');
const User = require('./User');

/**
 * Retrieve a user by ID and convert the found user document to a plain object.
 *
 * @param {string} userId - The ID of the user to find and return as a plain object.
 * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
 * @returns {Promise<MongoUser>} A plain object representing the user document, or `null` if no user is found.
 */
const getUserById = async function (userId, fieldsToSelect = null) {
  const query = User.findById(userId);
  if (fieldsToSelect) {
    query.select(fieldsToSelect);
  }
  return await query.lean();
};

/**
 * Search for a single user based on partial data and return matching user document as plain object.
 * @param {Partial<MongoUser>} searchCriteria - The partial data to use for searching the user.
 * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
 * @returns {Promise<MongoUser>} A plain object representing the user document, or `null` if no user is found.
 */
const findUser = async function (searchCriteria, fieldsToSelect = null) {
  const query = User.findOne(searchCriteria);
  if (fieldsToSelect) {
    query.select(fieldsToSelect);
  }
  return await query.lean();
};

/**
 * Update a user with new data without overwriting existing properties.
 *
 * @param {string} userId - The ID of the user to update.
 * @param {Object} updateData - An object containing the properties to update.
 * @returns {Promise<MongoUser>} The updated user document as a plain object, or `null` if no user is found.
 */
const updateUser = async function (userId, updateData) {
  const updateOperation = {
    $set: updateData,
    $unset: { expiresAt: '' }, // Remove the expiresAt field to prevent TTL
  };
  return await User.findByIdAndUpdate(userId, updateOperation, {
    new: true,
    runValidators: true,
  }).lean();
};

/**
 * Creates a new user, optionally with a TTL of 1 week.
 * @param {MongoUser} data - The user data to be created, must contain user_id.
 * @param {boolean} [disableTTL=true] - Whether to disable the TTL. Defaults to `true`.
 * @param {boolean} [returnUser=false] - Whether to return the created user object.
 * @returns {Promise<ObjectId|MongoUser>} A promise that resolves to the created user document ID or user object.
 * @throws {Error} If a user with the same user_id already exists.
 */
const createUser = async (data, disableTTL = true, returnUser = false) => {
  const balance = await getBalanceConfig();
  const userData = {
    ...data,
    expiresAt: disableTTL ? null : new Date(Date.now() + 604800 * 1000), // 1 week in milliseconds
  };

  if (disableTTL) {
    delete userData.expiresAt;
  }

  const user = await User.create(userData);

  // If balance is enabled, create or update a balance record for the user using global.interfaceConfig.balance
  if (balance?.enabled && balance?.startBalance) {
    const update = {
      $inc: { tokenCredits: balance.startBalance },
    };

    if (
      balance.autoRefillEnabled &&
      balance.refillIntervalValue != null &&
      balance.refillIntervalUnit != null &&
      balance.refillAmount != null
    ) {
      update.$set = {
        autoRefillEnabled: true,
        refillIntervalValue: balance.refillIntervalValue,
        refillIntervalUnit: balance.refillIntervalUnit,
        refillAmount: balance.refillAmount,
      };
    }

    await Balance.findOneAndUpdate({ user: user._id }, update, { upsert: true, new: true }).lean();
  }

  if (returnUser) {
    return user.toObject();
  }
  return user._id;
};

/**
 * Count the number of user documents in the collection based on the provided filter.
 *
 * @param {Object} [filter={}] - The filter to apply when counting the documents.
 * @returns {Promise<number>} The count of documents that match the filter.
 */
const countUsers = async function (filter = {}) {
  return await User.countDocuments(filter);
};

/**
 * Delete a user by their unique ID.
 *
 * @param {string} userId - The ID of the user to delete.
 * @returns {Promise<{ deletedCount: number }>} An object indicating the number of deleted documents.
 */
const deleteUserById = async function (userId) {
  try {
    const result = await User.deleteOne({ _id: userId });
    if (result.deletedCount === 0) {
      return { deletedCount: 0, message: 'No user found with that ID.' };
    }
    return { deletedCount: result.deletedCount, message: 'User was deleted successfully.' };
  } catch (error) {
    throw new Error('Error deleting user: ' + error.message);
  }
};

const { SESSION_EXPIRY } = process.env ?? {};
const expires = eval(SESSION_EXPIRY) ?? 1000 * 60 * 15;

/**
 * Generates a JWT token for a given user.
 *
 * @param {MongoUser} user - The user for whom the token is being generated.
 * @returns {Promise<string>} A promise that resolves to a JWT token.
 */
const generateToken = async (user) => {
  if (!user) {
    throw new Error('No user provided');
  }

  return await signPayload({
    payload: {
      id: user._id,
      username: user.username,
      provider: user.provider,
      email: user.email,
    },
    secret: process.env.JWT_SECRET,
    expirationTime: expires / 1000,
  });
};

/**
 * Compares the provided password with the user's password.
@@ -167,6 +12,10 @@ const comparePassword = async (user, candidatePassword) => {
    throw new Error('No user provided');
  }

+  if (!user.password) {
+    throw new Error('No password, likely an email first registered via Social/OIDC login');
+  }
+
  return new Promise((resolve, reject) => {
    bcrypt.compare(candidatePassword, user.password, (err, isMatch) => {
      if (err) {
@@ -179,11 +28,4 @@ const comparePassword = async (user, candidatePassword) => {

module.exports = {
  comparePassword,
-  deleteUserById,
-  generateToken,
-  getUserById,
-  countUsers,
-  createUser,
-  updateUser,
-  findUser,
};
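
With the new guard, callers can distinguish a wrong password (comparePassword resolves false) from an account that never set a local password (it throws). A minimal sketch of a login check built on that contract (checkLogin is hypothetical; findUser and comparePassword are the exports shown above):

// Sketch: three-way outcome for a local-password login attempt.
async function checkLogin(findUser, comparePassword, email, candidatePassword) {
  const user = await findUser({ email });
  if (!user) {
    return { ok: false, reason: 'unknown user' };
  }
  try {
    const isMatch = await comparePassword(user, candidatePassword);
    return isMatch ? { ok: true } : { ok: false, reason: 'wrong password' };
  } catch (err) {
    // Thrown for accounts first registered via Social/OIDC login.
    return { ok: false, reason: 'no local password' };
  }
}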
Some files were not shown because too many files have changed in this diff.