Compare commits
312 Commits
refactor/o...feat/updat
| SHA1 |
|---|
| 8bc585ddaa |
| 7d46a4fd08 |
| ede95e9e70 |
| 75f1b4cfff |
| fe41a996e7 |
| 3c02a7b2e8 |
| 6fb76c7d60 |
| cf9de4d4a5 |
| a7dc109856 |
| 7e1d02bcc3 |
| 44fa479bd4 |
| 29b8314870 |
| 2a5a3fe508 |
| 0ee5712df1 |
| 114deecc4e |
| f59daaeecc |
| bc77bbd1ba |
| c602088178 |
| 3d1cedb85b |
| bf2567bc8f |
| 5ce67b5b71 |
| cbd217efae |
| d990fe1d5f |
| 7c9a868d34 |
| e9a85d5c65 |
| f61afc1124 |
| 5566cc499e |
| ded3f2e998 |
| 7792fcee17 |
| f931731ef8 |
| 07d0abc9fd |
| fbe341a171 |
| 20282f32c8 |
| 6fa3db2969 |
| ff027e8243 |
| e9b678dd6a |
| bb7a0274fa |
| a5189052ec |
| bcd97aad2f |
| 9c77f53454 |
| 31a283a4fe |
| 857c054a9a |
| c9103a1708 |
| 7288449011 |
| 7897801fbc |
| 838fb53208 |
| 9ff608e6af |
| 1b8a0bfaee |
| c0ed738aed |
| 0e5bb6f98c |
| 341435fb25 |
| dbe4dd96b4 |
| b7d13cec6f |
| 37321ea10d |
| 17ab91f1fd |
| 4777bd22c5 |
| dfe236acb5 |
| c5d1861acf |
| b8720a9b7a |
| 0b2fde73e3 |
| c19b8755a7 |
| f6e19d8034 |
| c0eb19730a |
| a1471c2f37 |
| 712f0b3ca2 |
| 062d813b21 |
| 4b5b46604c |
| 3d7eaf0fcc |
| 823015160c |
| 3219734b9e |
| 4f3683fd9a |
| 57f8b333bc |
| f9aebeba92 |
| b85950aa9a |
| bcec5bfceb |
| e4f323e71a |
| d83826b604 |
| 2153db2f5f |
| de02892396 |
| f61e057f7f |
| 91e49d82aa |
| 880c7b43a1 |
| c99a29f8da |
| 8a60e8990f |
| a6bf2b6ce3 |
| ff8dac570f |
| 96870e0da0 |
| f0599ad36c |
| 5b1a31ef4d |
| 386900fb4f |
| 9d2aba5df5 |
| a5195a57a4 |
| 2489670f54 |
| 0352067da2 |
| fcaf55143d |
| aae3694b11 |
| 68c9f668c1 |
| 8b2e1c6088 |
| 99135a3dc1 |
| 344e7c44b5 |
| e5d2a932bc |
| c40554c03b |
| 98af4564e8 |
| 26a58fcabc |
| 3fec63e597 |
| 81139046e5 |
| 89d12a8ccd |
| f6d34d78ca |
| 48ca1bfd88 |
| 208be7c06c |
| 02bfe32905 |
| 4499494aba |
| d04da60b3b |
| 0e94d97bfb |
| 45ab4d4503 |
| 0ceef12eea |
| 6738360051 |
| 52b65492d5 |
| 7a9a99d2a0 |
| 5bfb06b417 |
| 2ce8f1f686 |
| 1a47601533 |
| 5245aeea8f |
| dd93db40bc |
| 136cf1d5a8 |
| 751522087a |
| 7fe830acfc |
| cdfe686987 |
| 5b5723343c |
| 30c24a66f6 |
| ecf9733bc1 |
| 133312fb40 |
| b62ffb533c |
| d75fb76338 |
| 51f2d43fed |
| e3a645e8fb |
| 180046a3c5 |
| 916742ab9d |
| d91f34dd42 |
| 5676976564 |
| 85aa3e7d9c |
| a2ff6613c5 |
| 8d6cb5eee0 |
| 31445e391a |
| 04c3a5a861 |
| 5667cc9702 |
| c0f95f971a |
| f125f5bd32 |
| f3eca8c7a7 |
| f22e5f965e |
| 749f539dfc |
| 1247207afe |
| 5c0e9d8fbb |
| 957fa7a994 |
| 751c2e1d17 |
| 519645c0b0 |
| 0d0a318c3c |
| 588e0c4611 |
| 79144a6365 |
| ca53c20370 |
| d635503f49 |
| 920966f895 |
| c46e0d3ecc |
| c6ecf0095b |
| 7de6f6e44c |
| 035f85c3ba |
| 6f6a34d126 |
| fff1f1cf27 |
| 1869854d70 |
| 4dd2998592 |
| a4a174b3dc |
| 65c83317aa |
| e95e0052da |
| 0ecafcd38e |
| cadfe14abe |
| 75dd6fb28b |
| eef93024d5 |
| cd73cb0b3e |
| e705b09280 |
| 23bd4dfbfd |
| df17582103 |
| d79b80a4bf |
| 45da421e7d |
| 122ff416ac |
| b66bf93b31 |
| 6d791e3e12 |
| f9b12517b0 |
| 195e1e9eb2 |
| 47aa90df1d |
| 460eac36f6 |
| 3a47deac07 |
| 49e8443ec5 |
| d16f93b5f7 |
| 20b29bbfa6 |
| e2a6937ca6 |
| 005a0cb84a |
| beabe38311 |
| 62315be197 |
| a26597a696 |
| 8772b04d1d |
| 7742b18c9c |
| b75b799e34 |
| 43add11b05 |
| 1764de53a5 |
| c0511b9a5f |
| 2483623c88 |
| 229d6f2dfe |
| d5ec838218 |
| 15d7a3d221 |
| c3e88b97c8 |
| ba424666f8 |
| ea3b671182 |
| f209f616c9 |
| 961af515d5 |
| a362963017 |
| 78d735f35c |
| 48f6f8f2f8 |
| 74bc0440f0 |
| 18d5a75cdc |
| a820863e8b |
| 9a210971f5 |
| e1ad235f17 |
| 4a0b329e3e |
| a22359de5e |
| bbfe4002eb |
| 94426a3cae |
| e559f0f4dc |
| 15c9c7e1f4 |
| ac641e7cba |
| 1915d7b195 |
| c2f4b383f2 |
| 939af59950 |
| 7d08da1a8a |
| 543b617e1c |
| c827fdd10e |
| ac608ded46 |
| 0e00f357a6 |
| c465d7b732 |
| aba0a93d1d |
| a49b2b2833 |
| e0ebb7097e |
| 9a79635012 |
| ce19abc968 |
| 49cd3894aa |
| da4aa37493 |
| 5a14ee9c6a |
| 3394aa5030 |
| cee0579e0e |
| d6c173c94b |
| beff848a3f |
| b9bc3123d6 |
| 639c7ad6ad |
| 822e2310ce |
| 2a0a8f6beb |
| a6fd32a15a |
| 80a1a57fde |
| 3576391482 |
| 55557f7cc8 |
| d7d02766ea |
| 627f0bffe5 |
| 8d1d95371f |
| 8bcdc041b2 |
| 9b6395d955 |
| ad1503abdc |
| a6d7ebf22e |
| cebf140bce |
| cc0cf359a2 |
| 3547873bc4 |
| 50b7bd6643 |
| 81186312ef |
| 4ec7bcb60f |
| c78fd0fc83 |
| d711fc7852 |
| 6af7efd0f4 |
| d57e7aec73 |
| e4e25aaf2b |
| e8ddd279fd |
| b742c8c7f9 |
| 803ade8601 |
| dcd96c29c5 |
| 53c31b85d0 |
| d07c2b3475 |
| a434d28579 |
| d82a63642d |
| 9585db14ba |
| c191af6c9b |
| 39346d6b8e |
| 28d63dab71 |
| 49d1cefe71 |
| 0262c25989 |
| 90b037a67f |
| fc8fd489d6 |
| 81b32e400a |
| ae732b2ebc |
| 7e7e75714e |
| ff54cbffd9 |
| 74e029e78f |
| 75324e1c7e |
| 949682ef0f |
| 66bd419baa |
| aa42759ffd |
| 52e59e40be |
| a955097faf |
| b6413b06bc |
| e6cebdf2b6 |
| 3eb6debe6a |
| 8780a78165 |
| 9dbf153489 |
| 4799593e1a |
| a199b87478 |
| 007570b5c6 |
| 5943d5346c |
**.env.example** — 87 changed lines

```diff
@@ -15,6 +15,20 @@ HOST=localhost
 PORT=3080
 
 MONGO_URI=mongodb://127.0.0.1:27017/LibreChat
+#The maximum number of connections in the connection pool. */
+MONGO_MAX_POOL_SIZE=
+#The minimum number of connections in the connection pool. */
+MONGO_MIN_POOL_SIZE=
+#The maximum number of connections that may be in the process of being established concurrently by the connection pool. */
+MONGO_MAX_CONNECTING=
+#The maximum number of milliseconds that a connection can remain idle in the pool before being removed and closed. */
+MONGO_MAX_IDLE_TIME_MS=
+#The maximum time in milliseconds that a thread can wait for a connection to become available. */
+MONGO_WAIT_QUEUE_TIMEOUT_MS=
+# Set to false to disable automatic index creation for all models associated with this connection. */
+MONGO_AUTO_INDEX=
+# Set to `false` to disable Mongoose automatically calling `createCollection()` on every model created on this connection. */
+MONGO_AUTO_CREATE=
 
 DOMAIN_CLIENT=http://localhost:3080
 DOMAIN_SERVER=http://localhost:3080
@@ -26,6 +40,13 @@ NO_INDEX=true
 # Defaulted to 1.
 TRUST_PROXY=1
 
+# Minimum password length for user authentication
+# Default: 8
+# Note: When using LDAP authentication, you may want to set this to 1
+# to bypass local password validation, as LDAP servers handle their own
+# password policies.
+# MIN_PASSWORD_LENGTH=8
+
 #===============#
 # JSON Logging #
 #===============#
@@ -142,10 +163,10 @@ GOOGLE_KEY=user_provided
 # GOOGLE_AUTH_HEADER=true
 
 # Gemini API (AI Studio)
-# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite-preview-06-17,gemini-2.0-flash,gemini-2.0-flash-lite
+# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite,gemini-2.0-flash,gemini-2.0-flash-lite
 
 # Vertex AI
-# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite-preview-06-17,gemini-2.0-flash-001,gemini-2.0-flash-lite-001
+# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite,gemini-2.0-flash-001,gemini-2.0-flash-lite-001
 
 # GOOGLE_TITLE_MODEL=gemini-2.0-flash-lite-001
 
@@ -175,7 +196,7 @@ GOOGLE_KEY=user_provided
 #============#
 
 OPENAI_API_KEY=user_provided
-# OPENAI_MODELS=o1,o1-mini,o1-preview,gpt-4o,gpt-4.5-preview,chatgpt-4o-latest,gpt-4o-mini,gpt-3.5-turbo-0125,gpt-3.5-turbo-0301,gpt-3.5-turbo,gpt-4,gpt-4-0613,gpt-4-vision-preview,gpt-3.5-turbo-0613,gpt-3.5-turbo-16k-0613,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-instruct-0914,gpt-3.5-turbo-16k
+# OPENAI_MODELS=gpt-5,gpt-5-codex,gpt-5-mini,gpt-5-nano,o3-pro,o3,o4-mini,gpt-4.1,gpt-4.1-mini,gpt-4.1-nano,o3-mini,o1-pro,o1,gpt-4o,gpt-4o-mini
 
 DEBUG_OPENAI=false
 
@@ -438,6 +459,9 @@ OPENID_CALLBACK_URL=/oauth/openid/callback
 OPENID_REQUIRED_ROLE=
 OPENID_REQUIRED_ROLE_TOKEN_KIND=
 OPENID_REQUIRED_ROLE_PARAMETER_PATH=
+OPENID_ADMIN_ROLE=
+OPENID_ADMIN_ROLE_PARAMETER_PATH=
+OPENID_ADMIN_ROLE_TOKEN_KIND=
 # Set to determine which user info property returned from OpenID Provider to store as the User's username
 OPENID_USERNAME_CLAIM=
 # Set to determine which user info property returned from OpenID Provider to store as the User's name
@@ -465,6 +489,21 @@ OPENID_ON_BEHALF_FLOW_USERINFO_SCOPE="user.read" # example for Scope Needed for
 # Set to true to use the OpenID Connect end session endpoint for logout
 OPENID_USE_END_SESSION_ENDPOINT=
 
+#========================#
+# SharePoint Integration #
+#========================#
+# Requires Entra ID (OpenID) authentication to be configured
+
+# Enable SharePoint file picker in chat and agent panels
+# ENABLE_SHAREPOINT_FILEPICKER=true
+
+# SharePoint tenant base URL (e.g., https://yourtenant.sharepoint.com)
+# SHAREPOINT_BASE_URL=https://yourtenant.sharepoint.com
+
+# Microsoft Graph API And SharePoint scopes for file picker
+# SHAREPOINT_PICKER_SHAREPOINT_SCOPE==https://yourtenant.sharepoint.com/AllSites.Read
+# SHAREPOINT_PICKER_GRAPH_SCOPE=Files.Read.All
+#========================#
 
 # SAML
 # Note: If OpenID is enabled, SAML authentication will be automatically disabled.
@@ -492,6 +531,21 @@ SAML_IMAGE_URL=
 # SAML_USE_AUTHN_RESPONSE_SIGNED=
 
 
+#===============================================#
+# Microsoft Graph API / Entra ID Integration #
+#===============================================#
+
+# Enable Entra ID people search integration in permissions/sharing system
+# When enabled, the people picker will search both local database and Entra ID
+USE_ENTRA_ID_FOR_PEOPLE_SEARCH=false
+
+# When enabled, entra id groups owners will be considered as members of the group
+ENTRA_ID_INCLUDE_OWNERS_AS_MEMBERS=false
+
+# Microsoft Graph API scopes needed for people/group search
+# Default scopes provide access to user profiles and group memberships
+OPENID_GRAPH_SCOPES=User.Read,People.Read,GroupMember.Read.All
+
 # LDAP
 LDAP_URL=
 LDAP_BIND_DN=
@@ -599,6 +653,12 @@ HELP_AND_FAQ_URL=https://librechat.ai
 # Google tag manager id
 #ANALYTICS_GTM_ID=user provided google tag manager id
 
+# limit conversation file imports to a certain number of bytes in size to avoid the container
+# maxing out memory limitations by unremarking this line and supplying a file size in bytes
+# such as the below example of 250 mib
+# CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES=262144000
+
+
 #===============#
 # REDIS Options #
 #===============#
@@ -616,6 +676,10 @@ HELP_AND_FAQ_URL=https://librechat.ai
 # REDIS_URI=rediss://127.0.0.1:6380
 # REDIS_CA=/path/to/ca-cert.pem
 
+# Elasticache may need to use an alternate dnsLookup for TLS connections. see "Special Note: Aws Elasticache Clusters with TLS" on this webpage: https://www.npmjs.com/package/ioredis
+# Enable alternative dnsLookup for redis
+# REDIS_USE_ALTERNATIVE_DNS_LOOKUP=true
+
 # Redis authentication (if required)
 # REDIS_USERNAME=your_redis_username
 # REDIS_PASSWORD=your_redis_password
@@ -635,8 +699,8 @@ HELP_AND_FAQ_URL=https://librechat.ai
 # REDIS_PING_INTERVAL=300
 
 # Force specific cache namespaces to use in-memory storage even when Redis is enabled
-# Comma-separated list of CacheKeys (e.g., STATIC_CONFIG,ROLES,MESSAGES)
-# FORCED_IN_MEMORY_CACHE_NAMESPACES=STATIC_CONFIG,ROLES
+# Comma-separated list of CacheKeys (e.g., ROLES,MESSAGES)
+# FORCED_IN_MEMORY_CACHE_NAMESPACES=ROLES,MESSAGES
 
 #==================================================#
 # Others #
@@ -698,3 +762,16 @@ OPENWEATHER_API_KEY=
 # JINA_API_KEY=your_jina_api_key
 # or
 # COHERE_API_KEY=your_cohere_api_key
+
+#======================#
+# MCP Configuration #
+#======================#
+
+# Treat 401/403 responses as OAuth requirement when no oauth metadata found
+# MCP_OAUTH_ON_AUTH_ERROR=true
+
+# Timeout for OAuth detection requests in milliseconds
+# MCP_OAUTH_DETECTION_TIMEOUT=5000
+
+# Cache connection status checks for this many milliseconds to avoid expensive verification
+# MCP_CONNECTION_CHECK_TTL=60000
```
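The comments on the new `MONGO_*` variables mirror connection-pool options of the MongoDB Node.js driver and Mongoose (`maxPoolSize`, `minPoolSize`, `maxConnecting`, `maxIdleTimeMS`, `waitQueueTimeoutMS`, `autoIndex`, `autoCreate`). The wiring that consumes them is not part of this diff; a minimal sketch under the assumption that each variable maps to the driver option of the same name:

```js
// Hedged sketch: assumes the env vars map 1:1 onto Mongoose/MongoDB driver
// connection options; the actual LibreChat wiring is not shown in this diff.
const mongoose = require('mongoose');

// Empty values in .env should mean "use the driver default", hence undefined.
const toInt = (v) => (v != null && v !== '' ? parseInt(v, 10) : undefined);
const toBool = (v) => (v != null && v !== '' ? v === 'true' : undefined);

mongoose.connect(process.env.MONGO_URI, {
  maxPoolSize: toInt(process.env.MONGO_MAX_POOL_SIZE),
  minPoolSize: toInt(process.env.MONGO_MIN_POOL_SIZE),
  maxConnecting: toInt(process.env.MONGO_MAX_CONNECTING),
  maxIdleTimeMS: toInt(process.env.MONGO_MAX_IDLE_TIME_MS),
  waitQueueTimeoutMS: toInt(process.env.MONGO_WAIT_QUEUE_TIMEOUT_MS),
  autoIndex: toBool(process.env.MONGO_AUTO_INDEX),
  autoCreate: toBool(process.env.MONGO_AUTO_CREATE),
});
```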
**.github/CONTRIBUTING.md** (vendored) — 4 changed lines

```diff
@@ -147,7 +147,7 @@ Apply the following naming conventions to branches, labels, and other Git-relate
 ## 8. Module Import Conventions
 
 - `npm` packages first,
-  - from shortest line (top) to longest (bottom)
+  - from longest line (top) to shortest (bottom)
 
 - Followed by typescript types (pertains to data-provider and client workspaces)
   - longest line (top) to shortest (bottom)
@@ -157,6 +157,8 @@ Apply the following naming conventions to branches, labels, and other Git-relate
   - longest line (top) to shortest (bottom)
   - imports with alias `~` treated the same as relative import with respect to line length
 
+**Note:** ESLint will automatically enforce these import conventions when you run `npm run lint --fix` or through pre-commit hooks.
+
 ---
 
 Please ensure that you adapt this summary to fit the specific context and nuances of your project.
```
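The corrected convention (npm packages ordered longest line to shortest, then local imports the same way) is visible throughout this PR's own import rewrites; a minimal illustration using identifiers that appear in the diffs below:

```js
// npm packages first, longest line (top) to shortest (bottom)
const { HttpsProxyAgent } = require('https-proxy-agent');
const { logger } = require('@librechat/data-schemas');
const fetch = require('node-fetch');
const crypto = require('crypto');
// then local/aliased imports, also longest to shortest
const { spendTokens } = require('~/models/spendTokens');
const BaseClient = require('./BaseClient');
```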
**.github/workflows/cache-integration-tests.yml** (vendored, new file) — 78 lines

```yaml
name: Cache Integration Tests

on:
  pull_request:
    branches:
      - main
      - dev
      - release/*
    paths:
      - 'packages/api/src/cache/**'
      - 'redis-config/**'
      - '.github/workflows/cache-integration-tests.yml'

jobs:
  cache_integration_tests:
    name: Run Cache Integration Tests
    timeout-minutes: 30
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Use Node.js 20.x
        uses: actions/setup-node@v4
        with:
          node-version: 20
          cache: 'npm'

      - name: Install Redis tools
        run: |
          sudo apt-get update
          sudo apt-get install -y redis-server redis-tools

      - name: Start Single Redis Instance
        run: |
          redis-server --daemonize yes --port 6379
          sleep 2
          # Verify single Redis is running
          redis-cli -p 6379 ping || exit 1

      - name: Start Redis Cluster
        working-directory: redis-config
        run: |
          chmod +x start-cluster.sh stop-cluster.sh
          ./start-cluster.sh
          sleep 10
          # Verify cluster is running
          redis-cli -p 7001 cluster info || exit 1
          redis-cli -p 7002 cluster info || exit 1
          redis-cli -p 7003 cluster info || exit 1

      - name: Install dependencies
        run: npm ci

      - name: Build packages
        run: |
          npm run build:data-provider
          npm run build:data-schemas
          npm run build:api

      - name: Run cache integration tests
        working-directory: packages/api
        env:
          NODE_ENV: test
          USE_REDIS: true
          REDIS_URI: redis://127.0.0.1:6379
          REDIS_CLUSTER_URI: redis://127.0.0.1:7001,redis://127.0.0.1:7002,redis://127.0.0.1:7003
        run: npm run test:cache:integration

      - name: Stop Redis Cluster
        if: always()
        working-directory: redis-config
        run: ./stop-cluster.sh || true

      - name: Stop Single Redis Instance
        if: always()
        run: redis-cli -p 6379 shutdown || true
```
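The workflow provisions both topologies — a single instance on 6379 and a three-node cluster on 7001–7003 — and hands them to the suite via `REDIS_URI` and `REDIS_CLUSTER_URI`. The tests themselves live in `packages/api/src/cache` and are not part of this diff; a hypothetical sketch of connecting to both topologies with ioredis (which the Redis comments in `.env.example` reference):

```js
// Hypothetical sketch: connect to the endpoints the workflow provisions.
const Redis = require('ioredis');

function createClient() {
  const clusterUri = process.env.REDIS_CLUSTER_URI;
  if (clusterUri) {
    // ioredis cluster mode takes a list of startup nodes.
    const nodes = clusterUri.split(',').map((uri) => {
      const { hostname, port } = new URL(uri);
      return { host: hostname, port: Number(port) };
    });
    return new Redis.Cluster(nodes);
  }
  // Single-instance mode, e.g. redis://127.0.0.1:6379
  return new Redis(process.env.REDIS_URI);
}

async function smokeTest() {
  const client = createClient();
  await client.set('cache:test', 'ok', 'EX', 10); // 10-second TTL
  const value = await client.get('cache:test');
  console.assert(value === 'ok', 'round-trip failed');
  await client.quit();
}

smokeTest().catch(console.error);
```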
**.github/workflows/data-provider.yml** (vendored) — 12 changed lines

```diff
@@ -1,4 +1,4 @@
-name: Node.js Package
+name: Publish `librechat-data-provider` to NPM
 
 on:
   push:
@@ -6,6 +6,12 @@ on:
       - main
     paths:
       - 'packages/data-provider/package.json'
+  workflow_dispatch:
+    inputs:
+      reason:
+        description: 'Reason for manual trigger'
+        required: false
+        default: 'Manual publish requested'
 
 jobs:
   build:
@@ -14,7 +20,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version: 16
+          node-version: 20
      - run: cd packages/data-provider && npm ci
      - run: cd packages/data-provider && npm run build
@@ -25,7 +31,7 @@ jobs:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
-          node-version: 16
+          node-version: 20
          registry-url: 'https://registry.npmjs.org'
      - run: cd packages/data-provider && npm ci
      - run: cd packages/data-provider && npm run build
```
**.github/workflows/i18n-unused-keys.yml** (vendored) — 33 changed lines

```diff
@@ -1,5 +1,10 @@
 name: Detect Unused i18next Strings
 
+# This workflow checks for unused i18n keys in translation files.
+# It has special handling for:
+# - com_ui_special_var_* keys that are dynamically constructed
+# - com_agents_category_* keys that are stored in the database and used dynamically
+
 on:
   pull_request:
     paths:
@@ -7,6 +12,7 @@ on:
       - "api/**"
       - "packages/data-provider/src/**"
       - "packages/client/**"
+      - "packages/data-schemas/src/**"
 
 jobs:
   detect-unused-i18n-keys:
@@ -24,7 +30,7 @@ jobs:
 
           # Define paths
           I18N_FILE="client/src/locales/en/translation.json"
-          SOURCE_DIRS=("client/src" "api" "packages/data-provider/src" "packages/client")
+          SOURCE_DIRS=("client/src" "api" "packages/data-provider/src" "packages/client" "packages/data-schemas/src")
 
           # Check if translation file exists
           if [[ ! -f "$I18N_FILE" ]]; then
@@ -52,6 +58,31 @@ jobs:
               fi
             done
 
+            # Also check if the key is directly used somewhere
+            if [[ "$FOUND" == false ]]; then
+              for DIR in "${SOURCE_DIRS[@]}"; do
+                if grep -r --include=\*.{js,jsx,ts,tsx} -q "$KEY" "$DIR"; then
+                  FOUND=true
+                  break
+                fi
+              done
+            fi
+          # Special case for agent category keys that are dynamically used from database
+          elif [[ "$KEY" == com_agents_category_* ]]; then
+            # Check if agent category localization is being used
+            for DIR in "${SOURCE_DIRS[@]}"; do
+              # Check for dynamic category label/description usage
+              if grep -r --include=\*.{js,jsx,ts,tsx} -E "category\.(label|description).*startsWith.*['\"]com_" "$DIR" > /dev/null 2>&1 || \
+                 # Check for the method that defines these keys
+                 grep -r --include=\*.{js,jsx,ts,tsx} "ensureDefaultCategories" "$DIR" > /dev/null 2>&1 || \
+                 # Check for direct usage in agentCategory.ts
+                 grep -r --include=\*.ts -E "label:.*['\"]$KEY['\"]" "$DIR" > /dev/null 2>&1 || \
+                 grep -r --include=\*.ts -E "description:.*['\"]$KEY['\"]" "$DIR" > /dev/null 2>&1; then
+                FOUND=true
+                break
+              fi
+            done
+
           # Also check if the key is directly used somewhere
           if [[ "$FOUND" == false ]]; then
             for DIR in "${SOURCE_DIRS[@]}"; do
```
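The special cases exist because these keys never appear verbatim in source, so a plain grep for the key would report them as unused. A hypothetical illustration of the dynamic construction the new grep patterns are trying to catch:

```js
// Hypothetical examples of keys a literal grep would miss.
// A key assembled at runtime from a database-provided category value:
const labelKey = `com_agents_category_${category.value}`; // e.g. com_agents_category_finance

// The workflow therefore greps for the consuming pattern instead, e.g.:
if (category.label.startsWith('com_')) {
  label = localize(category.label); // localize() is a stand-in for the real helper
}
```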
**.gitignore** (vendored) — 1 changed line

```diff
@@ -137,3 +137,4 @@ helm/**/.values.yaml
 /.openai/
 /.tabnine/
 /.codeium
+*.local.md
```
```diff
@@ -1,5 +1,2 @@
-#!/usr/bin/env sh
-set -e
-. "$(dirname -- "$0")/_/husky.sh"
 [ -n "$CI" ] && exit 0
 npx lint-staged --config ./.husky/lint-staged.config.js
```
**.vscode/launch.json** (vendored) — 3 changed lines

```diff
@@ -8,7 +8,8 @@
       "skipFiles": ["<node_internals>/**"],
       "program": "${workspaceFolder}/api/server/index.js",
       "env": {
-        "NODE_ENV": "production"
+        "NODE_ENV": "production",
+        "NODE_TLS_REJECT_UNAUTHORIZED": "0"
       },
       "console": "integratedTerminal",
       "envFile": "${workspaceFolder}/.env"
```
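Worth flagging for review: `NODE_TLS_REJECT_UNAUTHORIZED=0` makes Node.js accept any TLS certificate for the debugged process, so this launch profile is only appropriate for local debugging and should not be copied into any deployed configuration.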
**Dockerfile** — 21 changed lines

```diff
@@ -1,4 +1,4 @@
-# v0.8.0-rc2
+# v0.8.0
 
 # Base node image
 FROM node:20-alpine AS node
@@ -19,24 +19,31 @@ WORKDIR /app
 
 USER node
 
-COPY --chown=node:node . .
+COPY --chown=node:node package.json package-lock.json ./
+COPY --chown=node:node api/package.json ./api/package.json
+COPY --chown=node:node client/package.json ./client/package.json
+COPY --chown=node:node packages/data-provider/package.json ./packages/data-provider/package.json
+COPY --chown=node:node packages/data-schemas/package.json ./packages/data-schemas/package.json
+COPY --chown=node:node packages/api/package.json ./packages/api/package.json
 
 RUN \
     # Allow mounting of these files, which have no default
     touch .env ; \
     # Create directories for the volumes to inherit the correct permissions
-    mkdir -p /app/client/public/images /app/api/logs ; \
+    mkdir -p /app/client/public/images /app/api/logs /app/uploads ; \
     npm config set fetch-retry-maxtimeout 600000 ; \
    npm config set fetch-retries 5 ; \
    npm config set fetch-retry-mintimeout 15000 ; \
-    npm install --no-audit; \
+    npm ci --no-audit
+
+COPY --chown=node:node . .
+
+RUN \
    # React client build
    NODE_OPTIONS="--max-old-space-size=2048" npm run frontend; \
    npm prune --production; \
    npm cache clean --force
 
-RUN mkdir -p /app/client/public/images /app/api/logs
 
 # Node API setup
 EXPOSE 3080
 ENV HOST=0.0.0.0
@@ -47,4 +54,4 @@ CMD ["npm", "run", "backend"]
 # WORKDIR /usr/share/nginx/html
 # COPY --from=node /app/client/dist /usr/share/nginx/html
 # COPY client/nginx.conf /etc/nginx/conf.d/default.conf
 # ENTRYPOINT ["nginx", "-g", "daemon off;"]
```
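This restructuring follows the standard Docker layer-caching pattern: the dependency manifests are copied and installed in their own layer before the full `COPY . .`, so source-only changes no longer invalidate the cached install step, and switching from `npm install` to `npm ci` pins the install to `package-lock.json` for reproducible builds.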
```diff
@@ -1,5 +1,5 @@
 # Dockerfile.multi
-# v0.8.0-rc2
+# v0.8.0
 
 # Base for all builds
 FROM node:20-alpine AS base-min
```
**README.md** — 16 changed lines

```diff
@@ -65,14 +65,17 @@
 
 - 🔦 **Agents & Tools Integration**:
   - **[LibreChat Agents](https://www.librechat.ai/docs/features/agents)**:
-    - No-Code Custom Assistants: Build specialized, AI-driven helpers without coding
-    - Flexible & Extensible: Use MCP Servers, tools, file search, code execution, and more
+    - No-Code Custom Assistants: Build specialized, AI-driven helpers
+    - Agent Marketplace: Discover and deploy community-built agents
+    - Collaborative Sharing: Share agents with specific users and groups
+    - Flexible & Extensible: Use MCP Servers, tools, file search, code execution, and more
   - Compatible with Custom Endpoints, OpenAI, Azure, Anthropic, AWS Bedrock, Google, Vertex AI, Responses API, and more
   - [Model Context Protocol (MCP) Support](https://modelcontextprotocol.io/clients#librechat) for Tools
 
 - 🔍 **Web Search**:
   - Search the internet and retrieve relevant information to enhance your AI context
   - Combines search providers, content scrapers, and result rerankers for optimal results
+  - **Customizable Jina Reranking**: Configure custom Jina API URLs for reranking services
   - **[Learn More →](https://www.librechat.ai/docs/features/web_search)**
 
 - 🪄 **Generative UI with Code Artifacts**:
@@ -87,15 +90,18 @@
   - Create, Save, & Share Custom Presets
   - Switch between AI Endpoints and Presets mid-chat
   - Edit, Resubmit, and Continue Messages with Conversation branching
+  - Create and share prompts with specific users and groups
   - [Fork Messages & Conversations](https://www.librechat.ai/docs/features/fork) for Advanced Context control
 
 - 💬 **Multimodal & File Interactions**:
   - Upload and analyze images with Claude 3, GPT-4.5, GPT-4o, o1, Llama-Vision, and Gemini 📸
   - Chat with Files using Custom Endpoints, OpenAI, Azure, Anthropic, AWS Bedrock, & Google 🗃️
 
 - 🌎 **Multilingual UI**:
-  - English, 中文, Deutsch, Español, Français, Italiano, Polski, Português Brasileiro
-  - Русский, 日本語, Svenska, 한국어, Tiếng Việt, 繁體中文, العربية, Türkçe, Nederlands, עברית
+  - English, 中文 (简体), 中文 (繁體), العربية, Deutsch, Español, Français, Italiano
+  - Polski, Português (PT), Português (BR), Русский, 日本語, Svenska, 한국어, Tiếng Việt
+  - Türkçe, Nederlands, עברית, Català, Čeština, Dansk, Eesti, فارسی
+  - Suomi, Magyar, Հայերեն, Bahasa Indonesia, ქართული, Latviešu, ไทย, ئۇيغۇرچە
 
 - 🧠 **Reasoning UI**:
   - Dynamic Reasoning UI for Chain-of-Thought/Reasoning AI models like DeepSeek-R1
```
```diff
@@ -1,4 +1,5 @@
 const Anthropic = require('@anthropic-ai/sdk');
+const { logger } = require('@librechat/data-schemas');
 const { HttpsProxyAgent } = require('https-proxy-agent');
 const {
   Constants,
@@ -9,8 +10,18 @@ const {
   getResponseSender,
   validateVisionModel,
 } = require('librechat-data-provider');
-const { SplitStreamHandler: _Handler } = require('@librechat/agents');
-const { Tokenizer, createFetch, createStreamEventHandlers } = require('@librechat/api');
+const { sleep, SplitStreamHandler: _Handler } = require('@librechat/agents');
+const {
+  Tokenizer,
+  createFetch,
+  matchModelName,
+  getClaudeHeaders,
+  getModelMaxTokens,
+  configureReasoning,
+  checkPromptCacheSupport,
+  getModelMaxOutputTokens,
+  createStreamEventHandlers,
+} = require('@librechat/api');
 const {
   truncateText,
   formatMessage,
@@ -19,17 +30,9 @@ const {
   parseParamFromPrompt,
   createContextHandlers,
 } = require('./prompts');
-const {
-  getClaudeHeaders,
-  configureReasoning,
-  checkPromptCacheSupport,
-} = require('~/server/services/Endpoints/anthropic/helpers');
-const { getModelMaxTokens, getModelMaxOutputTokens, matchModelName } = require('~/utils');
 const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
-const { sleep } = require('~/server/utils');
 const BaseClient = require('./BaseClient');
-const { logger } = require('~/config');
 
 const HUMAN_PROMPT = '\n\nHuman:';
 const AI_PROMPT = '\n\nAssistant:';
```
```diff
@@ -1,21 +1,31 @@
 const crypto = require('crypto');
 const fetch = require('node-fetch');
+const { logger } = require('@librechat/data-schemas');
 const {
-  supportsBalanceCheck,
-  isAgentsEndpoint,
-  isParamEndpoint,
-  EModelEndpoint,
+  getBalanceConfig,
+  extractFileContext,
+  encodeAndFormatAudios,
+  encodeAndFormatVideos,
+  encodeAndFormatDocuments,
+} = require('@librechat/api');
+const {
+  Constants,
+  ErrorTypes,
+  FileSources,
   ContentTypes,
   excludedKeys,
-  ErrorTypes,
-  Constants,
+  EModelEndpoint,
+  isParamEndpoint,
+  isAgentsEndpoint,
+  supportsBalanceCheck,
 } = require('librechat-data-provider');
 const { getMessages, saveMessage, updateMessage, saveConvo, getConvo } = require('~/models');
+const { getStrategyFunctions } = require('~/server/services/Files/strategies');
 const { checkBalance } = require('~/models/balanceMethods');
 const { truncateToolCallOutputs } = require('./prompts');
+const countTokens = require('~/server/utils/countTokens');
 const { getFiles } = require('~/models/File');
 const TextStream = require('./TextStream');
-const { logger } = require('~/config');
 
 class BaseClient {
   constructor(apiKey, options = {}) {
@@ -37,6 +47,8 @@ class BaseClient {
     this.conversationId;
     /** @type {string} */
     this.responseMessageId;
+    /** @type {string} */
+    this.parentMessageId;
     /** @type {TAttachment[]} */
     this.attachments;
     /** The key for the usage object's input tokens
@@ -110,13 +122,15 @@ class BaseClient {
    * If a correction to the token usage is needed, the method should return an object with the corrected token counts.
    * Should only be used if `recordCollectedUsage` was not used instead.
    * @param {string} [model]
+   * @param {AppConfig['balance']} [balance]
    * @param {number} promptTokens
    * @param {number} completionTokens
    * @returns {Promise<void>}
    */
-  async recordTokenUsage({ model, promptTokens, completionTokens }) {
+  async recordTokenUsage({ model, balance, promptTokens, completionTokens }) {
     logger.debug('[BaseClient] `recordTokenUsage` not implemented.', {
       model,
+      balance,
       promptTokens,
       completionTokens,
     });
@@ -185,7 +199,8 @@ class BaseClient {
     this.user = user;
     const saveOptions = this.getSaveOptions();
     this.abortController = opts.abortController ?? new AbortController();
-    const conversationId = overrideConvoId ?? opts.conversationId ?? crypto.randomUUID();
+    const requestConvoId = overrideConvoId ?? opts.conversationId;
+    const conversationId = requestConvoId ?? crypto.randomUUID();
     const parentMessageId = opts.parentMessageId ?? Constants.NO_PARENT;
     const userMessageId =
       overrideUserMessageId ?? opts.overrideParentMessageId ?? crypto.randomUUID();
@@ -210,11 +225,12 @@ class BaseClient {
       ...opts,
       user,
       head,
+      saveOptions,
+      userMessageId,
+      requestConvoId,
       conversationId,
       parentMessageId,
-      userMessageId,
       responseMessageId,
-      saveOptions,
     };
   }
 
@@ -233,11 +249,12 @@ class BaseClient {
     const {
       user,
       head,
+      saveOptions,
+      userMessageId,
+      requestConvoId,
       conversationId,
       parentMessageId,
-      userMessageId,
       responseMessageId,
-      saveOptions,
     } = await this.setMessageOptions(opts);
 
     const userMessage = opts.isEdited
@@ -259,7 +276,8 @@ class BaseClient {
     }
 
     if (typeof opts?.onStart === 'function') {
-      opts.onStart(userMessage, responseMessageId);
+      const isNewConvo = !requestConvoId && parentMessageId === Constants.NO_PARENT;
+      opts.onStart(userMessage, responseMessageId, isNewConvo);
     }
 
     return {
@@ -565,6 +583,7 @@ class BaseClient {
   }
 
   async sendMessage(message, opts = {}) {
+    const appConfig = this.options.req?.config;
     /** @type {Promise<TMessage>} */
     let userMessagePromise;
     const { user, head, isEdited, conversationId, responseMessageId, saveOptions, userMessage } =
@@ -614,15 +633,19 @@ class BaseClient {
       this.currentMessages.push(userMessage);
     }
 
+    /**
+     * When the userMessage is pushed to currentMessages, the parentMessage is the userMessageId.
+     * this only matters when buildMessages is utilizing the parentMessageId, and may vary on implementation
+     */
+    const parentMessageId = isEdited ? head : userMessage.messageId;
+    this.parentMessageId = parentMessageId;
     let {
       prompt: payload,
       tokenCountMap,
       promptTokens,
     } = await this.buildMessages(
       this.currentMessages,
-      // When the userMessage is pushed to currentMessages, the parentMessage is the userMessageId.
-      // this only matters when buildMessages is utilizing the parentMessageId, and may vary on implementation
-      isEdited ? head : userMessage.messageId,
+      parentMessageId,
       this.getBuildMessagesOptions(opts),
       opts,
     );
@@ -647,9 +670,9 @@ class BaseClient {
       }
     }
 
-    const balance = this.options.req?.app?.locals?.balance;
+    const balanceConfig = getBalanceConfig(appConfig);
     if (
-      balance?.enabled &&
+      balanceConfig?.enabled &&
       supportsBalanceCheck[this.options.endpointType ?? this.options.endpoint]
     ) {
       await checkBalance({
@@ -748,6 +771,7 @@ class BaseClient {
         usage,
         promptTokens,
         completionTokens,
+        balance: balanceConfig,
         model: responseMessage.model,
       });
     }
@@ -1183,8 +1207,135 @@ class BaseClient {
     return await this.sendCompletion(payload, opts);
   }
 
+  async addDocuments(message, attachments) {
+    const documentResult = await encodeAndFormatDocuments(
+      this.options.req,
+      attachments,
+      {
+        provider: this.options.agent?.provider,
+        useResponsesApi: this.options.agent?.model_parameters?.useResponsesApi,
+      },
+      getStrategyFunctions,
+    );
+    message.documents =
+      documentResult.documents && documentResult.documents.length
+        ? documentResult.documents
+        : undefined;
+    return documentResult.files;
+  }
+
+  async addVideos(message, attachments) {
+    const videoResult = await encodeAndFormatVideos(
+      this.options.req,
+      attachments,
+      this.options.agent.provider,
+      getStrategyFunctions,
+    );
+    message.videos =
+      videoResult.videos && videoResult.videos.length ? videoResult.videos : undefined;
+    return videoResult.files;
+  }
+
+  async addAudios(message, attachments) {
+    const audioResult = await encodeAndFormatAudios(
+      this.options.req,
+      attachments,
+      this.options.agent.provider,
+      getStrategyFunctions,
+    );
+    message.audios =
+      audioResult.audios && audioResult.audios.length ? audioResult.audios : undefined;
+    return audioResult.files;
+  }
+
+  /**
+   * Extracts text context from attachments and sets it on the message.
+   * This handles text that was already extracted from files (OCR, transcriptions, document text, etc.)
+   * @param {TMessage} message - The message to add context to
+   * @param {MongoFile[]} attachments - Array of file attachments
+   * @returns {Promise<void>}
+   */
+  async addFileContextToMessage(message, attachments) {
+    const fileContext = await extractFileContext({
+      attachments,
+      req: this.options?.req,
+      tokenCountFn: (text) => countTokens(text),
+    });
+
+    if (fileContext) {
+      message.fileContext = fileContext;
+    }
+  }
+
+  async processAttachments(message, attachments) {
+    const categorizedAttachments = {
+      images: [],
+      videos: [],
+      audios: [],
+      documents: [],
+    };
+
+    const allFiles = [];
+
+    for (const file of attachments) {
+      /** @type {FileSources} */
+      const source = file.source ?? FileSources.local;
+      if (source === FileSources.text) {
+        allFiles.push(file);
+        continue;
+      }
+      if (file.embedded === true || file.metadata?.fileIdentifier != null) {
+        allFiles.push(file);
+        continue;
+      }
+
+      if (file.type.startsWith('image/')) {
+        categorizedAttachments.images.push(file);
+      } else if (file.type === 'application/pdf') {
+        categorizedAttachments.documents.push(file);
+        allFiles.push(file);
+      } else if (file.type.startsWith('video/')) {
+        categorizedAttachments.videos.push(file);
+        allFiles.push(file);
+      } else if (file.type.startsWith('audio/')) {
+        categorizedAttachments.audios.push(file);
+        allFiles.push(file);
+      }
+    }
+
+    const [imageFiles] = await Promise.all([
+      categorizedAttachments.images.length > 0
+        ? this.addImageURLs(message, categorizedAttachments.images)
+        : Promise.resolve([]),
+      categorizedAttachments.documents.length > 0
+        ? this.addDocuments(message, categorizedAttachments.documents)
+        : Promise.resolve([]),
+      categorizedAttachments.videos.length > 0
+        ? this.addVideos(message, categorizedAttachments.videos)
+        : Promise.resolve([]),
+      categorizedAttachments.audios.length > 0
+        ? this.addAudios(message, categorizedAttachments.audios)
+        : Promise.resolve([]),
+    ]);
+
+    allFiles.push(...imageFiles);
+
+    const seenFileIds = new Set();
+    const uniqueFiles = [];
+
+    for (const file of allFiles) {
+      if (file.file_id && !seenFileIds.has(file.file_id)) {
+        seenFileIds.add(file.file_id);
+        uniqueFiles.push(file);
+      } else if (!file.file_id) {
+        uniqueFiles.push(file);
+      }
+    }
+
+    return uniqueFiles;
+  }
+
   /**
-   *
    * @param {TMessage[]} _messages
    * @returns {Promise<TMessage[]>}
    */
@@ -1233,7 +1384,8 @@ class BaseClient {
       {},
     );
 
-    await this.addImageURLs(message, files, this.visionMode);
+    await this.addFileContextToMessage(message, files);
+    await this.processAttachments(message, files);
 
     this.message_file_map[message.messageId] = files;
     return message;
```
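Two behavioral changes in this file are easy to miss among the import shuffles: attachments are now categorized by MIME type and dispatched to per-modality encoders in parallel, and `opts.onStart` gains a third argument. A hypothetical caller-side sketch of the new `onStart` signature (the handler names here are illustrative, not from the diff):

```js
// Hypothetical consumer: the third argument signals that the request carried
// no conversationId and has no parent message, i.e. a brand-new conversation.
const opts = {
  onStart: (userMessage, responseMessageId, isNewConvo) => {
    if (isNewConvo) {
      // e.g. optimistically add the conversation to a sidebar list
      addConversation({ conversationId: userMessage.conversationId });
    }
    trackPendingResponse(responseMessageId); // illustrative helper
  },
};
```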
```diff
@@ -1,4 +1,7 @@
 const { google } = require('googleapis');
+const { sleep } = require('@librechat/agents');
+const { logger } = require('@librechat/data-schemas');
+const { getModelMaxTokens } = require('@librechat/api');
 const { concat } = require('@langchain/core/utils/stream');
 const { ChatVertexAI } = require('@langchain/google-vertexai');
 const { Tokenizer, getSafetySettings } = require('@librechat/api');
@@ -21,9 +24,6 @@ const {
 } = require('librechat-data-provider');
 const { encodeAndFormat } = require('~/server/services/Files/images');
 const { spendTokens } = require('~/models/spendTokens');
-const { getModelMaxTokens } = require('~/utils');
-const { sleep } = require('~/server/utils');
-const { logger } = require('~/config');
 const {
   formatMessage,
   createContextHandlers,
```
@@ -1,13 +1,15 @@
|
|||||||
const { OllamaClient } = require('./OllamaClient');
|
const { logger } = require('@librechat/data-schemas');
|
||||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||||
const { SplitStreamHandler, CustomOpenAIClient: OpenAI } = require('@librechat/agents');
|
const { sleep, SplitStreamHandler, CustomOpenAIClient: OpenAI } = require('@librechat/agents');
|
||||||
const {
|
const {
|
||||||
isEnabled,
|
isEnabled,
|
||||||
Tokenizer,
|
Tokenizer,
|
||||||
createFetch,
|
createFetch,
|
||||||
resolveHeaders,
|
resolveHeaders,
|
||||||
constructAzureURL,
|
constructAzureURL,
|
||||||
|
getModelMaxTokens,
|
||||||
genAzureChatCompletion,
|
genAzureChatCompletion,
|
||||||
|
getModelMaxOutputTokens,
|
||||||
createStreamEventHandlers,
|
createStreamEventHandlers,
|
||||||
} = require('@librechat/api');
|
} = require('@librechat/api');
|
||||||
const {
|
const {
|
||||||
@@ -31,17 +33,16 @@ const {
|
|||||||
titleInstruction,
|
titleInstruction,
|
||||||
createContextHandlers,
|
createContextHandlers,
|
||||||
} = require('./prompts');
|
} = require('./prompts');
|
||||||
const { extractBaseURL, getModelMaxTokens, getModelMaxOutputTokens } = require('~/utils');
|
|
||||||
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
|
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
|
||||||
const { addSpaceIfNeeded, sleep } = require('~/server/utils');
|
|
||||||
const { spendTokens } = require('~/models/spendTokens');
+const { spendTokens } = require('~/models/spendTokens');
+const { addSpaceIfNeeded } = require('~/server/utils');
 const { handleOpenAIErrors } = require('./tools/util');
-const { createLLM, RunManager } = require('./llm');
+const { OllamaClient } = require('./OllamaClient');
 const { summaryBuffer } = require('./memory');
 const { runTitleChain } = require('./chains');
+const { extractBaseURL } = require('~/utils');
 const { tokenSplit } = require('./document');
 const BaseClient = require('./BaseClient');
-const { logger } = require('~/config');

 class OpenAIClient extends BaseClient {
   constructor(apiKey, options = {}) {
@@ -612,77 +613,8 @@ class OpenAIClient extends BaseClient {
     return (reply ?? '').trim();
   }

-  initializeLLM({
-    model = openAISettings.model.default,
-    modelName,
-    temperature = 0.2,
-    max_tokens,
-    streaming,
-    context,
-    tokenBuffer,
-    initialMessageCount,
-    conversationId,
-  }) {
-    const modelOptions = {
-      modelName: modelName ?? model,
-      temperature,
-      user: this.user,
-    };
-
-    if (max_tokens) {
-      modelOptions.max_tokens = max_tokens;
-    }
-
-    const configOptions = {};
-
-    if (this.langchainProxy) {
-      configOptions.basePath = this.langchainProxy;
-    }
-
-    if (this.useOpenRouter) {
-      configOptions.basePath = 'https://openrouter.ai/api/v1';
-      configOptions.baseOptions = {
-        headers: {
-          'HTTP-Referer': 'https://librechat.ai',
-          'X-Title': 'LibreChat',
-        },
-      };
-    }
-
-    const { headers } = this.options;
-    if (headers && typeof headers === 'object' && !Array.isArray(headers)) {
-      configOptions.baseOptions = {
-        headers: resolveHeaders({
-          ...headers,
-          ...configOptions?.baseOptions?.headers,
-        }),
-      };
-    }
-
-    if (this.options.proxy) {
-      configOptions.httpAgent = new HttpsProxyAgent(this.options.proxy);
-      configOptions.httpsAgent = new HttpsProxyAgent(this.options.proxy);
-    }
-
-    const { req, res, debug } = this.options;
-    const runManager = new RunManager({ req, res, debug, abortController: this.abortController });
-    this.runManager = runManager;
-
-    const llm = createLLM({
-      modelOptions,
-      configOptions,
-      openAIApiKey: this.apiKey,
-      azure: this.azure,
-      streaming,
-      callbacks: runManager.createCallbacks({
-        context,
-        tokenBuffer,
-        conversationId: this.conversationId ?? conversationId,
-        initialMessageCount,
-      }),
-    });
-
-    return llm;
+  initializeLLM() {
+    throw new Error('Deprecated');
   }

   /**
@@ -700,6 +632,7 @@ class OpenAIClient extends BaseClient {
   * In case of failure, it will return the default title, "New Chat".
   */
  async titleConvo({ text, conversationId, responseText = '' }) {
+    const appConfig = this.options.req?.config;
    this.conversationId = conversationId;

    if (this.options.attachments) {
@@ -728,8 +661,7 @@ class OpenAIClient extends BaseClient {
      max_tokens: 16,
    };

-    /** @type {TAzureConfig | undefined} */
-    const azureConfig = this.options?.req?.app?.locals?.[EModelEndpoint.azureOpenAI];
+    const azureConfig = appConfig?.endpoints?.[EModelEndpoint.azureOpenAI];

    const resetTitleOptions = !!(
      (this.azure && azureConfig) ||
@@ -749,7 +681,7 @@ class OpenAIClient extends BaseClient {
        groupMap,
      });

-      this.options.headers = resolveHeaders(headers);
+      this.options.headers = resolveHeaders({ headers });
      this.options.reverseProxyUrl = baseURL ?? null;
      this.langchainProxy = extractBaseURL(this.options.reverseProxyUrl);
      this.apiKey = azureOptions.azureOpenAIApiKey;
@@ -1118,6 +1050,7 @@ ${convo}
  }

  async chatCompletion({ payload, onProgress, abortController = null }) {
+    const appConfig = this.options.req?.config;
    let error = null;
    let intermediateReply = [];
    const errorCallback = (err) => (error = err);
@@ -1163,8 +1096,7 @@ ${convo}
      opts.fetchOptions.agent = new HttpsProxyAgent(this.options.proxy);
    }

-    /** @type {TAzureConfig | undefined} */
-    const azureConfig = this.options?.req?.app?.locals?.[EModelEndpoint.azureOpenAI];
+    const azureConfig = appConfig?.endpoints?.[EModelEndpoint.azureOpenAI];

    if (
      (this.azure && this.isVisionModel && azureConfig) ||
@@ -1181,7 +1113,7 @@ ${convo}
        modelGroupMap,
        groupMap,
      });
-      opts.defaultHeaders = resolveHeaders(headers);
+      opts.defaultHeaders = resolveHeaders({ headers });
      this.langchainProxy = extractBaseURL(baseURL);
      this.apiKey = azureOptions.azureOpenAIApiKey;

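Note: taken together, the OpenAIClient hunks above move the Azure endpoint lookup from Express `app.locals` to the per-request app config, and change `resolveHeaders` to take an options object (`resolveHeaders({ headers })`) rather than a bare headers value. A minimal sketch of the new lookup shape, assuming a request wired the way this diff implies; the helper name `getAzureConfig` is illustrative, not part of the diff:

```js
const { EModelEndpoint } = require('librechat-data-provider');

// Illustrative helper: resolves the Azure OpenAI endpoint config the way
// the new code does (req.config); the old app.locals path is shown for contrast.
function getAzureConfig(req) {
  const appConfig = req?.config;
  return appConfig?.endpoints?.[EModelEndpoint.azureOpenAI];
  // previously: req?.app?.locals?.[EModelEndpoint.azureOpenAI]
}

module.exports = { getAzureConfig };
```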
@@ -1,5 +1,5 @@
 const { Readable } = require('stream');
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');

 class TextStream extends Readable {
   constructor(text, options = {}) {
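Note: this `logger` swap repeats across most files in this compare: the shared logger now comes from the `@librechat/data-schemas` package instead of the `~/config` alias. A minimal before/after sketch (the log message is illustrative):

```js
// Before (removed throughout this compare):
// const { logger } = require('~/config');

// After (as introduced in these diffs):
const { logger } = require('@librechat/data-schemas');

logger.debug('[TextStream] chunk emitted'); // call sites are unchanged
```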
@@ -1,5 +1,5 @@
+const { logger } = require('@librechat/data-schemas');
 const { ZeroShotAgentOutputParser } = require('langchain/agents');
-const { logger } = require('~/config');

 class CustomOutputParser extends ZeroShotAgentOutputParser {
   constructor(fields) {
@@ -1,95 +0,0 @@
-const { promptTokensEstimate } = require('openai-chat-tokens');
-const { EModelEndpoint, supportsBalanceCheck } = require('librechat-data-provider');
-const { formatFromLangChain } = require('~/app/clients/prompts');
-const { getBalanceConfig } = require('~/server/services/Config');
-const { checkBalance } = require('~/models/balanceMethods');
-const { logger } = require('~/config');
-
-const createStartHandler = ({
-  context,
-  conversationId,
-  tokenBuffer = 0,
-  initialMessageCount,
-  manager,
-}) => {
-  return async (_llm, _messages, runId, parentRunId, extraParams) => {
-    const { invocation_params } = extraParams;
-    const { model, functions, function_call } = invocation_params;
-    const messages = _messages[0].map(formatFromLangChain);
-
-    logger.debug(`[createStartHandler] handleChatModelStart: ${context}`, {
-      model,
-      function_call,
-    });
-
-    if (context !== 'title') {
-      logger.debug(`[createStartHandler] handleChatModelStart: ${context}`, {
-        functions,
-      });
-    }
-
-    const payload = { messages };
-    let prelimPromptTokens = 1;
-
-    if (functions) {
-      payload.functions = functions;
-      prelimPromptTokens += 2;
-    }
-
-    if (function_call) {
-      payload.function_call = function_call;
-      prelimPromptTokens -= 5;
-    }
-
-    prelimPromptTokens += promptTokensEstimate(payload);
-    logger.debug('[createStartHandler]', {
-      prelimPromptTokens,
-      tokenBuffer,
-    });
-    prelimPromptTokens += tokenBuffer;
-
-    try {
-      const balance = await getBalanceConfig();
-      if (balance?.enabled && supportsBalanceCheck[EModelEndpoint.openAI]) {
-        const generations =
-          initialMessageCount && messages.length > initialMessageCount
-            ? messages.slice(initialMessageCount)
-            : null;
-        await checkBalance({
-          req: manager.req,
-          res: manager.res,
-          txData: {
-            user: manager.user,
-            tokenType: 'prompt',
-            amount: prelimPromptTokens,
-            debug: manager.debug,
-            generations,
-            model,
-            endpoint: EModelEndpoint.openAI,
-          },
-        });
-      }
-    } catch (err) {
-      logger.error(`[createStartHandler][${context}] checkBalance error`, err);
-      manager.abortController.abort();
-      if (context === 'summary' || context === 'plugins') {
-        manager.addRun(runId, { conversationId, error: err.message });
-        throw new Error(err);
-      }
-      return;
-    }
-
-    manager.addRun(runId, {
-      model,
-      messages,
-      functions,
-      function_call,
-      runId,
-      parentRunId,
-      conversationId,
-      prelimPromptTokens,
-    });
-  };
-};
-
-module.exports = createStartHandler;
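Note: for context on what the deleted handler did, it pre-estimated prompt tokens with `openai-chat-tokens` before running a balance check. A small standalone sketch of that estimation call (message content is illustrative):

```js
const { promptTokensEstimate } = require('openai-chat-tokens');

// Estimate prompt tokens for a chat payload, the same way the removed
// createStartHandler built its preliminary count before checkBalance.
const estimate = promptTokensEstimate({
  messages: [{ role: 'user', content: 'Hello, world!' }],
});
console.log(estimate); // an integer token estimate
```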
@@ -1,5 +0,0 @@
-const createStartHandler = require('./createStartHandler');
-
-module.exports = {
-  createStartHandler,
-};
@@ -1,7 +1,7 @@
 const { z } = require('zod');
+const { logger } = require('@librechat/data-schemas');
 const { langPrompt, createTitlePrompt, escapeBraces, getSnippet } = require('../prompts');
 const { createStructuredOutputChainFromZod } = require('langchain/chains/openai_functions');
-const { logger } = require('~/config');

 const langSchema = z.object({
   language: z.string().describe('The language of the input text (full noun, no abbreviations).'),
@@ -1,105 +0,0 @@
-const { createStartHandler } = require('~/app/clients/callbacks');
-const { spendTokens } = require('~/models/spendTokens');
-const { logger } = require('~/config');
-
-class RunManager {
-  constructor(fields) {
-    const { req, res, abortController, debug } = fields;
-    this.abortController = abortController;
-    this.user = req.user.id;
-    this.req = req;
-    this.res = res;
-    this.debug = debug;
-    this.runs = new Map();
-    this.convos = new Map();
-  }
-
-  addRun(runId, runData) {
-    if (!this.runs.has(runId)) {
-      this.runs.set(runId, runData);
-      if (runData.conversationId) {
-        this.convos.set(runData.conversationId, runId);
-      }
-      return runData;
-    } else {
-      const existingData = this.runs.get(runId);
-      const update = { ...existingData, ...runData };
-      this.runs.set(runId, update);
-      if (update.conversationId) {
-        this.convos.set(update.conversationId, runId);
-      }
-      return update;
-    }
-  }
-
-  removeRun(runId) {
-    if (this.runs.has(runId)) {
-      this.runs.delete(runId);
-    } else {
-      logger.error(`[api/app/clients/llm/RunManager] Run with ID ${runId} does not exist.`);
-    }
-  }
-
-  getAllRuns() {
-    return Array.from(this.runs.values());
-  }
-
-  getRunById(runId) {
-    return this.runs.get(runId);
-  }
-
-  getRunByConversationId(conversationId) {
-    const runId = this.convos.get(conversationId);
-    return { run: this.runs.get(runId), runId };
-  }
-
-  createCallbacks(metadata) {
-    return [
-      {
-        handleChatModelStart: createStartHandler({ ...metadata, manager: this }),
-        handleLLMEnd: async (output, runId, _parentRunId) => {
-          const { llmOutput, ..._output } = output;
-          logger.debug(`[RunManager] handleLLMEnd: ${JSON.stringify(metadata)}`, {
-            runId,
-            _parentRunId,
-            llmOutput,
-          });
-
-          if (metadata.context !== 'title') {
-            logger.debug('[RunManager] handleLLMEnd:', {
-              output: _output,
-            });
-          }
-
-          const { tokenUsage } = output.llmOutput;
-          const run = this.getRunById(runId);
-          this.removeRun(runId);
-
-          const txData = {
-            user: this.user,
-            model: run?.model ?? 'gpt-3.5-turbo',
-            ...metadata,
-          };
-
-          await spendTokens(txData, tokenUsage);
-        },
-        handleLLMError: async (err) => {
-          logger.error(`[RunManager] handleLLMError: ${JSON.stringify(metadata)}`, err);
-          if (metadata.context === 'title') {
-            return;
-          } else if (metadata.context === 'plugins') {
-            throw new Error(err);
-          }
-          const { conversationId } = metadata;
-          const { run } = this.getRunByConversationId(conversationId);
-          if (run && run.error) {
-            const { error } = run;
-            throw new Error(error);
-          }
-        },
-      },
-    ];
-  }
-}
-
-module.exports = RunManager;
@@ -1,81 +0,0 @@
-const { ChatOpenAI } = require('@langchain/openai');
-const { isEnabled, sanitizeModelName, constructAzureURL } = require('@librechat/api');
-
-/**
- * Creates a new instance of a language model (LLM) for chat interactions.
- *
- * @param {Object} options - The options for creating the LLM.
- * @param {ModelOptions} options.modelOptions - The options specific to the model, including modelName, temperature, presence_penalty, frequency_penalty, and other model-related settings.
- * @param {ConfigOptions} options.configOptions - Configuration options for the API requests, including proxy settings and custom headers.
- * @param {Callbacks} [options.callbacks] - Callback functions for managing the lifecycle of the LLM, including token buffers, context, and initial message count.
- * @param {boolean} [options.streaming=false] - Determines if the LLM should operate in streaming mode.
- * @param {string} options.openAIApiKey - The API key for OpenAI, used for authentication.
- * @param {AzureOptions} [options.azure={}] - Optional Azure-specific configurations. If provided, Azure configurations take precedence over OpenAI configurations.
- *
- * @returns {ChatOpenAI} An instance of the ChatOpenAI class, configured with the provided options.
- *
- * @example
- * const llm = createLLM({
- *   modelOptions: { modelName: 'gpt-4o-mini', temperature: 0.2 },
- *   configOptions: { basePath: 'https://example.api/path' },
- *   callbacks: { onMessage: handleMessage },
- *   openAIApiKey: 'your-api-key'
- * });
- */
-function createLLM({
-  modelOptions,
-  configOptions,
-  callbacks,
-  streaming = false,
-  openAIApiKey,
-  azure = {},
-}) {
-  let credentials = { openAIApiKey };
-  let configuration = {
-    apiKey: openAIApiKey,
-    ...(configOptions.basePath && { baseURL: configOptions.basePath }),
-  };
-
-  /** @type {AzureOptions} */
-  let azureOptions = {};
-  if (azure) {
-    const useModelName = isEnabled(process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME);
-
-    credentials = {};
-    configuration = {};
-    azureOptions = azure;
-
-    azureOptions.azureOpenAIApiDeploymentName = useModelName
-      ? sanitizeModelName(modelOptions.modelName)
-      : azureOptions.azureOpenAIApiDeploymentName;
-  }
-
-  if (azure && process.env.AZURE_OPENAI_DEFAULT_MODEL) {
-    modelOptions.modelName = process.env.AZURE_OPENAI_DEFAULT_MODEL;
-  }
-
-  if (azure && configOptions.basePath) {
-    const azureURL = constructAzureURL({
-      baseURL: configOptions.basePath,
-      azureOptions,
-    });
-    azureOptions.azureOpenAIBasePath = azureURL.split(
-      `/${azureOptions.azureOpenAIApiDeploymentName}`,
-    )[0];
-  }
-
-  return new ChatOpenAI(
-    {
-      streaming,
-      credentials,
-      configuration,
-      ...azureOptions,
-      ...modelOptions,
-      ...credentials,
-      callbacks,
-    },
-    configOptions,
-  );
-}
-
-module.exports = createLLM;
@@ -1,9 +1,5 @@
-const createLLM = require('./createLLM');
-const RunManager = require('./RunManager');
 const createCoherePayload = require('./createCoherePayload');

 module.exports = {
-  createLLM,
-  RunManager,
   createCoherePayload,
 };
@@ -1,31 +0,0 @@
-require('dotenv').config();
-const { ChatOpenAI } = require('@langchain/openai');
-const { getBufferString, ConversationSummaryBufferMemory } = require('langchain/memory');
-
-const chatPromptMemory = new ConversationSummaryBufferMemory({
-  llm: new ChatOpenAI({ modelName: 'gpt-4o-mini', temperature: 0 }),
-  maxTokenLimit: 10,
-  returnMessages: true,
-});
-
-(async () => {
-  await chatPromptMemory.saveContext({ input: 'hi my name\'s Danny' }, { output: 'whats up' });
-  await chatPromptMemory.saveContext({ input: 'not much you' }, { output: 'not much' });
-  await chatPromptMemory.saveContext(
-    { input: 'are you excited for the olympics?' },
-    { output: 'not really' },
-  );
-
-  // We can also utilize the predict_new_summary method directly.
-  const messages = await chatPromptMemory.chatHistory.getMessages();
-  console.log('MESSAGES\n\n');
-  console.log(JSON.stringify(messages));
-  const previous_summary = '';
-  const predictSummary = await chatPromptMemory.predictNewSummary(messages, previous_summary);
-  console.log('SUMMARY\n\n');
-  console.log(JSON.stringify(getBufferString([{ role: 'system', content: predictSummary }])));
-
-  // const { history } = await chatPromptMemory.loadMemoryVariables({});
-  // console.log('HISTORY\n\n');
-  // console.log(JSON.stringify(history));
-})();
@@ -1,7 +1,7 @@
+const { logger } = require('@librechat/data-schemas');
 const { ConversationSummaryBufferMemory, ChatMessageHistory } = require('langchain/memory');
 const { formatLangChainMessages, SUMMARY_PROMPT } = require('../prompts');
 const { predictNewSummary } = require('../chains');
-const { logger } = require('~/config');

 const createSummaryBufferMemory = ({ llm, prompt, messages, ...rest }) => {
   const chatHistory = new ChatMessageHistory(messages);
@@ -1,4 +1,4 @@
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');

 /**
  * The `addImages` function corrects any erroneous image URLs in the `responseMessage.text`
@@ -3,6 +3,7 @@ const { EModelEndpoint, ArtifactModes } = require('librechat-data-provider');
 const { generateShadcnPrompt } = require('~/app/clients/prompts/shadcn-docs/generate');
 const { components } = require('~/app/clients/prompts/shadcn-docs/components');

+/** @deprecated */
 // eslint-disable-next-line no-unused-vars
 const artifactsPromptV1 = dedent`The assistant can create and reference artifacts during conversations.

@@ -115,6 +116,7 @@ Here are some examples of correct usage of artifacts:
 </assistant_response>
 </example>
 </examples>`;

 const artifactsPrompt = dedent`The assistant can create and reference artifacts during conversations.

 Artifacts are for substantial, self-contained content that users might modify or reuse, displayed in a separate UI window for clarity.
@@ -165,6 +167,10 @@ Artifacts are for substantial, self-contained content that users might modify or
 - SVG: "image/svg+xml"
   - The user interface will render the Scalable Vector Graphics (SVG) image within the artifact tags.
   - The assistant should specify the viewbox of the SVG rather than defining a width/height
+- Markdown: "text/markdown" or "text/md"
+  - The user interface will render Markdown content placed within the artifact tags.
+  - Supports standard Markdown syntax including headers, lists, links, images, code blocks, tables, and more.
+  - Both "text/markdown" and "text/md" are accepted as valid MIME types for Markdown content.
 - Mermaid Diagrams: "application/vnd.mermaid"
   - The user interface will render Mermaid diagrams placed within the artifact tags.
 - React Components: "application/vnd.react"
@@ -366,6 +372,10 @@ Artifacts are for substantial, self-contained content that users might modify or
 - SVG: "image/svg+xml"
   - The user interface will render the Scalable Vector Graphics (SVG) image within the artifact tags.
   - The assistant should specify the viewbox of the SVG rather than defining a width/height
+- Markdown: "text/markdown" or "text/md"
+  - The user interface will render Markdown content placed within the artifact tags.
+  - Supports standard Markdown syntax including headers, lists, links, images, code blocks, tables, and more.
+  - Both "text/markdown" and "text/md" are accepted as valid MIME types for Markdown content.
 - Mermaid Diagrams: "application/vnd.mermaid"
   - The user interface will render Mermaid diagrams placed within the artifact tags.
 - React Components: "application/vnd.react"
@@ -1,7 +1,6 @@
 const axios = require('axios');
-const { isEnabled } = require('@librechat/api');
 const { logger } = require('@librechat/data-schemas');
-const { generateShortLivedToken } = require('~/server/services/AuthService');
+const { isEnabled, generateShortLivedToken } = require('@librechat/api');

 const footer = `Use the context as your learned knowledge to better answer the user.

@@ -245,7 +245,7 @@ describe('AnthropicClient', () => {
   });

   describe('Claude 4 model headers', () => {
-    it('should add "prompt-caching" beta header for claude-sonnet-4 model', () => {
+    it('should add "prompt-caching" and "context-1m" beta headers for claude-sonnet-4 model', () => {
       const client = new AnthropicClient('test-api-key');
       const modelOptions = {
         model: 'claude-sonnet-4-20250514',
@@ -255,10 +255,30 @@ describe('AnthropicClient', () => {
       expect(anthropicClient._options.defaultHeaders).toBeDefined();
       expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
       expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
-        'prompt-caching-2024-07-31',
+        'prompt-caching-2024-07-31,context-1m-2025-08-07',
       );
     });

+    it('should add "prompt-caching" and "context-1m" beta headers for claude-sonnet-4 model formats', () => {
+      const client = new AnthropicClient('test-api-key');
+      const modelVariations = [
+        'claude-sonnet-4-20250514',
+        'claude-sonnet-4-latest',
+        'anthropic/claude-sonnet-4-20250514',
+      ];
+
+      modelVariations.forEach((model) => {
+        const modelOptions = { model };
+        client.setOptions({ modelOptions, promptCache: true });
+        const anthropicClient = client.getClient(modelOptions);
+        expect(anthropicClient._options.defaultHeaders).toBeDefined();
+        expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
+        expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
+          'prompt-caching-2024-07-31,context-1m-2025-08-07',
+        );
+      });
+    });
+
     it('should add "prompt-caching" beta header for claude-opus-4 model', () => {
       const client = new AnthropicClient('test-api-key');
       const modelOptions = {
@@ -273,20 +293,6 @@ describe('AnthropicClient', () => {
       );
     });

-    it('should add "prompt-caching" beta header for claude-4-sonnet model', () => {
-      const client = new AnthropicClient('test-api-key');
-      const modelOptions = {
-        model: 'claude-4-sonnet-20250514',
-      };
-      client.setOptions({ modelOptions, promptCache: true });
-      const anthropicClient = client.getClient(modelOptions);
-      expect(anthropicClient._options.defaultHeaders).toBeDefined();
-      expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
-      expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
-        'prompt-caching-2024-07-31',
-      );
-    });
-
     it('should add "prompt-caching" beta header for claude-4-opus model', () => {
       const client = new AnthropicClient('test-api-key');
       const modelOptions = {
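Note: the updated expectations encode how multiple Anthropic beta features travel over the wire, as a single comma-separated `anthropic-beta` header. A sketch of the header these tests assert (values copied from the test strings):

```js
// Comma-joined beta flags, matching the assertions above.
const defaultHeaders = {
  'anthropic-beta': ['prompt-caching-2024-07-31', 'context-1m-2025-08-07'].join(','),
};
console.log(defaultHeaders['anthropic-beta']);
// "prompt-caching-2024-07-31,context-1m-2025-08-07"
```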
@@ -2,6 +2,14 @@ const { Constants } = require('librechat-data-provider');
 const { initializeFakeClient } = require('./FakeClient');

 jest.mock('~/db/connect');
+jest.mock('~/server/services/Config', () => ({
+  getAppConfig: jest.fn().mockResolvedValue({
+    // Default app config for tests
+    paths: { uploads: '/tmp' },
+    fileStrategy: 'local',
+    memory: { disabled: false },
+  }),
+}));
 jest.mock('~/models', () => ({
   User: jest.fn(),
   Key: jest.fn(),
@@ -579,6 +587,8 @@ describe('BaseClient', () => {
       expect(onStart).toHaveBeenCalledWith(
         expect.objectContaining({ text: 'Hello, world!' }),
         expect.any(String),
+        /** `isNewConvo` */
+        true,
       );
     });

@@ -1,5 +1,5 @@
+const { getModelMaxTokens } = require('@librechat/api');
 const BaseClient = require('../BaseClient');
-const { getModelMaxTokens } = require('../../../utils');

 class FakeClient extends BaseClient {
   constructor(apiKey, options = {}) {
@@ -1,4 +1,4 @@
-const availableTools = require('./manifest.json');
+const manifest = require('./manifest');

 // Structured Tools
 const DALLE3 = require('./structured/DALLE3');
@@ -13,23 +13,8 @@ const TraversaalSearch = require('./structured/TraversaalSearch');
 const createOpenAIImageTools = require('./structured/OpenAIImageTools');
 const TavilySearchResults = require('./structured/TavilySearchResults');

-/** @type {Record<string, TPlugin | undefined>} */
-const manifestToolMap = {};
-
-/** @type {Array<TPlugin>} */
-const toolkits = [];
-
-availableTools.forEach((tool) => {
-  manifestToolMap[tool.pluginKey] = tool;
-  if (tool.toolkit === true) {
-    toolkits.push(tool);
-  }
-});
-
 module.exports = {
-  toolkits,
-  availableTools,
-  manifestToolMap,
+  ...manifest,
   // Structured Tools
   DALLE3,
   FluxAPI,
api/app/clients/tools/manifest.js (new file, 20 lines)
@@ -0,0 +1,20 @@
+const availableTools = require('./manifest.json');
+
+/** @type {Record<string, TPlugin | undefined>} */
+const manifestToolMap = {};
+
+/** @type {Array<TPlugin>} */
+const toolkits = [];
+
+availableTools.forEach((tool) => {
+  manifestToolMap[tool.pluginKey] = tool;
+  if (tool.toolkit === true) {
+    toolkits.push(tool);
+  }
+});
+
+module.exports = {
+  toolkits,
+  availableTools,
+  manifestToolMap,
+};
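Note: a hedged usage sketch of the extracted manifest module: consumers can now import the precomputed maps directly. The `open_weather` key is taken from the manifest.json hunks below; treat the specific lookup as illustrative:

```js
const { availableTools, manifestToolMap, toolkits } = require('./manifest');

// Look up one plugin definition by its key and inspect the toolkit entries.
const openWeather = manifestToolMap['open_weather'];
console.log(openWeather?.description, toolkits.length, availableTools.length);
```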
@@ -49,7 +49,7 @@
     "pluginKey": "image_gen_oai",
     "toolkit": true,
     "description": "Image Generation and Editing using OpenAI's latest state-of-the-art models",
-    "icon": "/assets/image_gen_oai.png",
+    "icon": "assets/image_gen_oai.png",
     "authConfig": [
       {
         "authField": "IMAGE_GEN_OAI_API_KEY",
@@ -75,7 +75,7 @@
     "name": "Browser",
     "pluginKey": "web-browser",
     "description": "Scrape and summarize webpage data",
-    "icon": "/assets/web-browser.svg",
+    "icon": "assets/web-browser.svg",
     "authConfig": [
       {
         "authField": "OPENAI_API_KEY",
@@ -170,7 +170,7 @@
     "name": "OpenWeather",
     "pluginKey": "open_weather",
     "description": "Get weather forecasts and historical data from the OpenWeather API",
-    "icon": "/assets/openweather.png",
+    "icon": "assets/openweather.png",
     "authConfig": [
       {
         "authField": "OPENWEATHER_API_KEY",
@@ -1,7 +1,7 @@
 const { z } = require('zod');
 const { Tool } = require('@langchain/core/tools');
+const { logger } = require('@librechat/data-schemas');
 const { SearchClient, AzureKeyCredential } = require('@azure/search-documents');
-const { logger } = require('~/config');

 class AzureAISearch extends Tool {
   // Constants for default values
@@ -18,7 +18,7 @@ class AzureAISearch extends Tool {
     super();
     this.name = 'azure-ai-search';
     this.description =
-      'Use the \'azure-ai-search\' tool to retrieve search results relevant to your input';
+      "Use the 'azure-ai-search' tool to retrieve search results relevant to your input";
     /* Used to initialize the Tool without necessary variables. */
     this.override = fields.override ?? false;

@@ -1,14 +1,13 @@
 const { z } = require('zod');
 const path = require('path');
 const OpenAI = require('openai');
-const fetch = require('node-fetch');
 const { v4: uuidv4 } = require('uuid');
-const { ProxyAgent } = require('undici');
+const { ProxyAgent, fetch } = require('undici');
 const { Tool } = require('@langchain/core/tools');
+const { logger } = require('@librechat/data-schemas');
+const { getImageBasename } = require('@librechat/api');
 const { FileContext, ContentTypes } = require('librechat-data-provider');
-const { getImageBasename } = require('~/server/services/Files/images');
 const extractBaseURL = require('~/utils/extractBaseURL');
-const logger = require('~/config/winston');

 const displayMessage =
   "DALL-E displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.";
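Note: the DALL·E import change drops `node-fetch` in favor of undici, which ships both `fetch` and `ProxyAgent`, so one dependency covers proxied requests. A minimal sketch of that pairing; the function name and URLs are placeholders:

```js
const { ProxyAgent, fetch } = require('undici');

// Fetch a resource, optionally through an HTTP(S) proxy via undici's
// dispatcher option; proxyUrl stands in for a configured proxy address.
async function fetchImage(url, proxyUrl) {
  const dispatcher = proxyUrl ? new ProxyAgent(proxyUrl) : undefined;
  const res = await fetch(url, { dispatcher });
  return Buffer.from(await res.arrayBuffer());
}

module.exports = { fetchImage };
```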
@@ -3,12 +3,12 @@ const axios = require('axios');
 const fetch = require('node-fetch');
 const { v4: uuidv4 } = require('uuid');
 const { Tool } = require('@langchain/core/tools');
+const { logger } = require('@librechat/data-schemas');
 const { HttpsProxyAgent } = require('https-proxy-agent');
 const { FileContext, ContentTypes } = require('librechat-data-provider');
-const { logger } = require('~/config');

 const displayMessage =
-  'Flux displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';
+  "Flux displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.";

 /**
  * FluxAPI - A tool for generating high-quality images from text prompts using the Flux API.
@@ -1,69 +1,16 @@
-const { z } = require('zod');
 const axios = require('axios');
 const { v4 } = require('uuid');
 const OpenAI = require('openai');
 const FormData = require('form-data');
 const { ProxyAgent } = require('undici');
 const { tool } = require('@langchain/core/tools');
-const { logAxiosError } = require('@librechat/api');
 const { logger } = require('@librechat/data-schemas');
+const { logAxiosError, oaiToolkit } = require('@librechat/api');
 const { ContentTypes, EImageOutputType } = require('librechat-data-provider');
 const { getStrategyFunctions } = require('~/server/services/Files/strategies');
-const { extractBaseURL } = require('~/utils');
+const extractBaseURL = require('~/utils/extractBaseURL');
 const { getFiles } = require('~/models/File');

-/** Default descriptions for image generation tool */
-const DEFAULT_IMAGE_GEN_DESCRIPTION = `
-Generates high-quality, original images based solely on text, not using any uploaded reference images.
-
-When to use \`image_gen_oai\`:
-- To create entirely new images from detailed text descriptions that do NOT reference any image files.
-
-When NOT to use \`image_gen_oai\`:
-- If the user has uploaded any images and requests modifications, enhancements, or remixing based on those uploads → use \`image_edit_oai\` instead.
-
-Generated image IDs will be returned in the response, so you can refer to them in future requests made to \`image_edit_oai\`.
-`.trim();
-
-/** Default description for image editing tool */
-const DEFAULT_IMAGE_EDIT_DESCRIPTION =
-  `Generates high-quality, original images based on text and one or more uploaded/referenced images.
-
-When to use \`image_edit_oai\`:
-- The user wants to modify, extend, or remix one **or more** uploaded images, either:
-  - Previously generated, or in the current request (both to be included in the \`image_ids\` array).
-- Always when the user refers to uploaded images for editing, enhancement, remixing, style transfer, or combining elements.
-- Any current or existing images are to be used as visual guides.
-- If there are any files in the current request, they are more likely than not expected as references for image edit requests.
-
-When NOT to use \`image_edit_oai\`:
-- Brand-new generations that do not rely on an existing image → use \`image_gen_oai\` instead.
-
-Both generated and referenced image IDs will be returned in the response, so you can refer to them in future requests made to \`image_edit_oai\`.
-`.trim();
-
-/** Default prompt descriptions */
-const DEFAULT_IMAGE_GEN_PROMPT_DESCRIPTION = `Describe the image you want in detail.
-Be highly specific—break your idea into layers:
-(1) main concept and subject,
-(2) composition and position,
-(3) lighting and mood,
-(4) style, medium, or camera details,
-(5) important features (age, expression, clothing, etc.),
-(6) background.
-Use positive, descriptive language and specify what should be included, not what to avoid.
-List number and characteristics of people/objects, and mention style/technical requirements (e.g., "DSLR photo, 85mm lens, golden hour").
-Do not reference any uploaded images—use for new image creation from text only.`;
-
-const DEFAULT_IMAGE_EDIT_PROMPT_DESCRIPTION = `Describe the changes, enhancements, or new ideas to apply to the uploaded image(s).
-Be highly specific—break your request into layers:
-(1) main concept or transformation,
-(2) specific edits/replacements or composition guidance,
-(3) desired style, mood, or technique,
-(4) features/items to keep, change, or add (such as objects, people, clothing, lighting, etc.).
-Use positive, descriptive language and clarify what should be included or changed, not what to avoid.
-Always base this prompt on the most recently uploaded reference images.`;
-
 const displayMessage =
   "The tool displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.";

@@ -91,22 +38,6 @@ function returnValue(value) {
   return value;
 }

-const getImageGenDescription = () => {
-  return process.env.IMAGE_GEN_OAI_DESCRIPTION || DEFAULT_IMAGE_GEN_DESCRIPTION;
-};
-
-const getImageEditDescription = () => {
-  return process.env.IMAGE_EDIT_OAI_DESCRIPTION || DEFAULT_IMAGE_EDIT_DESCRIPTION;
-};
-
-const getImageGenPromptDescription = () => {
-  return process.env.IMAGE_GEN_OAI_PROMPT_DESCRIPTION || DEFAULT_IMAGE_GEN_PROMPT_DESCRIPTION;
-};
-
-const getImageEditPromptDescription = () => {
-  return process.env.IMAGE_EDIT_OAI_PROMPT_DESCRIPTION || DEFAULT_IMAGE_EDIT_PROMPT_DESCRIPTION;
-};
-
 function createAbortHandler() {
   return function () {
     logger.debug('[ImageGenOAI] Image generation aborted');
@@ -121,7 +52,9 @@ function createAbortHandler() {
  * @param {string} fields.IMAGE_GEN_OAI_API_KEY - The OpenAI API key
  * @param {boolean} [fields.override] - Whether to override the API key check, necessary for app initialization
  * @param {MongoFile[]} [fields.imageFiles] - The images to be used for editing
- * @returns {Array} - Array of image tools
+ * @param {string} [fields.imageOutputType] - The image output type configuration
+ * @param {string} [fields.fileStrategy] - The file storage strategy
+ * @returns {Array<ReturnType<tool>>} - Array of image tools
  */
 function createOpenAIImageTools(fields = {}) {
   /** @type {boolean} Used to initialize the Tool without necessary variables. */
@@ -131,8 +64,8 @@ function createOpenAIImageTools(fields = {}) {
     throw new Error('This tool is only available for agents.');
   }
   const { req } = fields;
-  const imageOutputType = req?.app.locals.imageOutputType || EImageOutputType.PNG;
-  const appFileStrategy = req?.app.locals.fileStrategy;
+  const imageOutputType = fields.imageOutputType || EImageOutputType.PNG;
+  const appFileStrategy = fields.fileStrategy;

   const getApiKey = () => {
     const apiKey = process.env.IMAGE_GEN_OAI_API_KEY ?? '';
@@ -285,46 +218,7 @@ Error Message: ${error.message}`);
       ];
       return [response, { content, file_ids }];
     },
-    {
-      name: 'image_gen_oai',
-      description: getImageGenDescription(),
-      schema: z.object({
-        prompt: z.string().max(32000).describe(getImageGenPromptDescription()),
-        background: z
-          .enum(['transparent', 'opaque', 'auto'])
-          .optional()
-          .describe(
-            'Sets transparency for the background. Must be one of transparent, opaque or auto (default). When transparent, the output format should be png or webp.',
-          ),
-        /*
-        n: z
-          .number()
-          .int()
-          .min(1)
-          .max(10)
-          .optional()
-          .describe('The number of images to generate. Must be between 1 and 10.'),
-        output_compression: z
-          .number()
-          .int()
-          .min(0)
-          .max(100)
-          .optional()
-          .describe('The compression level (0-100%) for webp or jpeg formats. Defaults to 100.'),
-        */
-        quality: z
-          .enum(['auto', 'high', 'medium', 'low'])
-          .optional()
-          .describe('The quality of the image. One of auto (default), high, medium, or low.'),
-        size: z
-          .enum(['auto', '1024x1024', '1536x1024', '1024x1536'])
-          .optional()
-          .describe(
-            'The size of the generated image. One of 1024x1024, 1536x1024 (landscape), 1024x1536 (portrait), or auto (default).',
-          ),
-      }),
-      responseFormat: 'content_and_artifact',
-    },
+    oaiToolkit.image_gen_oai,
   );

   /**
@@ -517,48 +411,7 @@ Error Message: ${error.message || 'Unknown error'}`);
         }
       }
     },
-    {
-      name: 'image_edit_oai',
-      description: getImageEditDescription(),
-      schema: z.object({
-        image_ids: z
-          .array(z.string())
-          .min(1)
-          .describe(
-            `
-IDs (image ID strings) of previously generated or uploaded images that should guide the edit.
-
-Guidelines:
-- If the user's request depends on any prior image(s), copy their image IDs into the \`image_ids\` array (in the same order the user refers to them).
-- Never invent or hallucinate IDs; only use IDs that are still visible in the conversation context.
-- If no earlier image is relevant, omit the field entirely.
-`.trim(),
-          ),
-        prompt: z.string().max(32000).describe(getImageEditPromptDescription()),
-        /*
-        n: z
-          .number()
-          .int()
-          .min(1)
-          .max(10)
-          .optional()
-          .describe('The number of images to generate. Must be between 1 and 10. Defaults to 1.'),
-        */
-        quality: z
-          .enum(['auto', 'high', 'medium', 'low'])
-          .optional()
-          .describe(
-            'The quality of the image. One of auto (default), high, medium, or low. High/medium/low only supported for gpt-image-1.',
-          ),
-        size: z
-          .enum(['auto', '1024x1024', '1536x1024', '1024x1536', '256x256', '512x512'])
-          .optional()
-          .describe(
-            'The size of the generated images. For gpt-image-1: auto (default), 1024x1024, 1536x1024, 1024x1536. For dall-e-2: 256x256, 512x512, 1024x1024.',
-          ),
-      }),
-      responseFormat: 'content_and_artifact',
-    },
+    oaiToolkit.image_edit_oai,
   );

   return [imageGenTool, imageEditTool];
@@ -6,19 +6,19 @@ const axios = require('axios');
 const sharp = require('sharp');
 const { v4: uuidv4 } = require('uuid');
 const { Tool } = require('@langchain/core/tools');
+const { logger } = require('@librechat/data-schemas');
 const { FileContext, ContentTypes } = require('librechat-data-provider');
 const paths = require('~/config/paths');
-const { logger } = require('~/config');

 const displayMessage =
-  'Stable Diffusion displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';
+  "Stable Diffusion displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.";

 class StableDiffusionAPI extends Tool {
   constructor(fields) {
     super();
     /** @type {string} User ID */
     this.userId = fields.userId;
-    /** @type {Express.Request | undefined} Express Request object, only provided by ToolService */
+    /** @type {ServerRequest | undefined} Express Request object, only provided by ToolService */
     this.req = fields.req;
     /** @type {boolean} Used to initialize the Tool without necessary variables. */
     this.override = fields.override ?? false;
@@ -44,7 +44,7 @@ class StableDiffusionAPI extends Tool {
     // "negative_prompt":"semi-realistic, cgi, 3d, render, sketch, cartoon, drawing, anime, out of frame, low quality, ugly, mutation, deformed"
     // - Generate images only once per human query unless explicitly requested by the user`;
     this.description =
-      'You can generate images using text with \'stable-diffusion\'. This tool is exclusively for visual content.';
+      "You can generate images using text with 'stable-diffusion'. This tool is exclusively for visual content.";
     this.schema = z.object({
       prompt: z
        .string()
@@ -1,7 +1,7 @@
 const { z } = require('zod');
 const { Tool } = require('@langchain/core/tools');
+const { logger } = require('@librechat/data-schemas');
 const { getEnvironmentVariable } = require('@langchain/core/utils/env');
-const { logger } = require('~/config');

 /**
  * Tool for the Traversaal AI search API, Ares.
@@ -21,7 +21,7 @@ class TraversaalSearch extends Tool {
       query: z
         .string()
         .describe(
-          'A properly written sentence to be interpreted by an AI to search the web according to the user\'s request.',
+          "A properly written sentence to be interpreted by an AI to search the web according to the user's request.",
         ),
     });

@@ -38,7 +38,6 @@ class TraversaalSearch extends Tool {
     return apiKey;
   }

-  // eslint-disable-next-line no-unused-vars
   async _call({ query }, _runManager) {
     const body = {
       query: [query],
@@ -1,8 +1,8 @@
 /* eslint-disable no-useless-escape */
-const axios = require('axios');
 const { z } = require('zod');
+const axios = require('axios');
 const { Tool } = require('@langchain/core/tools');
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');

 class WolframAlphaAPI extends Tool {
   constructor(fields) {
@@ -1,9 +1,9 @@
|
|||||||
const { z } = require('zod');
|
const { ytToolkit } = require('@librechat/api');
|
||||||
const { tool } = require('@langchain/core/tools');
|
const { tool } = require('@langchain/core/tools');
|
||||||
const { youtube } = require('@googleapis/youtube');
|
const { youtube } = require('@googleapis/youtube');
|
||||||
|
const { logger } = require('@librechat/data-schemas');
|
||||||
const { YoutubeTranscript } = require('youtube-transcript');
|
const { YoutubeTranscript } = require('youtube-transcript');
|
||||||
const { getApiKey } = require('./credentials');
|
const { getApiKey } = require('./credentials');
|
||||||
const { logger } = require('~/config');
|
|
||||||
|
|
||||||
function extractVideoId(url) {
|
function extractVideoId(url) {
|
||||||
const rawIdRegex = /^[a-zA-Z0-9_-]{11}$/;
|
const rawIdRegex = /^[a-zA-Z0-9_-]{11}$/;
|
||||||
@@ -29,7 +29,7 @@ function parseTranscript(transcriptResponse) {
|
|||||||
.map((entry) => entry.text.trim())
|
.map((entry) => entry.text.trim())
|
||||||
.filter((text) => text)
|
.filter((text) => text)
|
||||||
.join(' ')
|
.join(' ')
|
||||||
.replaceAll('&#39;', '\'');
|
.replaceAll('&#39;', "'");
|
||||||
}
|
}
|
||||||
|
|
||||||
function createYouTubeTools(fields = {}) {
|
function createYouTubeTools(fields = {}) {
|
||||||
@@ -42,160 +42,94 @@ function createYouTubeTools(fields = {}) {
     auth: apiKey,
   });
 
-  const searchTool = tool(
-    async ({ query, maxResults = 5 }) => {
-      const response = await youtubeClient.search.list({
-        part: 'snippet',
-        q: query,
-        type: 'video',
-        maxResults: maxResults || 5,
-      });
-      const result = response.data.items.map((item) => ({
-        title: item.snippet.title,
-        description: item.snippet.description,
-        url: `https://www.youtube.com/watch?v=${item.id.videoId}`,
-      }));
-      return JSON.stringify(result, null, 2);
-    },
-    {
-      name: 'youtube_search',
-      description: `Search for YouTube videos by keyword or phrase.
-      - Required: query (search terms to find videos)
-      - Optional: maxResults (number of videos to return, 1-50, default: 5)
-      - Returns: List of videos with titles, descriptions, and URLs
-      - Use for: Finding specific videos, exploring content, research
-      Example: query="cooking pasta tutorials" maxResults=3`,
-      schema: z.object({
-        query: z.string().describe('Search query terms'),
-        maxResults: z.number().int().min(1).max(50).optional().describe('Number of results (1-50)'),
-      }),
-    },
-  );
+  const searchTool = tool(async ({ query, maxResults = 5 }) => {
+    const response = await youtubeClient.search.list({
+      part: 'snippet',
+      q: query,
+      type: 'video',
+      maxResults: maxResults || 5,
+    });
+    const result = response.data.items.map((item) => ({
+      title: item.snippet.title,
+      description: item.snippet.description,
+      url: `https://www.youtube.com/watch?v=${item.id.videoId}`,
+    }));
+    return JSON.stringify(result, null, 2);
+  }, ytToolkit.youtube_search);
 
-  const infoTool = tool(
-    async ({ url }) => {
-      const videoId = extractVideoId(url);
-      if (!videoId) {
-        throw new Error('Invalid YouTube URL or video ID');
-      }
-
-      const response = await youtubeClient.videos.list({
-        part: 'snippet,statistics',
-        id: videoId,
-      });
-
-      if (!response.data.items?.length) {
-        throw new Error('Video not found');
-      }
-      const video = response.data.items[0];
-
-      const result = {
-        title: video.snippet.title,
-        description: video.snippet.description,
-        views: video.statistics.viewCount,
-        likes: video.statistics.likeCount,
-        comments: video.statistics.commentCount,
-      };
-      return JSON.stringify(result, null, 2);
-    },
-    {
-      name: 'youtube_info',
-      description: `Get detailed metadata and statistics for a specific YouTube video.
-      - Required: url (full YouTube URL or video ID)
-      - Returns: Video title, description, view count, like count, comment count
-      - Use for: Getting video metrics and basic metadata
-      - DO NOT USE FOR VIDEO SUMMARIES, USE TRANSCRIPTS FOR COMPREHENSIVE ANALYSIS
-      - Accepts both full URLs and video IDs
-      Example: url="https://youtube.com/watch?v=abc123" or url="abc123"`,
-      schema: z.object({
-        url: z.string().describe('YouTube video URL or ID'),
-      }),
-    },
-  );
+  const infoTool = tool(async ({ url }) => {
+    const videoId = extractVideoId(url);
+    if (!videoId) {
+      throw new Error('Invalid YouTube URL or video ID');
+    }
+
+    const response = await youtubeClient.videos.list({
+      part: 'snippet,statistics',
+      id: videoId,
+    });
+
+    if (!response.data.items?.length) {
+      throw new Error('Video not found');
+    }
+    const video = response.data.items[0];
+
+    const result = {
+      title: video.snippet.title,
+      description: video.snippet.description,
+      views: video.statistics.viewCount,
+      likes: video.statistics.likeCount,
+      comments: video.statistics.commentCount,
+    };
+    return JSON.stringify(result, null, 2);
+  }, ytToolkit.youtube_info);
 
-  const commentsTool = tool(
-    async ({ url, maxResults = 10 }) => {
-      const videoId = extractVideoId(url);
-      if (!videoId) {
-        throw new Error('Invalid YouTube URL or video ID');
-      }
-
-      const response = await youtubeClient.commentThreads.list({
-        part: 'snippet',
-        videoId,
-        maxResults: maxResults || 10,
-      });
-
-      const result = response.data.items.map((item) => ({
-        author: item.snippet.topLevelComment.snippet.authorDisplayName,
-        text: item.snippet.topLevelComment.snippet.textDisplay,
-        likes: item.snippet.topLevelComment.snippet.likeCount,
-      }));
-      return JSON.stringify(result, null, 2);
-    },
-    {
-      name: 'youtube_comments',
-      description: `Retrieve top-level comments from a YouTube video.
-      - Required: url (full YouTube URL or video ID)
-      - Optional: maxResults (number of comments, 1-50, default: 10)
-      - Returns: Comment text, author names, like counts
-      - Use for: Sentiment analysis, audience feedback, engagement review
-      Example: url="abc123" maxResults=20`,
-      schema: z.object({
-        url: z.string().describe('YouTube video URL or ID'),
-        maxResults: z
-          .number()
-          .int()
-          .min(1)
-          .max(50)
-          .optional()
-          .describe('Number of comments to retrieve'),
-      }),
-    },
-  );
+  const commentsTool = tool(async ({ url, maxResults = 10 }) => {
+    const videoId = extractVideoId(url);
+    if (!videoId) {
+      throw new Error('Invalid YouTube URL or video ID');
+    }
+
+    const response = await youtubeClient.commentThreads.list({
+      part: 'snippet',
+      videoId,
+      maxResults: maxResults || 10,
+    });
+
+    const result = response.data.items.map((item) => ({
+      author: item.snippet.topLevelComment.snippet.authorDisplayName,
+      text: item.snippet.topLevelComment.snippet.textDisplay,
+      likes: item.snippet.topLevelComment.snippet.likeCount,
+    }));
+    return JSON.stringify(result, null, 2);
+  }, ytToolkit.youtube_comments);
 
-  const transcriptTool = tool(
-    async ({ url }) => {
-      const videoId = extractVideoId(url);
-      if (!videoId) {
-        throw new Error('Invalid YouTube URL or video ID');
-      }
-
-      try {
-        try {
-          const transcript = await YoutubeTranscript.fetchTranscript(videoId, { lang: 'en' });
-          return parseTranscript(transcript);
-        } catch (e) {
-          logger.error(e);
-        }
-
-        try {
-          const transcript = await YoutubeTranscript.fetchTranscript(videoId, { lang: 'de' });
-          return parseTranscript(transcript);
-        } catch (e) {
-          logger.error(e);
-        }
-
-        const transcript = await YoutubeTranscript.fetchTranscript(videoId);
-        return parseTranscript(transcript);
-      } catch (error) {
-        throw new Error(`Failed to fetch transcript: ${error.message}`);
-      }
-    },
-    {
-      name: 'youtube_transcript',
-      description: `Fetch and parse the transcript/captions of a YouTube video.
-      - Required: url (full YouTube URL or video ID)
-      - Returns: Full video transcript as plain text
-      - Use for: Content analysis, summarization, translation reference
-      - This is the "Go-to" tool for analyzing actual video content
-      - Attempts to fetch English first, then German, then any available language
-      Example: url="https://youtube.com/watch?v=abc123"`,
-      schema: z.object({
-        url: z.string().describe('YouTube video URL or ID'),
-      }),
-    },
-  );
+  const transcriptTool = tool(async ({ url }) => {
+    const videoId = extractVideoId(url);
+    if (!videoId) {
+      throw new Error('Invalid YouTube URL or video ID');
+    }
+
+    try {
+      try {
+        const transcript = await YoutubeTranscript.fetchTranscript(videoId, { lang: 'en' });
+        return parseTranscript(transcript);
+      } catch (e) {
+        logger.error(e);
+      }
+
+      try {
+        const transcript = await YoutubeTranscript.fetchTranscript(videoId, { lang: 'de' });
+        return parseTranscript(transcript);
+      } catch (e) {
+        logger.error(e);
+      }
+
+      const transcript = await YoutubeTranscript.fetchTranscript(videoId);
+      return parseTranscript(transcript);
+    } catch (error) {
+      throw new Error(`Failed to fetch transcript: ${error.message}`);
+    }
+  }, ytToolkit.youtube_transcript);
 
   return [searchTool, infoTool, commentsTool, transcriptTool];
 }
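
The hunk above replaces each inline `{ name, description, schema }` config with a shared `ytToolkit` definition passed as the second argument to `tool()`. A minimal sketch of that pattern, with a hypothetical toolkit entry standing in for the actual `ytToolkit` shape:

```js
const { z } = require('zod');
const { tool } = require('@langchain/core/tools');

// Hypothetical toolkit module: name/description/schema defined once, so
// every consumer of the tool stays in sync with a single definition.
const myToolkit = {
  echo: {
    name: 'echo',
    description: 'Returns the input string unchanged.',
    schema: z.object({ text: z.string().describe('Text to echo back') }),
  },
};

// The handler now only contains behavior; metadata comes from the toolkit entry.
const echoTool = tool(async ({ text }) => text, myToolkit.echo);
```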
@@ -1,43 +1,9 @@
 const DALLE3 = require('../DALLE3');
 const { ProxyAgent } = require('undici');
 
+jest.mock('tiktoken');
 const processFileURL = jest.fn();
 
-jest.mock('~/server/services/Files/images', () => ({
-  getImageBasename: jest.fn().mockImplementation((url) => {
-    const parts = url.split('/');
-    const lastPart = parts.pop();
-    const imageExtensionRegex = /\.(jpg|jpeg|png|gif|bmp|tiff|svg)$/i;
-    if (imageExtensionRegex.test(lastPart)) {
-      return lastPart;
-    }
-    return '';
-  }),
-}));
-
-jest.mock('fs', () => {
-  return {
-    existsSync: jest.fn(),
-    mkdirSync: jest.fn(),
-    promises: {
-      writeFile: jest.fn(),
-      readFile: jest.fn(),
-      unlink: jest.fn(),
-    },
-  };
-});
-
-jest.mock('path', () => {
-  return {
-    resolve: jest.fn(),
-    join: jest.fn(),
-    relative: jest.fn(),
-    extname: jest.fn().mockImplementation((filename) => {
-      return filename.slice(filename.lastIndexOf('.'));
-    }),
-  };
-});
-
 describe('DALLE3 Proxy Configuration', () => {
   let originalEnv;
 
@@ -1,9 +1,8 @@
 const OpenAI = require('openai');
+const { logger } = require('@librechat/data-schemas');
 const DALLE3 = require('../DALLE3');
-const logger = require('~/config/winston');
 
 jest.mock('openai');
 
 jest.mock('@librechat/data-schemas', () => {
   return {
     logger: {
@@ -26,25 +25,6 @@ jest.mock('tiktoken', () => {
 
 const processFileURL = jest.fn();
 
-jest.mock('~/server/services/Files/images', () => ({
-  getImageBasename: jest.fn().mockImplementation((url) => {
-    // Split the URL by '/'
-    const parts = url.split('/');
-
-    // Get the last part of the URL
-    const lastPart = parts.pop();
-
-    // Check if the last part of the URL matches the image extension regex
-    const imageExtensionRegex = /\.(jpg|jpeg|png|gif|bmp|tiff|svg)$/i;
-    if (imageExtensionRegex.test(lastPart)) {
-      return lastPart;
-    }
-
-    // If the regex test fails, return an empty string
-    return '';
-  }),
-}));
-
 const generate = jest.fn();
 OpenAI.mockImplementation(() => ({
   images: {
@@ -2,8 +2,9 @@ const { z } = require('zod');
 const axios = require('axios');
 const { tool } = require('@langchain/core/tools');
 const { logger } = require('@librechat/data-schemas');
+const { generateShortLivedToken } = require('@librechat/api');
 const { Tools, EToolResources } = require('librechat-data-provider');
-const { generateShortLivedToken } = require('~/server/services/AuthService');
+const { filterFilesByAgentAccess } = require('~/server/services/Files/permissions');
 const { getFiles } = require('~/models/File');
 
 /**
@@ -22,14 +23,24 @@ const primeFiles = async (options) => {
   const file_ids = tool_resources?.[EToolResources.file_search]?.file_ids ?? [];
   const agentResourceIds = new Set(file_ids);
   const resourceFiles = tool_resources?.[EToolResources.file_search]?.files ?? [];
-  const dbFiles = (
-    (await getFiles(
-      { file_id: { $in: file_ids } },
-      null,
-      { text: 0 },
-      { userId: req?.user?.id, agentId },
-    )) ?? []
-  ).concat(resourceFiles);
+  // Get all files first
+  const allFiles = (await getFiles({ file_id: { $in: file_ids } }, null, { text: 0 })) ?? [];
+
+  // Filter by access if user and agent are provided
+  let dbFiles;
+  if (req?.user?.id && agentId) {
+    dbFiles = await filterFilesByAgentAccess({
+      files: allFiles,
+      userId: req.user.id,
+      role: req.user.role,
+      agentId,
+    });
+  } else {
+    dbFiles = allFiles;
+  }
+
+  dbFiles = dbFiles.concat(resourceFiles);
 
   let toolContext = `- Note: Semantic search is available through the ${Tools.file_search} tool but no files are currently loaded. Request the user to upload documents to search through.`;
 
@@ -57,18 +68,19 @@ const primeFiles = async (options) => {
 /**
  *
  * @param {Object} options
- * @param {ServerRequest} options.req
+ * @param {string} options.userId
  * @param {Array<{ file_id: string; filename: string }>} options.files
  * @param {string} [options.entity_id]
+ * @param {boolean} [options.fileCitations=false] - Whether to include citation instructions
  * @returns
  */
-const createFileSearchTool = async ({ req, files, entity_id }) => {
+const createFileSearchTool = async ({ userId, files, entity_id, fileCitations = false }) => {
   return tool(
     async ({ query }) => {
       if (files.length === 0) {
         return 'No files to search. Instruct the user to add files for the search.';
       }
-      const jwtToken = generateShortLivedToken(req.user.id);
+      const jwtToken = generateShortLivedToken(userId);
       if (!jwtToken) {
         return 'There was an error authenticating the file search request.';
       }
@@ -114,11 +126,13 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
       }
 
       const formattedResults = validResults
-        .flatMap((result) =>
+        .flatMap((result, fileIndex) =>
           result.data.map(([docInfo, distance]) => ({
             filename: docInfo.metadata.source.split('/').pop(),
             content: docInfo.page_content,
             distance,
+            file_id: files[fileIndex]?.file_id,
+            page: docInfo.metadata.page || null,
           })),
         )
         // TODO: results should be sorted by relevance, not distance
@@ -128,18 +142,41 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
 
       const formattedString = formattedResults
         .map(
-          (result) =>
-            `File: ${result.filename}\nRelevance: ${1.0 - result.distance.toFixed(4)}\nContent: ${
-              result.content
-            }\n`,
+          (result, index) =>
+            `File: ${result.filename}${
+              fileCitations ? `\nAnchor: \\ue202turn0file${index} (${result.filename})` : ''
+            }\nRelevance: ${(1.0 - result.distance).toFixed(4)}\nContent: ${result.content}\n`,
         )
         .join('\n---\n');
 
-      return formattedString;
+      const sources = formattedResults.map((result) => ({
+        type: 'file',
+        fileId: result.file_id,
+        content: result.content,
+        fileName: result.filename,
+        relevance: 1.0 - result.distance,
+        pages: result.page ? [result.page] : [],
+        pageRelevance: result.page ? { [result.page]: 1.0 - result.distance } : {},
+      }));
+
+      return [formattedString, { [Tools.file_search]: { sources, fileCitations } }];
     },
     {
       name: Tools.file_search,
-      description: `Performs semantic search across attached "${Tools.file_search}" documents using natural language queries. This tool analyzes the content of uploaded files to find relevant information, quotes, and passages that best match your query. Use this to extract specific information or find relevant sections within the available documents.`,
+      responseFormat: 'content_and_artifact',
+      description: `Performs semantic search across attached "${Tools.file_search}" documents using natural language queries. This tool analyzes the content of uploaded files to find relevant information, quotes, and passages that best match your query. Use this to extract specific information or find relevant sections within the available documents.${
+        fileCitations
+          ? `
+
+**CITE FILE SEARCH RESULTS:**
+Use anchor markers immediately after statements derived from file content. Reference the filename in your text:
+- File citation: "The document.pdf states that... \\ue202turn0file0"
+- Page reference: "According to report.docx... \\ue202turn0file1"
+- Multi-file: "Multiple sources confirm... \\ue200\\ue202turn0file0\\ue202turn0file1\\ue201"
+
+**ALWAYS mention the filename in your text before the citation marker. NEVER use markdown links or footnotes.**`
+          : ''
+      }`,
       schema: z.object({
         query: z
          .string()
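
Two things worth noting in the fileSearch hunks above: the relevance expression is fixed (the old `1.0 - result.distance.toFixed(4)` subtracted a string and skipped the rounding), and `responseFormat: 'content_and_artifact'` lets the tool return a `[text, artifact]` pair. A standalone sketch of the corrected formatting, with made-up sample data:

```js
// Distance -> relevance formatting, shown standalone with made-up sample data.
const results = [
  { filename: 'report.pdf', distance: 0.1234, content: '...' },
  { filename: 'notes.txt', distance: 0.4567, content: '...' },
];

for (const [index, result] of results.entries()) {
  // Rounding the *difference* is the fix: (1.0 - distance).toFixed(4) yields
  // a clean four-decimal score, unlike subtracting the toFixed() string.
  const relevance = (1.0 - result.distance).toFixed(4);
  // Citation anchors follow the \ue202turn0file<index> convention from the hunk above.
  const anchor = `\ue202turn0file${index} (${result.filename})`;
  console.log(`File: ${result.filename}\nAnchor: ${anchor}\nRelevance: ${relevance}`);
}
```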
@@ -1,5 +1,5 @@
 const OpenAI = require('openai');
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');
 
 /**
  * Handles errors that may occur when making requests to OpenAI's API.
@@ -1,9 +1,21 @@
 const { logger } = require('@librechat/data-schemas');
 const { SerpAPI } = require('@langchain/community/tools/serpapi');
 const { Calculator } = require('@langchain/community/tools/calculator');
-const { mcpToolPattern, loadWebSearchAuth } = require('@librechat/api');
 const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents');
-const { Tools, EToolResources, replaceSpecialVars } = require('librechat-data-provider');
+const {
+  checkAccess,
+  createSafeUser,
+  mcpToolPattern,
+  loadWebSearchAuth,
+} = require('@librechat/api');
+const {
+  Tools,
+  Constants,
+  Permissions,
+  EToolResources,
+  PermissionTypes,
+  replaceSpecialVars,
+} = require('librechat-data-provider');
 const {
   availableTools,
   manifestToolMap,
@@ -24,9 +36,10 @@
 const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
 const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
 const { getUserPluginAuthValue } = require('~/server/services/PluginService');
+const { createMCPTool, createMCPTools } = require('~/server/services/MCP');
 const { loadAuthValues } = require('~/server/services/Tools/credentials');
-const { getCachedTools } = require('~/server/services/Config');
-const { createMCPTool } = require('~/server/services/MCP');
+const { getMCPServerTools } = require('~/server/services/Config');
+const { getRoleByName } = require('~/models/Role');
 
 /**
  * Validates the availability and authentication of tools for a user based on environment variables or user-specific plugin authentication values.
@@ -121,27 +134,37 @@ const getAuthFields = (toolKey) => {
 
 /**
  *
- * @param {object} object
- * @param {string} object.user
- * @param {Pick<Agent, 'id' | 'provider' | 'model'>} [object.agent]
- * @param {string} [object.model]
- * @param {EModelEndpoint} [object.endpoint]
- * @param {LoadToolOptions} [object.options]
- * @param {boolean} [object.useSpecs]
- * @param {Array<string>} object.tools
- * @param {boolean} [object.functions]
- * @param {boolean} [object.returnMap]
+ * @param {object} params
+ * @param {string} params.user
+ * @param {Record<string, Record<string, string>>} [object.userMCPAuthMap]
+ * @param {AbortSignal} [object.signal]
+ * @param {Pick<Agent, 'id' | 'provider' | 'model'>} [params.agent]
+ * @param {string} [params.model]
+ * @param {EModelEndpoint} [params.endpoint]
+ * @param {LoadToolOptions} [params.options]
+ * @param {boolean} [params.useSpecs]
+ * @param {Array<string>} params.tools
+ * @param {boolean} [params.functions]
+ * @param {boolean} [params.returnMap]
+ * @param {AppConfig['webSearch']} [params.webSearch]
+ * @param {AppConfig['fileStrategy']} [params.fileStrategy]
+ * @param {AppConfig['imageOutputType']} [params.imageOutputType]
 * @returns {Promise<{ loadedTools: Tool[], toolContextMap: Object<string, any> } | Record<string,Tool>>}
 */
 const loadTools = async ({
   user,
   agent,
   model,
+  signal,
   endpoint,
+  userMCPAuthMap,
   tools = [],
   options = {},
   functions = true,
   returnMap = false,
+  webSearch,
+  fileStrategy,
+  imageOutputType,
 }) => {
   const toolConstructors = {
     flux: FluxAPI,
@@ -200,6 +223,8 @@
         ...authValues,
         isAgent: !!agent,
         req: options.req,
+        imageOutputType,
+        fileStrategy,
         imageFiles,
       });
     },
@@ -215,7 +240,7 @@
   const imageGenOptions = {
     isAgent: !!agent,
     req: options.req,
-    fileStrategy: options.fileStrategy,
+    fileStrategy,
     processFileURL: options.processFileURL,
     returnMetadata: options.returnMetadata,
     uploadImageBuffer: options.uploadImageBuffer,
@@ -230,7 +255,7 @@
 
   /** @type {Record<string, string>} */
   const toolContextMap = {};
-  const cachedTools = (await getCachedTools({ userId: user, includeGlobal: true })) ?? {};
+  const requestedMCPTools = {};
 
   for (const tool of tools) {
     if (tool === Tools.execute_code) {
@@ -268,15 +293,36 @@
         if (toolContext) {
           toolContextMap[tool] = toolContext;
         }
-        return createFileSearchTool({ req: options.req, files, entity_id: agent?.id });
+
+        /** @type {boolean | undefined} Check if user has FILE_CITATIONS permission */
+        let fileCitations;
+        if (fileCitations == null && options.req?.user != null) {
+          try {
+            fileCitations = await checkAccess({
+              user: options.req.user,
+              permissionType: PermissionTypes.FILE_CITATIONS,
+              permissions: [Permissions.USE],
+              getRoleByName,
+            });
+          } catch (error) {
+            logger.error('[handleTools] FILE_CITATIONS permission check failed:', error);
+            fileCitations = false;
+          }
+        }
+
+        return createFileSearchTool({
+          userId: user,
+          files,
+          entity_id: agent?.id,
+          fileCitations,
+        });
       };
       continue;
     } else if (tool === Tools.web_search) {
-      const webSearchConfig = options?.req?.app?.locals?.webSearch;
       const result = await loadWebSearchAuth({
         userId: user,
         loadAuthValues,
-        webSearchConfig,
+        webSearchConfig: webSearch,
       });
       const { onSearchResults, onGetHighlights } = options?.[Tools.web_search] ?? {};
       requestedTools[tool] = async () => {
@@ -298,15 +344,34 @@ Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
         });
       };
       continue;
-    } else if (tool && cachedTools && mcpToolPattern.test(tool)) {
-      requestedTools[tool] = async () =>
-        createMCPTool({
-          req: options.req,
-          res: options.res,
-          toolKey: tool,
-          model: agent?.model ?? model,
-          provider: agent?.provider ?? endpoint,
-        });
+    } else if (tool && mcpToolPattern.test(tool)) {
+      const [toolName, serverName] = tool.split(Constants.mcp_delimiter);
+      if (toolName === Constants.mcp_server) {
+        /** Placeholder used for UI purposes */
+        continue;
+      }
+      if (serverName && options.req?.config?.mcpConfig?.[serverName] == null) {
+        logger.warn(
+          `MCP server "${serverName}" for "${toolName}" tool is not configured${agent?.id != null && agent.id ? ` but attached to "${agent.id}"` : ''}`,
+        );
+        continue;
+      }
+      if (toolName === Constants.mcp_all) {
+        requestedMCPTools[serverName] = [
+          {
+            type: 'all',
+            serverName,
+          },
+        ];
+        continue;
+      }
+
+      requestedMCPTools[serverName] = requestedMCPTools[serverName] || [];
+      requestedMCPTools[serverName].push({
+        type: 'single',
+        toolKey: tool,
+        serverName,
+      });
       continue;
     }
 
@@ -346,6 +411,75 @@ Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
   }
 
   const loadedTools = (await Promise.all(toolPromises)).flatMap((plugin) => plugin || []);
+  const mcpToolPromises = [];
+  /** MCP server tools are initialized sequentially by server */
+  let index = -1;
+  const failedMCPServers = new Set();
+  const safeUser = createSafeUser(options.req?.user);
+  for (const [serverName, toolConfigs] of Object.entries(requestedMCPTools)) {
+    index++;
+    /** @type {LCAvailableTools} */
+    let availableTools;
+    for (const config of toolConfigs) {
+      try {
+        if (failedMCPServers.has(serverName)) {
+          continue;
+        }
+        const mcpParams = {
+          index,
+          signal,
+          user: safeUser,
+          userMCPAuthMap,
+          res: options.res,
+          model: agent?.model ?? model,
+          serverName: config.serverName,
+          provider: agent?.provider ?? endpoint,
+        };
+
+        if (config.type === 'all' && toolConfigs.length === 1) {
+          /** Handle async loading for single 'all' tool config */
+          mcpToolPromises.push(
+            createMCPTools(mcpParams).catch((error) => {
+              logger.error(`Error loading ${serverName} tools:`, error);
+              return null;
+            }),
+          );
+          continue;
+        }
+        if (!availableTools) {
+          try {
+            availableTools = await getMCPServerTools(serverName);
+          } catch (error) {
+            logger.error(`Error fetching available tools for MCP server ${serverName}:`, error);
+          }
+        }
+
+        /** Handle synchronous loading */
+        const mcpTool =
+          config.type === 'all'
+            ? await createMCPTools(mcpParams)
+            : await createMCPTool({
+                ...mcpParams,
+                availableTools,
+                toolKey: config.toolKey,
+              });
+
+        if (Array.isArray(mcpTool)) {
+          loadedTools.push(...mcpTool);
+        } else if (mcpTool) {
+          loadedTools.push(mcpTool);
+        } else {
+          failedMCPServers.add(serverName);
+          logger.warn(
+            `MCP tool creation failed for "${config.toolKey}", server may be unavailable or unauthenticated.`,
+          );
+        }
+      } catch (error) {
+        logger.error(`Error loading MCP tool for server ${serverName}:`, error);
+      }
+    }
+  }
+  loadedTools.push(...(await Promise.all(mcpToolPromises)).flatMap((plugin) => plugin || []));
   return { loadedTools, toolContextMap };
 };
 
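
The rewritten loader buckets requested MCP tool keys by server before initializing them, so one failing server does not block the rest. A minimal sketch of that grouping step; the `'__'` delimiter is a stand-in for `Constants.mcp_delimiter` from librechat-data-provider:

```js
// Group "toolName__serverName" keys by server, mirroring the loop above.
function groupMCPToolsByServer(toolKeys, delimiter = '__') {
  const byServer = {};
  for (const key of toolKeys) {
    const [toolName, serverName] = key.split(delimiter);
    if (!serverName) continue; // not an MCP tool key
    byServer[serverName] = byServer[serverName] || [];
    byServer[serverName].push({ type: 'single', toolKey: key, serverName, toolName });
  }
  return byServer;
}

// Example: two tools on one server get initialized together, one server at a time.
console.log(groupMCPToolsByServer(['search__wiki', 'fetch__wiki', 'run__sandbox']));
```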
@@ -9,7 +9,27 @@ const mockPluginService = {
 
 jest.mock('~/server/services/PluginService', () => mockPluginService);
 
-const { BaseLLM } = require('@langchain/openai');
+jest.mock('~/server/services/Config', () => ({
+  getAppConfig: jest.fn().mockResolvedValue({
+    // Default app config for tool tests
+    paths: { uploads: '/tmp' },
+    fileStrategy: 'local',
+    filteredTools: [],
+    includedTools: [],
+  }),
+  getCachedTools: jest.fn().mockResolvedValue({
+    // Default cached tools for tests
+    dalle: {
+      type: 'function',
+      function: {
+        name: 'dalle',
+        description: 'DALL-E image generation',
+        parameters: {},
+      },
+    },
+  }),
+}));
 
 const { Calculator } = require('@langchain/community/tools/calculator');
 
 const { User } = require('~/db/models');
@@ -151,7 +171,6 @@ describe('Tool Handlers', () => {
   beforeAll(async () => {
     const toolMap = await loadTools({
       user: fakeUser._id,
-      model: BaseLLM,
       tools: sampleTools,
       returnMap: true,
       useSpecs: true,
@@ -245,7 +264,6 @@ describe('Tool Handlers', () => {
   it('returns an empty object when no tools are requested', async () => {
     toolFunctions = await loadTools({
       user: fakeUser._id,
-      model: BaseLLM,
       returnMap: true,
       useSpecs: true,
     });
@@ -255,7 +273,6 @@ describe('Tool Handlers', () => {
     process.env.SD_WEBUI_URL = mockCredential;
     toolFunctions = await loadTools({
       user: fakeUser._id,
-      model: BaseLLM,
       tools: ['stable-diffusion'],
       functions: true,
       returnMap: true,
api/cache/cacheConfig.spec.js (vendored, 157 lines removed)
@@ -1,157 +0,0 @@
-const fs = require('fs');
-
-describe('cacheConfig', () => {
-  let originalEnv;
-  let originalReadFileSync;
-
-  beforeEach(() => {
-    originalEnv = { ...process.env };
-    originalReadFileSync = fs.readFileSync;
-
-    // Clear all related env vars first
-    delete process.env.REDIS_URI;
-    delete process.env.REDIS_CA;
-    delete process.env.REDIS_KEY_PREFIX_VAR;
-    delete process.env.REDIS_KEY_PREFIX;
-    delete process.env.USE_REDIS;
-    delete process.env.REDIS_PING_INTERVAL;
-    delete process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES;
-
-    // Clear require cache
-    jest.resetModules();
-  });
-
-  afterEach(() => {
-    process.env = originalEnv;
-    fs.readFileSync = originalReadFileSync;
-    jest.resetModules();
-  });
-
-  describe('REDIS_KEY_PREFIX validation and resolution', () => {
-    test('should throw error when both REDIS_KEY_PREFIX_VAR and REDIS_KEY_PREFIX are set', () => {
-      process.env.REDIS_KEY_PREFIX_VAR = 'DEPLOYMENT_ID';
-      process.env.REDIS_KEY_PREFIX = 'manual-prefix';
-
-      expect(() => {
-        require('./cacheConfig');
-      }).toThrow('Only either REDIS_KEY_PREFIX_VAR or REDIS_KEY_PREFIX can be set.');
-    });
-
-    test('should resolve REDIS_KEY_PREFIX from variable reference', () => {
-      process.env.REDIS_KEY_PREFIX_VAR = 'DEPLOYMENT_ID';
-      process.env.DEPLOYMENT_ID = 'test-deployment-123';
-
-      const { cacheConfig } = require('./cacheConfig');
-      expect(cacheConfig.REDIS_KEY_PREFIX).toBe('test-deployment-123');
-    });
-
-    test('should use direct REDIS_KEY_PREFIX value', () => {
-      process.env.REDIS_KEY_PREFIX = 'direct-prefix';
-
-      const { cacheConfig } = require('./cacheConfig');
-      expect(cacheConfig.REDIS_KEY_PREFIX).toBe('direct-prefix');
-    });
-
-    test('should default to empty string when no prefix is configured', () => {
-      const { cacheConfig } = require('./cacheConfig');
-      expect(cacheConfig.REDIS_KEY_PREFIX).toBe('');
-    });
-
-    test('should handle empty variable reference', () => {
-      process.env.REDIS_KEY_PREFIX_VAR = 'EMPTY_VAR';
-      process.env.EMPTY_VAR = '';
-
-      const { cacheConfig } = require('./cacheConfig');
-      expect(cacheConfig.REDIS_KEY_PREFIX).toBe('');
-    });
-
-    test('should handle undefined variable reference', () => {
-      process.env.REDIS_KEY_PREFIX_VAR = 'UNDEFINED_VAR';
-
-      const { cacheConfig } = require('./cacheConfig');
-      expect(cacheConfig.REDIS_KEY_PREFIX).toBe('');
-    });
-  });
-
-  describe('USE_REDIS and REDIS_URI validation', () => {
-    test('should throw error when USE_REDIS is enabled but REDIS_URI is not set', () => {
-      process.env.USE_REDIS = 'true';
-
-      expect(() => {
-        require('./cacheConfig');
-      }).toThrow('USE_REDIS is enabled but REDIS_URI is not set.');
-    });
-
-    test('should not throw error when USE_REDIS is enabled and REDIS_URI is set', () => {
-      process.env.USE_REDIS = 'true';
-      process.env.REDIS_URI = 'redis://localhost:6379';
-
-      expect(() => {
-        require('./cacheConfig');
-      }).not.toThrow();
-    });
-
-    test('should handle empty REDIS_URI when USE_REDIS is enabled', () => {
-      process.env.USE_REDIS = 'true';
-      process.env.REDIS_URI = '';
-
-      expect(() => {
-        require('./cacheConfig');
-      }).toThrow('USE_REDIS is enabled but REDIS_URI is not set.');
-    });
-  });
-
-  describe('REDIS_CA file reading', () => {
-    test('should be null when REDIS_CA is not set', () => {
-      const { cacheConfig } = require('./cacheConfig');
-      expect(cacheConfig.REDIS_CA).toBeNull();
-    });
-  });
-
-  describe('REDIS_PING_INTERVAL configuration', () => {
-    test('should default to 0 when REDIS_PING_INTERVAL is not set', () => {
-      const { cacheConfig } = require('./cacheConfig');
-      expect(cacheConfig.REDIS_PING_INTERVAL).toBe(0);
-    });
-
-    test('should use provided REDIS_PING_INTERVAL value', () => {
-      process.env.REDIS_PING_INTERVAL = '300';
-
-      const { cacheConfig } = require('./cacheConfig');
-      expect(cacheConfig.REDIS_PING_INTERVAL).toBe(300);
-    });
-  });
-
-  describe('FORCED_IN_MEMORY_CACHE_NAMESPACES validation', () => {
-    test('should parse comma-separated cache keys correctly', () => {
-      process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = ' ROLES, STATIC_CONFIG ,MESSAGES ';
-
-      const { cacheConfig } = require('./cacheConfig');
-      expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([
-        'ROLES',
-        'STATIC_CONFIG',
-        'MESSAGES',
-      ]);
-    });
-
-    test('should throw error for invalid cache keys', () => {
-      process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = 'INVALID_KEY,ROLES';
-
-      expect(() => {
-        require('./cacheConfig');
-      }).toThrow('Invalid cache keys in FORCED_IN_MEMORY_CACHE_NAMESPACES: INVALID_KEY');
-    });
-
-    test('should handle empty string gracefully', () => {
-      process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = '';
-
-      const { cacheConfig } = require('./cacheConfig');
-      expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([]);
-    });
-
-    test('should handle undefined env var gracefully', () => {
-      const { cacheConfig } = require('./cacheConfig');
-      expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([]);
-    });
-  });
-});
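
For readers tracking what coverage disappears with this file: the spec pinned down the REDIS_KEY_PREFIX resolution rules. A condensed model of that behavior (not the actual cacheConfig implementation):

```js
// Condensed model of the REDIS_KEY_PREFIX rules the deleted spec verified.
function resolveRedisKeyPrefix(env) {
  if (env.REDIS_KEY_PREFIX_VAR && env.REDIS_KEY_PREFIX) {
    throw new Error('Only either REDIS_KEY_PREFIX_VAR or REDIS_KEY_PREFIX can be set.');
  }
  // An indirect reference resolves through the named variable; missing or
  // empty values fall back to the empty string.
  if (env.REDIS_KEY_PREFIX_VAR) {
    return env[env.REDIS_KEY_PREFIX_VAR] || '';
  }
  return env.REDIS_KEY_PREFIX || '';
}

console.log(
  resolveRedisKeyPrefix({ REDIS_KEY_PREFIX_VAR: 'DEPLOYMENT_ID', DEPLOYMENT_ID: 'test-deployment-123' }),
); // -> 'test-deployment-123'
```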
api/cache/cacheFactory.js (vendored, 108 lines removed)
@@ -1,108 +0,0 @@
-const KeyvRedis = require('@keyv/redis').default;
-const { Keyv } = require('keyv');
-const { RedisStore } = require('rate-limit-redis');
-const { Time } = require('librechat-data-provider');
-const { logger } = require('@librechat/data-schemas');
-const { RedisStore: ConnectRedis } = require('connect-redis');
-const MemoryStore = require('memorystore')(require('express-session'));
-const { keyvRedisClient, ioredisClient, GLOBAL_PREFIX_SEPARATOR } = require('./redisClients');
-const { cacheConfig } = require('./cacheConfig');
-const { violationFile } = require('./keyvFiles');
-
-/**
- * Creates a cache instance using Redis or a fallback store. Suitable for general caching needs.
- * @param {string} namespace - The cache namespace.
- * @param {number} [ttl] - Time to live for cache entries.
- * @param {object} [fallbackStore] - Optional fallback store if Redis is not used.
- * @returns {Keyv} Cache instance.
- */
-const standardCache = (namespace, ttl = undefined, fallbackStore = undefined) => {
-  if (
-    cacheConfig.USE_REDIS &&
-    !cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES?.includes(namespace)
-  ) {
-    try {
-      const keyvRedis = new KeyvRedis(keyvRedisClient);
-      const cache = new Keyv(keyvRedis, { namespace, ttl });
-      keyvRedis.namespace = cacheConfig.REDIS_KEY_PREFIX;
-      keyvRedis.keyPrefixSeparator = GLOBAL_PREFIX_SEPARATOR;
-
-      cache.on('error', (err) => {
-        logger.error(`Cache error in namespace ${namespace}:`, err);
-      });
-
-      return cache;
-    } catch (err) {
-      logger.error(`Failed to create Redis cache for namespace ${namespace}:`, err);
-      throw err;
-    }
-  }
-  if (fallbackStore) return new Keyv({ store: fallbackStore, namespace, ttl });
-  return new Keyv({ namespace, ttl });
-};
-
-/**
- * Creates a cache instance for storing violation data.
- * Uses a file-based fallback store if Redis is not enabled.
- * @param {string} namespace - The cache namespace for violations.
- * @param {number} [ttl] - Time to live for cache entries.
- * @returns {Keyv} Cache instance for violations.
- */
-const violationCache = (namespace, ttl = undefined) => {
-  return standardCache(`violations:${namespace}`, ttl, violationFile);
-};
-
-/**
- * Creates a session cache instance using Redis or in-memory store.
- * @param {string} namespace - The session namespace.
- * @param {number} [ttl] - Time to live for session entries.
- * @returns {MemoryStore | ConnectRedis} Session store instance.
- */
-const sessionCache = (namespace, ttl = undefined) => {
-  namespace = namespace.endsWith(':') ? namespace : `${namespace}:`;
-  if (!cacheConfig.USE_REDIS) return new MemoryStore({ ttl, checkPeriod: Time.ONE_DAY });
-  const store = new ConnectRedis({ client: ioredisClient, ttl, prefix: namespace });
-  if (ioredisClient) {
-    ioredisClient.on('error', (err) => {
-      logger.error(`Session store Redis error for namespace ${namespace}:`, err);
-    });
-  }
-  return store;
-};
-
-/**
- * Creates a rate limiter cache using Redis.
- * @param {string} prefix - The key prefix for rate limiting.
- * @returns {RedisStore|undefined} RedisStore instance or undefined if Redis is not used.
- */
-const limiterCache = (prefix) => {
-  if (!prefix) throw new Error('prefix is required');
-  if (!cacheConfig.USE_REDIS) return undefined;
-  prefix = prefix.endsWith(':') ? prefix : `${prefix}:`;
-
-  try {
-    if (!ioredisClient) {
-      logger.warn(`Redis client not available for rate limiter with prefix ${prefix}`);
-      return undefined;
-    }
-
-    return new RedisStore({ sendCommand, prefix });
-  } catch (err) {
-    logger.error(`Failed to create Redis rate limiter for prefix ${prefix}:`, err);
-    return undefined;
-  }
-};
-
-const sendCommand = (...args) => {
-  if (!ioredisClient) {
-    logger.warn('Redis client not available for command execution');
-    return Promise.reject(new Error('Redis client not available'));
-  }
-
-  return ioredisClient.call(...args).catch((err) => {
-    logger.error('Redis command execution failed:', err);
-    throw err;
-  });
-};
-
-module.exports = { standardCache, sessionCache, violationCache, limiterCache };
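
For context on what callers lose, a short usage sketch of the removed factory's exports (namespaces and TTLs are illustrative):

```js
const { standardCache, violationCache, limiterCache } = require('./cacheFactory');

// General-purpose cache: Redis-backed when USE_REDIS is set, in-memory otherwise.
const tokenCache = standardCache('tokens', 60_000);

// Violation caches reuse standardCache under a 'violations:' namespace with a
// file-based fallback store when Redis is disabled.
const loginViolations = violationCache('logins');

// Rate limiter stores only exist on Redis; callers must handle `undefined`.
const limiterStore = limiterCache('ratelimit');
```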
api/cache/cacheFactory.spec.js (vendored, 432 lines removed; listing truncated below)
@@ -1,432 +0,0 @@
-const { Time } = require('librechat-data-provider');
-
-// Mock dependencies first
-const mockKeyvRedis = {
-  namespace: '',
-  keyPrefixSeparator: '',
-};
-
-const mockKeyv = jest.fn().mockReturnValue({
-  mock: 'keyv',
-  on: jest.fn(),
-});
-const mockConnectRedis = jest.fn().mockReturnValue({ mock: 'connectRedis' });
-const mockMemoryStore = jest.fn().mockReturnValue({ mock: 'memoryStore' });
-const mockRedisStore = jest.fn().mockReturnValue({ mock: 'redisStore' });
-
-const mockIoredisClient = {
-  call: jest.fn(),
-  on: jest.fn(),
-};
-
-const mockKeyvRedisClient = {};
-const mockViolationFile = {};
-
-// Mock modules before requiring the main module
-jest.mock('@keyv/redis', () => ({
-  default: jest.fn().mockImplementation(() => mockKeyvRedis),
-}));
-
-jest.mock('keyv', () => ({
-  Keyv: mockKeyv,
-}));
-
-jest.mock('./cacheConfig', () => ({
-  cacheConfig: {
-    USE_REDIS: false,
-    REDIS_KEY_PREFIX: 'test',
-    FORCED_IN_MEMORY_CACHE_NAMESPACES: [],
-  },
-}));
-
-jest.mock('./redisClients', () => ({
-  keyvRedisClient: mockKeyvRedisClient,
-  ioredisClient: mockIoredisClient,
-  GLOBAL_PREFIX_SEPARATOR: '::',
-}));
-
-jest.mock('./keyvFiles', () => ({
-  violationFile: mockViolationFile,
-}));
-
-jest.mock('connect-redis', () => ({ RedisStore: mockConnectRedis }));
-
-jest.mock('memorystore', () => jest.fn(() => mockMemoryStore));
-
-jest.mock('rate-limit-redis', () => ({
-  RedisStore: mockRedisStore,
-}));
-
-jest.mock('@librechat/data-schemas', () => ({
-  logger: {
-    error: jest.fn(),
-    warn: jest.fn(),
-    info: jest.fn(),
-  },
-}));
-
-// Import after mocking
-const { standardCache, sessionCache, violationCache, limiterCache } = require('./cacheFactory');
-const { cacheConfig } = require('./cacheConfig');
-
-describe('cacheFactory', () => {
-  beforeEach(() => {
-    jest.clearAllMocks();
-
-    // Reset cache config mock
-    cacheConfig.USE_REDIS = false;
-    cacheConfig.REDIS_KEY_PREFIX = 'test';
-    cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = [];
-  });
-
-  describe('redisCache', () => {
-    it('should create Redis cache when USE_REDIS is true', () => {
-      cacheConfig.USE_REDIS = true;
-      const namespace = 'test-namespace';
-      const ttl = 3600;
-
-      standardCache(namespace, ttl);
-
-      expect(require('@keyv/redis').default).toHaveBeenCalledWith(mockKeyvRedisClient);
-      expect(mockKeyv).toHaveBeenCalledWith(mockKeyvRedis, { namespace, ttl });
-      expect(mockKeyvRedis.namespace).toBe(cacheConfig.REDIS_KEY_PREFIX);
-      expect(mockKeyvRedis.keyPrefixSeparator).toBe('::');
-    });
-
-    it('should create Redis cache with undefined ttl when not provided', () => {
-      cacheConfig.USE_REDIS = true;
-      const namespace = 'test-namespace';
-
-      standardCache(namespace);
-
-      expect(mockKeyv).toHaveBeenCalledWith(mockKeyvRedis, { namespace, ttl: undefined });
-    });
-
-    it('should use fallback store when USE_REDIS is false and fallbackStore is provided', () => {
-      cacheConfig.USE_REDIS = false;
-      const namespace = 'test-namespace';
-      const ttl = 3600;
-      const fallbackStore = { some: 'store' };
-
-      standardCache(namespace, ttl, fallbackStore);
-
-      expect(mockKeyv).toHaveBeenCalledWith({ store: fallbackStore, namespace, ttl });
-    });
-
-    it('should create default Keyv instance when USE_REDIS is false and no fallbackStore', () => {
-      cacheConfig.USE_REDIS = false;
-      const namespace = 'test-namespace';
-      const ttl = 3600;
-
-      standardCache(namespace, ttl);
-
-      expect(mockKeyv).toHaveBeenCalledWith({ namespace, ttl });
-    });
-
-    it('should handle namespace and ttl as undefined', () => {
-      cacheConfig.USE_REDIS = false;
-
-      standardCache();
-
-      expect(mockKeyv).toHaveBeenCalledWith({ namespace: undefined, ttl: undefined });
-    });
-
-    it('should use fallback when namespace is in FORCED_IN_MEMORY_CACHE_NAMESPACES', () => {
-      cacheConfig.USE_REDIS = true;
-      cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = ['forced-memory'];
-      const namespace = 'forced-memory';
-      const ttl = 3600;
-
-      standardCache(namespace, ttl);
-
-      expect(require('@keyv/redis').default).not.toHaveBeenCalled();
-      expect(mockKeyv).toHaveBeenCalledWith({ namespace, ttl });
-    });
-
-    it('should use Redis when namespace is not in FORCED_IN_MEMORY_CACHE_NAMESPACES', () => {
-      cacheConfig.USE_REDIS = true;
-      cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = ['other-namespace'];
-      const namespace = 'test-namespace';
-      const ttl = 3600;
-
-      standardCache(namespace, ttl);
-
-      expect(require('@keyv/redis').default).toHaveBeenCalledWith(mockKeyvRedisClient);
-      expect(mockKeyv).toHaveBeenCalledWith(mockKeyvRedis, { namespace, ttl });
-    });
-
-    it('should throw error when Redis cache creation fails', () => {
-      cacheConfig.USE_REDIS = true;
-      const namespace = 'test-namespace';
-      const ttl = 3600;
-      const testError = new Error('Redis connection failed');
-
-      const KeyvRedis = require('@keyv/redis').default;
-      KeyvRedis.mockImplementationOnce(() => {
-        throw testError;
-      });
-
-      expect(() => standardCache(namespace, ttl)).toThrow('Redis connection failed');
-
-      const { logger } = require('@librechat/data-schemas');
-      expect(logger.error).toHaveBeenCalledWith(
-        `Failed to create Redis cache for namespace ${namespace}:`,
-        testError,
-      );
-
-      expect(mockKeyv).not.toHaveBeenCalled();
-    });
-  });
-
-  describe('violationCache', () => {
-    it('should create violation cache with prefixed namespace', () => {
-      const namespace = 'test-violations';
-      const ttl = 7200;
-
-      // We can't easily mock the internal redisCache call since it's in the same module
-      // But we can test that the function executes without throwing
-      expect(() => violationCache(namespace, ttl)).not.toThrow();
-    });
-
-    it('should create violation cache with undefined ttl', () => {
-      const namespace = 'test-violations';
-
-      violationCache(namespace);
-
-      // The function should call redisCache with violations: prefixed namespace
-      // Since we can't easily mock the internal redisCache call, we test the behavior
-      expect(() => violationCache(namespace)).not.toThrow();
-    });
-
-    it('should handle undefined namespace', () => {
-      expect(() => violationCache(undefined)).not.toThrow();
-    });
-  });
-
-  describe('sessionCache', () => {
-    it('should return MemoryStore when USE_REDIS is false', () => {
-      cacheConfig.USE_REDIS = false;
-      const namespace = 'sessions';
-      const ttl = 86400;
-
-      const result = sessionCache(namespace, ttl);
-
-      expect(mockMemoryStore).toHaveBeenCalledWith({ ttl, checkPeriod: Time.ONE_DAY });
-      expect(result).toBe(mockMemoryStore());
-    });
-
-    it('should return ConnectRedis when USE_REDIS is true', () => {
-      cacheConfig.USE_REDIS = true;
-      const namespace = 'sessions';
-      const ttl = 86400;
-
-      const result = sessionCache(namespace, ttl);
-
-      expect(mockConnectRedis).toHaveBeenCalledWith({
-        client: mockIoredisClient,
-        ttl,
-        prefix: `${namespace}:`,
-      });
-      expect(result).toBe(mockConnectRedis());
-    });
-
-    it('should add colon to namespace if not present', () => {
-      cacheConfig.USE_REDIS = true;
-      const namespace = 'sessions';
-
-      sessionCache(namespace);
-
-      expect(mockConnectRedis).toHaveBeenCalledWith({
-        client: mockIoredisClient,
-        ttl: undefined,
-        prefix: 'sessions:',
-      });
-    });
-
-    it('should not add colon to namespace if already present', () => {
-      cacheConfig.USE_REDIS = true;
-      const namespace = 'sessions:';
-
-      sessionCache(namespace);
-
-      expect(mockConnectRedis).toHaveBeenCalledWith({
-        client: mockIoredisClient,
-        ttl: undefined,
-        prefix: 'sessions:',
-      });
-    });
-
-    it('should handle undefined ttl', () => {
-      cacheConfig.USE_REDIS = false;
-      const namespace = 'sessions';
-
-      sessionCache(namespace);
-
-      expect(mockMemoryStore).toHaveBeenCalledWith({
-        ttl: undefined,
-        checkPeriod: Time.ONE_DAY,
-      });
-    });
-
-    it('should throw error when ConnectRedis constructor fails', () => {
-      cacheConfig.USE_REDIS = true;
-      const namespace = 'sessions';
-      const ttl = 86400;
-
-      // Mock ConnectRedis to throw an error during construction
-      const redisError = new Error('Redis connection failed');
-      mockConnectRedis.mockImplementationOnce(() => {
-        throw redisError;
-      });
-
-      // The error should propagate up, not be caught
-      expect(() => sessionCache(namespace, ttl)).toThrow('Redis connection failed');
-
-      // Verify that MemoryStore was NOT used as fallback
-      expect(mockMemoryStore).not.toHaveBeenCalled();
-    });
-
-    it('should register error handler but let errors propagate to Express', () => {
-      cacheConfig.USE_REDIS = true;
-      const namespace = 'sessions';
-
-      // Create a mock session store with middleware methods
-      const mockSessionStore = {
-        get: jest.fn(),
-        set: jest.fn(),
-        destroy: jest.fn(),
-      };
-      mockConnectRedis.mockReturnValue(mockSessionStore);
-
-      const store = sessionCache(namespace);
-
-      // Verify error handler was registered
-      expect(mockIoredisClient.on).toHaveBeenCalledWith('error', expect.any(Function));
-
-      // Get the error handler
-      const errorHandler = mockIoredisClient.on.mock.calls.find((call) => call[0] === 'error')[1];
-
-      // Simulate an error from Redis during a session operation
-      const redisError = new Error('Socket closed unexpectedly');
-
-      // The error handler should log but not swallow the error
-      const { logger } = require('@librechat/data-schemas');
-      errorHandler(redisError);
-
-      expect(logger.error).toHaveBeenCalledWith(
-        `Session store Redis error for namespace ${namespace}::`,
-        redisError,
-      );
-
-      // Now simulate what happens when session middleware tries to use the store
-      const callback = jest.fn();
-      mockSessionStore.get.mockImplementation((sid, cb) => {
-        cb(new Error('Redis connection lost'));
-      });
-
-      // Call the store's get method (as Express session would)
-      store.get('test-session-id', callback);
-
-      // The error should be passed to the callback, not swallowed
-      expect(callback).toHaveBeenCalledWith(new Error('Redis connection lost'));
-    });
-
-    it('should handle null ioredisClient gracefully', () => {
-      cacheConfig.USE_REDIS = true;
-      const namespace = 'sessions';
-
-      // Temporarily set ioredisClient to null (simulating connection not established)
-      const originalClient = require('./redisClients').ioredisClient;
-      require('./redisClients').ioredisClient = null;
-
-      // ConnectRedis might accept null client but would fail on first use
-      // The important thing is it doesn't throw uncaught exceptions during construction
-      const store = sessionCache(namespace);
-      expect(store).toBeDefined();
-
-      // Restore original client
|
|
||||||
require('./redisClients').ioredisClient = originalClient;
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('limiterCache', () => {
|
|
||||||
it('should return undefined when USE_REDIS is false', () => {
|
|
||||||
cacheConfig.USE_REDIS = false;
|
|
||||||
const result = limiterCache('prefix');
|
|
||||||
|
|
||||||
expect(result).toBeUndefined();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return RedisStore when USE_REDIS is true', () => {
|
|
||||||
cacheConfig.USE_REDIS = true;
|
|
||||||
const result = limiterCache('rate-limit');
|
|
||||||
|
|
||||||
expect(mockRedisStore).toHaveBeenCalledWith({
|
|
||||||
sendCommand: expect.any(Function),
|
|
||||||
prefix: `rate-limit:`,
|
|
||||||
});
|
|
||||||
expect(result).toBe(mockRedisStore());
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should add colon to prefix if not present', () => {
|
|
||||||
cacheConfig.USE_REDIS = true;
|
|
||||||
limiterCache('rate-limit');
|
|
||||||
|
|
||||||
expect(mockRedisStore).toHaveBeenCalledWith({
|
|
||||||
sendCommand: expect.any(Function),
|
|
||||||
prefix: 'rate-limit:',
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should not add colon to prefix if already present', () => {
|
|
||||||
cacheConfig.USE_REDIS = true;
|
|
||||||
limiterCache('rate-limit:');
|
|
||||||
|
|
||||||
expect(mockRedisStore).toHaveBeenCalledWith({
|
|
||||||
sendCommand: expect.any(Function),
|
|
||||||
prefix: 'rate-limit:',
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should pass sendCommand function that calls ioredisClient.call', async () => {
|
|
||||||
cacheConfig.USE_REDIS = true;
|
|
||||||
mockIoredisClient.call.mockResolvedValue('test-value');
|
|
||||||
|
|
||||||
limiterCache('rate-limit');
|
|
||||||
|
|
||||||
const sendCommandCall = mockRedisStore.mock.calls[0][0];
|
|
||||||
const sendCommand = sendCommandCall.sendCommand;
|
|
||||||
|
|
||||||
// Test that sendCommand properly delegates to ioredisClient.call
|
|
||||||
const args = ['GET', 'test-key'];
|
|
||||||
const result = await sendCommand(...args);
|
|
||||||
|
|
||||||
expect(mockIoredisClient.call).toHaveBeenCalledWith(...args);
|
|
||||||
expect(result).toBe('test-value');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle sendCommand errors properly', async () => {
|
|
||||||
cacheConfig.USE_REDIS = true;
|
|
||||||
|
|
||||||
// Mock the call method to reject with an error
|
|
||||||
const testError = new Error('Redis error');
|
|
||||||
mockIoredisClient.call.mockRejectedValue(testError);
|
|
||||||
|
|
||||||
limiterCache('rate-limit');
|
|
||||||
|
|
||||||
const sendCommandCall = mockRedisStore.mock.calls[0][0];
|
|
||||||
const sendCommand = sendCommandCall.sendCommand;
|
|
||||||
|
|
||||||
// Test that sendCommand properly handles errors
|
|
||||||
const args = ['GET', 'test-key'];
|
|
||||||
|
|
||||||
await expect(sendCommand(...args)).rejects.toThrow('Redis error');
|
|
||||||
expect(mockIoredisClient.call).toHaveBeenCalledWith(...args);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle undefined prefix', () => {
|
|
||||||
cacheConfig.USE_REDIS = true;
|
|
||||||
expect(() => limiterCache()).toThrow('prefix is required');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
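The tests above pin down three behaviors of the limiter factory: it returns undefined when Redis is off, it normalizes the key prefix to end in exactly one colon, and it delegates raw commands to the shared ioredis client. A minimal sketch of that shape follows; the parameters are made explicit here for self-containment, whereas the real cacheFactory reads cacheConfig and the shared client from module scope, so treat this as an illustration rather than the actual implementation.

const { RedisStore } = require('rate-limit-redis');

/** Sketch only: mirrors the behavior asserted in the tests above. */
function limiterCache(prefix, ioredisClient, useRedis) {
  if (!prefix) {
    throw new Error('prefix is required');
  }
  if (!useRedis) {
    // Without Redis, express-rate-limit falls back to its in-memory store
    return undefined;
  }
  // 'rate-limit' -> 'rate-limit:'; an existing trailing colon is kept as-is
  const normalized = prefix.endsWith(':') ? prefix : `${prefix}:`;
  return new RedisStore({
    // Delegate raw commands to the shared ioredis client
    sendCommand: (...args) => ioredisClient.call(...args),
    prefix: normalized,
  });
}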
api/cache/clearPendingReq.js (vendored, 2 changes)
@@ -1,5 +1,5 @@
+const { isEnabled } = require('@librechat/api');
 const { Time, CacheKeys } = require('librechat-data-provider');
-const { isEnabled } = require('~/server/utils');
 const getLogStores = require('./getLogStores');

 const { USE_REDIS, LIMIT_CONCURRENT_MESSAGES } = process.env ?? {};
api/cache/getLogStores.js (vendored, 17 changes)
@@ -1,9 +1,13 @@
-const { cacheConfig } = require('./cacheConfig');
 const { Keyv } = require('keyv');
-const { CacheKeys, ViolationTypes, Time } = require('librechat-data-provider');
-const { logFile } = require('./keyvFiles');
-const keyvMongo = require('./keyvMongo');
-const { standardCache, sessionCache, violationCache } = require('./cacheFactory');
+const { Time, CacheKeys, ViolationTypes } = require('librechat-data-provider');
+const {
+  logFile,
+  keyvMongo,
+  cacheConfig,
+  sessionCache,
+  standardCache,
+  violationCache,
+} = require('@librechat/api');

 const namespaces = {
   [ViolationTypes.GENERAL]: new Keyv({ store: logFile, namespace: 'violations' }),
@@ -31,9 +35,8 @@ const namespaces = {
   [CacheKeys.SAML_SESSION]: sessionCache(CacheKeys.SAML_SESSION),

   [CacheKeys.ROLES]: standardCache(CacheKeys.ROLES),
-  [CacheKeys.MCP_TOOLS]: standardCache(CacheKeys.MCP_TOOLS),
+  [CacheKeys.APP_CONFIG]: standardCache(CacheKeys.APP_CONFIG),
   [CacheKeys.CONFIG_STORE]: standardCache(CacheKeys.CONFIG_STORE),
-  [CacheKeys.STATIC_CONFIG]: standardCache(CacheKeys.STATIC_CONFIG),
   [CacheKeys.PENDING_REQ]: standardCache(CacheKeys.PENDING_REQ),
   [CacheKeys.ENCODED_DOMAINS]: new Keyv({ store: keyvMongo, namespace: CacheKeys.ENCODED_DOMAINS }),
   [CacheKeys.ABORT_KEYS]: standardCache(CacheKeys.ABORT_KEYS, Time.TEN_MINUTES),
api/cache/index.js (vendored, 3 changes)
@@ -1,5 +1,4 @@
-const keyvFiles = require('./keyvFiles');
 const getLogStores = require('./getLogStores');
 const logViolation = require('./logViolation');

-module.exports = { ...keyvFiles, getLogStores, logViolation };
+module.exports = { getLogStores, logViolation };
api/cache/keyvFiles.js (vendored, 9 changes)
@@ -1,9 +0,0 @@
-const { KeyvFile } = require('keyv-file');
-
-const logFile = new KeyvFile({ filename: './data/logs.json' }).setMaxListeners(20);
-const violationFile = new KeyvFile({ filename: './data/violations.json' }).setMaxListeners(20);
-
-module.exports = {
-  logFile,
-  violationFile,
-};
api/cache/logViolation.js (vendored, 2 changes)
@@ -1,4 +1,4 @@
-const { isEnabled } = require('~/server/utils');
+const { isEnabled } = require('@librechat/api');
 const { ViolationTypes } = require('librechat-data-provider');
 const getLogStores = require('./getLogStores');
 const banViolation = require('./banViolation');
@@ -1,27 +1,13 @@
 const { EventSource } = require('eventsource');
 const { Time } = require('librechat-data-provider');
-const { MCPManager, FlowStateManager } = require('@librechat/api');
+const { MCPManager, FlowStateManager, OAuthReconnectionManager } = require('@librechat/api');
 const logger = require('./winston');

 global.EventSource = EventSource;

 /** @type {MCPManager} */
-let mcpManager = null;
 let flowManager = null;

-/**
- * @param {string} [userId] - Optional user ID, to avoid disconnecting the current user.
- * @returns {MCPManager}
- */
-function getMCPManager(userId) {
-  if (!mcpManager) {
-    mcpManager = MCPManager.getInstance();
-  } else {
-    mcpManager.checkIdleConnections(userId);
-  }
-  return mcpManager;
-}
-
 /**
  * @param {Keyv} flowsCache
  * @returns {FlowStateManager}
@@ -37,6 +23,9 @@ function getFlowStateManager(flowsCache) {

 module.exports = {
   logger,
-  getMCPManager,
+  createMCPManager: MCPManager.createInstance,
+  getMCPManager: MCPManager.getInstance,
   getFlowStateManager,
+  createOAuthReconnectionManager: OAuthReconnectionManager.createInstance,
+  getOAuthReconnectionManager: OAuthReconnectionManager.getInstance,
 };
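The export change above replaces a locally cached getMCPManager wrapper with static accessors on the classes themselves. The following is only an illustrative shape of that createInstance/getInstance split, not the actual @librechat/api source:

class Manager {
  static instance = null;
  /** Creates and caches the singleton; intended to be called once at startup. */
  static createInstance(...args) {
    Manager.instance = new Manager(...args);
    return Manager.instance;
  }
  /** Returns the previously created singleton. */
  static getInstance() {
    if (!Manager.instance) {
      throw new Error('createInstance must be called before getInstance');
    }
    return Manager.instance;
  }
}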
@@ -1,11 +1,34 @@
 require('dotenv').config();
+const { isEnabled } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');

 const mongoose = require('mongoose');
 const MONGO_URI = process.env.MONGO_URI;

 if (!MONGO_URI) {
   throw new Error('Please define the MONGO_URI environment variable');
 }
+/** The maximum number of connections in the connection pool. */
+const maxPoolSize = parseInt(process.env.MONGO_MAX_POOL_SIZE) || undefined;
+/** The minimum number of connections in the connection pool. */
+const minPoolSize = parseInt(process.env.MONGO_MIN_POOL_SIZE) || undefined;
+/** The maximum number of connections that may be in the process of being established concurrently by the connection pool. */
+const maxConnecting = parseInt(process.env.MONGO_MAX_CONNECTING) || undefined;
+/** The maximum number of milliseconds that a connection can remain idle in the pool before being removed and closed. */
+const maxIdleTimeMS = parseInt(process.env.MONGO_MAX_IDLE_TIME_MS) || undefined;
+/** The maximum time in milliseconds that a thread can wait for a connection to become available. */
+const waitQueueTimeoutMS = parseInt(process.env.MONGO_WAIT_QUEUE_TIMEOUT_MS) || undefined;
+/** Set to false to disable automatic index creation for all models associated with this connection. */
+const autoIndex =
+  process.env.MONGO_AUTO_INDEX != undefined
+    ? isEnabled(process.env.MONGO_AUTO_INDEX) || false
+    : undefined;
+
+/** Set to `false` to disable Mongoose automatically calling `createCollection()` on every model created on this connection. */
+const autoCreate =
+  process.env.MONGO_AUTO_CREATE != undefined
+    ? isEnabled(process.env.MONGO_AUTO_CREATE) || false
+    : undefined;
 /**
  * Global is used here to maintain a cached connection across hot reloads
  * in development. This prevents connections growing exponentially
@@ -26,13 +49,21 @@ async function connectDb() {
   if (!cached.promise || disconnected) {
     const opts = {
       bufferCommands: false,
+      ...(maxPoolSize ? { maxPoolSize } : {}),
+      ...(minPoolSize ? { minPoolSize } : {}),
+      ...(maxConnecting ? { maxConnecting } : {}),
+      ...(maxIdleTimeMS ? { maxIdleTimeMS } : {}),
+      ...(waitQueueTimeoutMS ? { waitQueueTimeoutMS } : {}),
+      ...(autoIndex != undefined ? { autoIndex } : {}),
+      ...(autoCreate != undefined ? { autoCreate } : {}),
       // useNewUrlParser: true,
       // useUnifiedTopology: true,
       // bufferMaxEntries: 0,
       // useFindAndModify: true,
       // useCreateIndex: true
     };
+    logger.info('Mongo Connection options');
+    logger.info(JSON.stringify(opts, null, 2));
     mongoose.set('strictQuery', true);
     cached.promise = mongoose.connect(MONGO_URI, opts).then((mongoose) => {
       return mongoose;
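A note on the option-building pattern in the diff above: an unset env var makes parseInt return NaN, `|| undefined` collapses that to undefined, and the conditional spreads keep unset options out of the final object entirely, so the driver's own defaults still apply. A self-contained illustration:

// With MONGO_MAX_POOL_SIZE unset, parseInt(undefined) is NaN -> undefined
const maxPoolSize = parseInt(process.env.MONGO_MAX_POOL_SIZE) || undefined;
const minPoolSize = parseInt(process.env.MONGO_MIN_POOL_SIZE) || undefined;

const opts = {
  bufferCommands: false,
  // Spread nothing at all when the value is undefined
  ...(maxPoolSize ? { maxPoolSize } : {}),
  ...(minPoolSize ? { minPoolSize } : {}),
};

console.log(opts); // { bufferCommands: false } when neither variable is set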
@@ -1,10 +1,8 @@
 const mongoose = require('mongoose');
 const { MeiliSearch } = require('meilisearch');
 const { logger } = require('@librechat/data-schemas');
-const { FlowStateManager } = require('@librechat/api');
 const { CacheKeys } = require('librechat-data-provider');
-const { isEnabled } = require('~/server/utils');
+const { isEnabled, FlowStateManager } = require('@librechat/api');
 const { getLogStores } = require('~/cache');

 const Conversation = mongoose.models.Conversation;
@@ -32,78 +30,264 @@ class MeiliSearchClient {
 }

 /**
- * Performs the actual sync operations for messages and conversations
- */
-async function performSync() {
-  const client = MeiliSearchClient.getInstance();
-
-  const { status } = await client.health();
-  if (status !== 'available') {
-    throw new Error('Meilisearch not available');
-  }
-
-  if (indexingDisabled === true) {
-    logger.info('[indexSync] Indexing is disabled, skipping...');
-    return { messagesSync: false, convosSync: false };
-  }
-
-  let messagesSync = false;
-  let convosSync = false;
-
-  // Check if we need to sync messages
-  const messageProgress = await Message.getSyncProgress();
-  if (!messageProgress.isComplete) {
-    logger.info(
-      `[indexSync] Messages need syncing: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments} indexed`,
-    );
-
-    // Check if we should do a full sync or incremental
-    const messageCount = await Message.countDocuments();
-    const messagesIndexed = messageProgress.totalProcessed;
-    const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);
-
-    if (messageCount - messagesIndexed > syncThreshold) {
-      logger.info('[indexSync] Starting full message sync due to large difference');
-      await Message.syncWithMeili();
-      messagesSync = true;
-    } else if (messageCount !== messagesIndexed) {
-      logger.warn('[indexSync] Messages out of sync, performing incremental sync');
-      await Message.syncWithMeili();
-      messagesSync = true;
-    }
-  } else {
-    logger.info(
-      `[indexSync] Messages are fully synced: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments}`,
-    );
-  }
-
-  // Check if we need to sync conversations
-  const convoProgress = await Conversation.getSyncProgress();
-  if (!convoProgress.isComplete) {
-    logger.info(
-      `[indexSync] Conversations need syncing: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments} indexed`,
-    );
-
-    const convoCount = await Conversation.countDocuments();
-    const convosIndexed = convoProgress.totalProcessed;
-    const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);
-
-    if (convoCount - convosIndexed > syncThreshold) {
-      logger.info('[indexSync] Starting full conversation sync due to large difference');
-      await Conversation.syncWithMeili();
-      convosSync = true;
-    } else if (convoCount !== convosIndexed) {
-      logger.warn('[indexSync] Convos out of sync, performing incremental sync');
-      await Conversation.syncWithMeili();
-      convosSync = true;
-    }
-  } else {
-    logger.info(
-      `[indexSync] Conversations are fully synced: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments}`,
-    );
-  }
-
-  return { messagesSync, convosSync };
+ * Deletes documents from MeiliSearch index that are missing the user field
+ * @param {import('meilisearch').Index} index - MeiliSearch index instance
+ * @param {string} indexName - Name of the index for logging
+ * @returns {Promise<number>} - Number of documents deleted
+ */
+async function deleteDocumentsWithoutUserField(index, indexName) {
+  let deletedCount = 0;
+  let offset = 0;
+  const batchSize = 1000;
+
+  try {
+    while (true) {
+      const searchResult = await index.search('', {
+        limit: batchSize,
+        offset: offset,
+      });
+
+      if (searchResult.hits.length === 0) {
+        break;
+      }
+
+      const idsToDelete = searchResult.hits.filter((hit) => !hit.user).map((hit) => hit.id);
+
+      if (idsToDelete.length > 0) {
+        logger.info(
+          `[indexSync] Deleting ${idsToDelete.length} documents without user field from ${indexName} index`,
+        );
+        await index.deleteDocuments(idsToDelete);
+        deletedCount += idsToDelete.length;
+      }
+
+      if (searchResult.hits.length < batchSize) {
+        break;
+      }
+
+      offset += batchSize;
+    }
+
+    if (deletedCount > 0) {
+      logger.info(`[indexSync] Deleted ${deletedCount} orphaned documents from ${indexName} index`);
+    }
+  } catch (error) {
+    logger.error(`[indexSync] Error deleting documents from ${indexName}:`, error);
+  }
+
+  return deletedCount;
+}
+
+/**
+ * Ensures indexes have proper filterable attributes configured and checks if documents have user field
+ * @param {MeiliSearch} client - MeiliSearch client instance
+ * @returns {Promise<{settingsUpdated: boolean, orphanedDocsFound: boolean}>} - Status of what was done
+ */
+async function ensureFilterableAttributes(client) {
+  let settingsUpdated = false;
+  let hasOrphanedDocs = false;
+
+  try {
+    // Check and update messages index
+    try {
+      const messagesIndex = client.index('messages');
+      const settings = await messagesIndex.getSettings();
+
+      if (!settings.filterableAttributes || !settings.filterableAttributes.includes('user')) {
+        logger.info('[indexSync] Configuring messages index to filter by user...');
+        await messagesIndex.updateSettings({
+          filterableAttributes: ['user'],
+        });
+        logger.info('[indexSync] Messages index configured for user filtering');
+        settingsUpdated = true;
+      }
+
+      // Check if existing documents have user field indexed
+      try {
+        const searchResult = await messagesIndex.search('', { limit: 1 });
+        if (searchResult.hits.length > 0 && !searchResult.hits[0].user) {
+          logger.info(
+            '[indexSync] Existing messages missing user field, will clean up orphaned documents...',
+          );
+          hasOrphanedDocs = true;
+        }
+      } catch (searchError) {
+        logger.debug('[indexSync] Could not check message documents:', searchError.message);
+      }
+    } catch (error) {
+      if (error.code !== 'index_not_found') {
+        logger.warn('[indexSync] Could not check/update messages index settings:', error.message);
+      }
+    }
+
+    // Check and update conversations index
+    try {
+      const convosIndex = client.index('convos');
+      const settings = await convosIndex.getSettings();
+
+      if (!settings.filterableAttributes || !settings.filterableAttributes.includes('user')) {
+        logger.info('[indexSync] Configuring convos index to filter by user...');
+        await convosIndex.updateSettings({
+          filterableAttributes: ['user'],
+        });
+        logger.info('[indexSync] Convos index configured for user filtering');
+        settingsUpdated = true;
+      }
+
+      // Check if existing documents have user field indexed
+      try {
+        const searchResult = await convosIndex.search('', { limit: 1 });
+        if (searchResult.hits.length > 0 && !searchResult.hits[0].user) {
+          logger.info(
+            '[indexSync] Existing conversations missing user field, will clean up orphaned documents...',
+          );
+          hasOrphanedDocs = true;
+        }
+      } catch (searchError) {
+        logger.debug('[indexSync] Could not check conversation documents:', searchError.message);
+      }
+    } catch (error) {
+      if (error.code !== 'index_not_found') {
+        logger.warn('[indexSync] Could not check/update convos index settings:', error.message);
+      }
+    }
+
+    // If either index has orphaned documents, clean them up (but don't force resync)
+    if (hasOrphanedDocs) {
+      try {
+        const messagesIndex = client.index('messages');
+        await deleteDocumentsWithoutUserField(messagesIndex, 'messages');
+      } catch (error) {
+        logger.debug('[indexSync] Could not clean up messages:', error.message);
+      }
+
+      try {
+        const convosIndex = client.index('convos');
+        await deleteDocumentsWithoutUserField(convosIndex, 'convos');
+      } catch (error) {
+        logger.debug('[indexSync] Could not clean up convos:', error.message);
+      }
+
+      logger.info('[indexSync] Orphaned documents cleaned up without forcing resync.');
+    }
+
+    if (settingsUpdated) {
+      logger.info('[indexSync] Index settings updated. Full re-sync will be triggered.');
+    }
+  } catch (error) {
+    logger.error('[indexSync] Error ensuring filterable attributes:', error);
+  }
+
+  return { settingsUpdated, orphanedDocsFound: hasOrphanedDocs };
+}
+
+/**
+ * Performs the actual sync operations for messages and conversations
+ * @param {FlowStateManager} flowManager - Flow state manager instance
+ * @param {string} flowId - Flow identifier
+ * @param {string} flowType - Flow type
+ */
+async function performSync(flowManager, flowId, flowType) {
+  try {
+    const client = MeiliSearchClient.getInstance();
+
+    const { status } = await client.health();
+    if (status !== 'available') {
+      throw new Error('Meilisearch not available');
+    }
+
+    if (indexingDisabled === true) {
+      logger.info('[indexSync] Indexing is disabled, skipping...');
+      return { messagesSync: false, convosSync: false };
+    }
+
+    /** Ensures indexes have proper filterable attributes configured */
+    const { settingsUpdated, orphanedDocsFound: _orphanedDocsFound } =
+      await ensureFilterableAttributes(client);
+
+    let messagesSync = false;
+    let convosSync = false;
+
+    // Only reset flags if settings were actually updated (not just for orphaned doc cleanup)
+    if (settingsUpdated) {
+      logger.info(
+        '[indexSync] Settings updated. Forcing full re-sync to reindex with new configuration...',
+      );
+
+      // Reset sync flags to force full re-sync
+      await Message.collection.updateMany({ _meiliIndex: true }, { $set: { _meiliIndex: false } });
+      await Conversation.collection.updateMany(
+        { _meiliIndex: true },
+        { $set: { _meiliIndex: false } },
+      );
+    }
+
+    // Check if we need to sync messages
+    const messageProgress = await Message.getSyncProgress();
+    if (!messageProgress.isComplete || settingsUpdated) {
+      logger.info(
+        `[indexSync] Messages need syncing: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments} indexed`,
+      );
+
+      // Check if we should do a full sync or incremental
+      const messageCount = await Message.countDocuments();
+      const messagesIndexed = messageProgress.totalProcessed;
+      const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);
+
+      if (messageCount - messagesIndexed > syncThreshold) {
+        logger.info('[indexSync] Starting full message sync due to large difference');
+        await Message.syncWithMeili();
+        messagesSync = true;
+      } else if (messageCount !== messagesIndexed) {
+        logger.warn('[indexSync] Messages out of sync, performing incremental sync');
+        await Message.syncWithMeili();
+        messagesSync = true;
+      }
+    } else {
+      logger.info(
+        `[indexSync] Messages are fully synced: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments}`,
+      );
+    }
+
+    // Check if we need to sync conversations
+    const convoProgress = await Conversation.getSyncProgress();
+    if (!convoProgress.isComplete || settingsUpdated) {
+      logger.info(
+        `[indexSync] Conversations need syncing: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments} indexed`,
+      );
+
+      const convoCount = await Conversation.countDocuments();
+      const convosIndexed = convoProgress.totalProcessed;
+      const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);
+
+      if (convoCount - convosIndexed > syncThreshold) {
+        logger.info('[indexSync] Starting full conversation sync due to large difference');
+        await Conversation.syncWithMeili();
+        convosSync = true;
+      } else if (convoCount !== convosIndexed) {
+        logger.warn('[indexSync] Convos out of sync, performing incremental sync');
+        await Conversation.syncWithMeili();
+        convosSync = true;
+      }
+    } else {
+      logger.info(
+        `[indexSync] Conversations are fully synced: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments}`,
+      );
+    }
+
+    return { messagesSync, convosSync };
+  } finally {
+    if (indexingDisabled === true) {
+      logger.info('[indexSync] Indexing is disabled, skipping cleanup...');
+    } else if (flowManager && flowId && flowType) {
+      try {
+        await flowManager.deleteFlow(flowId, flowType);
+        logger.debug('[indexSync] Flow state cleaned up');
+      } catch (cleanupErr) {
+        logger.debug('[indexSync] Could not clean up flow state:', cleanupErr.message);
+      }
+    }
+  }
 }

 /**
@@ -116,24 +300,26 @@ async function indexSync() {

   logger.info('[indexSync] Starting index synchronization check...');

+  // Get or create FlowStateManager instance
+  const flowsCache = getLogStores(CacheKeys.FLOWS);
+  if (!flowsCache) {
+    logger.warn('[indexSync] Flows cache not available, falling back to direct sync');
+    return await performSync(null, null, null);
+  }
+
+  const flowManager = new FlowStateManager(flowsCache, {
+    ttl: 60000 * 10, // 10 minutes TTL for sync operations
+  });
+
+  // Use a unique flow ID for the sync operation
+  const flowId = 'meili-index-sync';
+  const flowType = 'MEILI_SYNC';
+
   try {
-    // Get or create FlowStateManager instance
-    const flowsCache = getLogStores(CacheKeys.FLOWS);
-    if (!flowsCache) {
-      logger.warn('[indexSync] Flows cache not available, falling back to direct sync');
-      return await performSync();
-    }
-
-    const flowManager = new FlowStateManager(flowsCache, {
-      ttl: 60000 * 10, // 10 minutes TTL for sync operations
-    });
-
-    // Use a unique flow ID for the sync operation
-    const flowId = 'meili-index-sync';
-    const flowType = 'MEILI_SYNC';
-
     // This will only execute the handler if no other instance is running the sync
-    const result = await flowManager.createFlowWithHandler(flowId, flowType, performSync);
+    const result = await flowManager.createFlowWithHandler(flowId, flowType, () =>
+      performSync(flowManager, flowId, flowType),
+    );

     if (result.messagesSync || result.convosSync) {
       logger.info('[indexSync] Sync completed successfully');
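The refactor above threads the flow identifiers into performSync so that its finally block can release the flow state itself. Reduced to its essentials, the locking contract looks like this (names taken from the diff; FlowStateManager's exact semantics live in @librechat/api, so treat this as a sketch):

// Only one instance runs the handler for a given (flowId, flowType) at a time;
// concurrent callers await the same stored result instead of starting a second sync.
const result = await flowManager.createFlowWithHandler('meili-index-sync', 'MEILI_SYNC', () =>
  performSync(flowManager, 'meili-index-sync', 'MEILI_SYNC'),
);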
@@ -3,6 +3,7 @@ module.exports = {
   clearMocks: true,
   roots: ['<rootDir>'],
   coverageDirectory: 'coverage',
+  testTimeout: 30000, // 30 seconds timeout for all tests
   setupFiles: [
     './test/jestSetup.js',
     './test/__mocks__/logger.js',
@@ -1,18 +1,17 @@
 const mongoose = require('mongoose');
 const crypto = require('node:crypto');
 const { logger } = require('@librechat/data-schemas');
-const { SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider');
+const { ResourceType, SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider');
-const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_delimiter } =
+const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_all, mcp_delimiter } =
   require('librechat-data-provider').Constants;
-const { CONFIG_STORE, STARTUP_CONFIG } = require('librechat-data-provider').CacheKeys;
 const {
-  getProjectByName,
-  addAgentIdsToProject,
-  removeAgentIdsFromProject,
   removeAgentFromAllProjects,
+  removeAgentIdsFromProject,
+  addAgentIdsToProject,
+  getProjectByName,
 } = require('./Project');
-const { getCachedTools } = require('~/server/services/Config');
-const getLogStores = require('~/cache/getLogStores');
+const { removeAllPermissions } = require('~/server/services/PermissionService');
+const { getMCPServerTools } = require('~/server/services/Config');
 const { getActions } = require('./Action');
 const { Agent } = require('~/db/models');

@@ -23,7 +22,7 @@ const { Agent } = require('~/db/models');
  * @throws {Error} If the agent creation fails.
  */
 const createAgent = async (agentData) => {
-  const { author, ...versionData } = agentData;
+  const { author: _author, ...versionData } = agentData;
   const timestamp = new Date();
   const initialAgentData = {
     ...agentData,
@@ -34,7 +33,9 @@ const createAgent = async (agentData) => {
         updatedAt: timestamp,
       },
     ],
+    category: agentData.category || 'general',
   };

   return (await Agent.create(initialAgentData)).toObject();
 };

@@ -48,6 +49,14 @@ const createAgent = async (agentData) => {
  */
 const getAgent = async (searchParameter) => await Agent.findOne(searchParameter).lean();

+/**
+ * Get multiple agent documents based on the provided search parameters.
+ *
+ * @param {Object} searchParameter - The search parameters to find agents.
+ * @returns {Promise<Agent[]>} Array of agent documents as plain objects.
+ */
+const getAgents = async (searchParameter) => await Agent.find(searchParameter).lean();
+
 /**
  * Load an agent based on the provided ID
  *
@@ -60,8 +69,6 @@ const getAgent = async (searchParameter) => await Agent.findOne(searchParameter)
  */
 const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _m }) => {
   const { model, ...model_parameters } = _m;
-  /** @type {Record<string, FunctionTool>} */
-  const availableTools = await getCachedTools({ userId: req.user.id, includeGlobal: true });
   /** @type {TEphemeralAgent | null} */
   const ephemeralAgent = req.body.ephemeralAgent;
   const mcpServers = new Set(ephemeralAgent?.mcp);
@@ -77,15 +84,20 @@ const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _
     tools.push(Tools.web_search);
   }

+  const addedServers = new Set();
   if (mcpServers.size > 0) {
-    for (const toolName of Object.keys(availableTools)) {
-      if (!toolName.includes(mcp_delimiter)) {
+    for (const mcpServer of mcpServers) {
+      if (addedServers.has(mcpServer)) {
         continue;
       }
-      const mcpServer = toolName.split(mcp_delimiter)?.[1];
-      if (mcpServer && mcpServers.has(mcpServer)) {
-        tools.push(toolName);
+      const serverTools = await getMCPServerTools(mcpServer);
+      if (!serverTools) {
+        tools.push(`${mcp_all}${mcp_delimiter}${mcpServer}`);
+        addedServers.add(mcpServer);
+        continue;
       }
+      tools.push(...Object.keys(serverTools));
+      addedServers.add(mcpServer);
     }
   }

@@ -131,29 +143,7 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
   }

   agent.version = agent.versions ? agent.versions.length : 0;
-  if (agent.author.toString() === req.user.id) {
-    return agent;
-  }
-
-  if (!agent.projectIds) {
-    return null;
-  }
-
-  const cache = getLogStores(CONFIG_STORE);
-  /** @type {TStartupConfig} */
-  const cachedStartupConfig = await cache.get(STARTUP_CONFIG);
-  let { instanceProjectId } = cachedStartupConfig ?? {};
-  if (!instanceProjectId) {
-    instanceProjectId = (await getProjectByName(GLOBAL_PROJECT_NAME, '_id'))._id.toString();
-  }
-
-  for (const projectObjectId of agent.projectIds) {
-    const projectId = projectObjectId.toString();
-    if (projectId === instanceProjectId) {
-      return agent;
-    }
-  }
+  return agent;
 };

 /**
@@ -183,7 +173,7 @@ const isDuplicateVersion = (updateData, currentData, versions, actionsHash = null) => {
     'actionsHash', // Exclude actionsHash from direct comparison
   ];

-  const { $push, $pull, $addToSet, ...directUpdates } = updateData;
+  const { $push: _$push, $pull: _$pull, $addToSet: _$addToSet, ...directUpdates } = updateData;

   if (Object.keys(directUpdates).length === 0 && !actionsHash) {
     return null;
@@ -202,54 +192,116 @@ const isDuplicateVersion = (updateData, currentData, versions, actionsHash = null) => {

   let isMatch = true;
   for (const field of importantFields) {
-    if (!wouldBeVersion[field] && !lastVersion[field]) {
+    const wouldBeValue = wouldBeVersion[field];
+    const lastVersionValue = lastVersion[field];
+
+    // Skip if both are undefined/null
+    if (!wouldBeValue && !lastVersionValue) {
       continue;
     }

-    if (Array.isArray(wouldBeVersion[field]) && Array.isArray(lastVersion[field])) {
-      if (wouldBeVersion[field].length !== lastVersion[field].length) {
+    // Handle arrays
+    if (Array.isArray(wouldBeValue) || Array.isArray(lastVersionValue)) {
+      // Normalize: treat undefined/null as empty array for comparison
+      let wouldBeArr;
+      if (Array.isArray(wouldBeValue)) {
+        wouldBeArr = wouldBeValue;
+      } else if (wouldBeValue == null) {
+        wouldBeArr = [];
+      } else {
+        wouldBeArr = [wouldBeValue];
+      }
+
+      let lastVersionArr;
+      if (Array.isArray(lastVersionValue)) {
+        lastVersionArr = lastVersionValue;
+      } else if (lastVersionValue == null) {
+        lastVersionArr = [];
+      } else {
+        lastVersionArr = [lastVersionValue];
+      }
+
+      if (wouldBeArr.length !== lastVersionArr.length) {
         isMatch = false;
         break;
       }
+
       // Special handling for projectIds (MongoDB ObjectIds)
       if (field === 'projectIds') {
-        const wouldBeIds = wouldBeVersion[field].map((id) => id.toString()).sort();
-        const versionIds = lastVersion[field].map((id) => id.toString()).sort();
+        const wouldBeIds = wouldBeArr.map((id) => id.toString()).sort();
+        const versionIds = lastVersionArr.map((id) => id.toString()).sort();

         if (!wouldBeIds.every((id, i) => id === versionIds[i])) {
           isMatch = false;
           break;
         }
       }
-      // Handle arrays of objects like tool_kwargs
-      else if (typeof wouldBeVersion[field][0] === 'object' && wouldBeVersion[field][0] !== null) {
-        const sortedWouldBe = [...wouldBeVersion[field]].map((item) => JSON.stringify(item)).sort();
-        const sortedVersion = [...lastVersion[field]].map((item) => JSON.stringify(item)).sort();
+      // Handle arrays of objects
+      else if (
+        wouldBeArr.length > 0 &&
+        typeof wouldBeArr[0] === 'object' &&
+        wouldBeArr[0] !== null
+      ) {
+        const sortedWouldBe = [...wouldBeArr].map((item) => JSON.stringify(item)).sort();
+        const sortedVersion = [...lastVersionArr].map((item) => JSON.stringify(item)).sort();

         if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
           isMatch = false;
           break;
         }
       } else {
-        const sortedWouldBe = [...wouldBeVersion[field]].sort();
-        const sortedVersion = [...lastVersion[field]].sort();
+        const sortedWouldBe = [...wouldBeArr].sort();
+        const sortedVersion = [...lastVersionArr].sort();

         if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
           isMatch = false;
           break;
         }
       }
-    } else if (field === 'model_parameters') {
-      const wouldBeParams = wouldBeVersion[field] || {};
-      const lastVersionParams = lastVersion[field] || {};
-      if (JSON.stringify(wouldBeParams) !== JSON.stringify(lastVersionParams)) {
+    }
+    // Handle objects
+    else if (typeof wouldBeValue === 'object' && wouldBeValue !== null) {
+      const lastVersionObj =
+        typeof lastVersionValue === 'object' && lastVersionValue !== null ? lastVersionValue : {};
+
+      // For empty objects, normalize the comparison
+      const wouldBeKeys = Object.keys(wouldBeValue);
+      const lastVersionKeys = Object.keys(lastVersionObj);
+
+      // If both are empty objects, they're equal
+      if (wouldBeKeys.length === 0 && lastVersionKeys.length === 0) {
+        continue;
+      }
+
+      // Otherwise do a deep comparison
+      if (JSON.stringify(wouldBeValue) !== JSON.stringify(lastVersionObj)) {
+        isMatch = false;
+        break;
+      }
+    }
+    // Handle primitive values
+    else {
+      // For primitives, handle the case where one is undefined and the other is a default value
+      if (wouldBeValue !== lastVersionValue) {
+        // Special handling for boolean false vs undefined
+        if (
+          typeof wouldBeValue === 'boolean' &&
+          wouldBeValue === false &&
+          lastVersionValue === undefined
+        ) {
+          continue;
+        }
+        // Special handling for empty string vs undefined
+        if (
+          typeof wouldBeValue === 'string' &&
+          wouldBeValue === '' &&
+          lastVersionValue === undefined
+        ) {
+          continue;
+        }
         isMatch = false;
         break;
       }
-    } else if (wouldBeVersion[field] !== lastVersion[field]) {
-      isMatch = false;
-      break;
     }
   }

@@ -278,7 +330,14 @@ const updateAgent = async (searchParameter, updateData, options = {}) => {

   const currentAgent = await Agent.findOne(searchParameter);
   if (currentAgent) {
-    const { __v, _id, id, versions, author, ...versionData } = currentAgent.toObject();
+    const {
+      __v,
+      _id,
+      id: __id,
+      versions,
+      author: _author,
+      ...versionData
+    } = currentAgent.toObject();
     const { $push, $pull, $addToSet, ...directUpdates } = updateData;

     let actionsHash = null;
@@ -458,12 +517,117 @@ const deleteAgent = async (searchParameter) => {
   const agent = await Agent.findOneAndDelete(searchParameter);
   if (agent) {
     await removeAgentFromAllProjects(agent.id);
+    await removeAllPermissions({
+      resourceType: ResourceType.AGENT,
+      resourceId: agent._id,
+    });
   }
   return agent;
 };

+/**
+ * Get agents by accessible IDs with optional cursor-based pagination.
+ * @param {Object} params - The parameters for getting accessible agents.
+ * @param {Array} [params.accessibleIds] - Array of agent ObjectIds the user has ACL access to.
+ * @param {Object} [params.otherParams] - Additional query parameters (including author filter).
+ * @param {number} [params.limit] - Number of agents to return (max 100). If not provided, returns all agents.
+ * @param {string} [params.after] - Cursor for pagination - get agents after this cursor. // base64 encoded JSON string with updatedAt and _id.
+ * @returns {Promise<Object>} A promise that resolves to an object containing the agents data and pagination info.
+ */
+const getListAgentsByAccess = async ({
+  accessibleIds = [],
+  otherParams = {},
+  limit = null,
+  after = null,
+}) => {
+  const isPaginated = limit !== null && limit !== undefined;
+  const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;
+
+  // Build base query combining ACL accessible agents with other filters
+  const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };
+
+  // Add cursor condition
+  if (after) {
+    try {
+      const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
+      const { updatedAt, _id } = cursor;
+
+      const cursorCondition = {
+        $or: [
+          { updatedAt: { $lt: new Date(updatedAt) } },
+          { updatedAt: new Date(updatedAt), _id: { $gt: new mongoose.Types.ObjectId(_id) } },
+        ],
+      };
+
+      // Merge cursor condition with base query
+      if (Object.keys(baseQuery).length > 0) {
+        baseQuery.$and = [{ ...baseQuery }, cursorCondition];
+        // Remove the original conditions from baseQuery to avoid duplication
+        Object.keys(baseQuery).forEach((key) => {
+          if (key !== '$and') delete baseQuery[key];
+        });
+      } else {
+        Object.assign(baseQuery, cursorCondition);
+      }
+    } catch (error) {
+      logger.warn('Invalid cursor:', error.message);
+    }
+  }
+
+  let query = Agent.find(baseQuery, {
+    id: 1,
+    _id: 1,
+    name: 1,
+    avatar: 1,
+    author: 1,
+    projectIds: 1,
+    description: 1,
+    updatedAt: 1,
+    category: 1,
+    support_contact: 1,
+    is_promoted: 1,
+  }).sort({ updatedAt: -1, _id: 1 });
+
+  // Only apply limit if pagination is requested
+  if (isPaginated) {
+    query = query.limit(normalizedLimit + 1);
+  }
+
+  const agents = await query.lean();
+
+  const hasMore = isPaginated ? agents.length > normalizedLimit : false;
+  const data = (isPaginated ? agents.slice(0, normalizedLimit) : agents).map((agent) => {
+    if (agent.author) {
+      agent.author = agent.author.toString();
+    }
+    return agent;
+  });
+
+  // Generate next cursor only if paginated
+  let nextCursor = null;
+  if (isPaginated && hasMore && data.length > 0) {
+    const lastAgent = agents[normalizedLimit - 1];
+    nextCursor = Buffer.from(
+      JSON.stringify({
+        updatedAt: lastAgent.updatedAt.toISOString(),
+        _id: lastAgent._id.toString(),
+      }),
+    ).toString('base64');
+  }
+
+  return {
+    object: 'list',
+    data,
+    first_id: data.length > 0 ? data[0].id : null,
+    last_id: data.length > 0 ? data[data.length - 1].id : null,
+    has_more: hasMore,
+    after: nextCursor,
+  };
+};

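For reference, the opaque `after` cursor used by getListAgentsByAccess round-trips as base64-encoded JSON of the two sort keys, which is what keeps the (updatedAt desc, _id asc) keyset pagination stable across pages. A small standalone sketch of that encoding:

// Encode the last returned agent into a cursor (as the implementation above does)
const encodeCursor = (agent) =>
  Buffer.from(
    JSON.stringify({ updatedAt: agent.updatedAt.toISOString(), _id: agent._id.toString() }),
  ).toString('base64');

// Decode an incoming cursor back into its sort keys
const decodeCursor = (after) => JSON.parse(Buffer.from(after, 'base64').toString('utf8'));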
/**
|
/**
|
||||||
* Get all agents.
|
* Get all agents.
|
||||||
|
* @deprecated Use getListAgentsByAccess for ACL-aware agent listing
|
||||||
* @param {Object} searchParameter - The search parameters to find matching agents.
|
* @param {Object} searchParameter - The search parameters to find matching agents.
|
||||||
* @param {string} searchParameter.author - The user ID of the agent's author.
|
* @param {string} searchParameter.author - The user ID of the agent's author.
|
||||||
* @returns {Promise<Object>} A promise that resolves to an object containing the agents data and pagination info.
|
* @returns {Promise<Object>} A promise that resolves to an object containing the agents data and pagination info.
|
||||||
@@ -482,13 +646,15 @@ const getListAgents = async (searchParameter) => {
|
|||||||
const agents = (
|
const agents = (
|
||||||
await Agent.find(query, {
|
await Agent.find(query, {
|
||||||
id: 1,
|
id: 1,
|
||||||
_id: 0,
|
_id: 1,
|
||||||
name: 1,
|
name: 1,
|
||||||
avatar: 1,
|
avatar: 1,
|
||||||
author: 1,
|
author: 1,
|
||||||
projectIds: 1,
|
projectIds: 1,
|
||||||
description: 1,
|
description: 1,
|
||||||
|
// @deprecated - isCollaborative replaced by ACL permissions
|
||||||
isCollaborative: 1,
|
isCollaborative: 1,
|
||||||
|
category: 1,
|
||||||
}).lean()
|
}).lean()
|
||||||
).map((agent) => {
|
).map((agent) => {
|
||||||
if (agent.author?.toString() !== author) {
|
if (agent.author?.toString() !== author) {
|
||||||
@@ -517,7 +683,7 @@ const getListAgents = async (searchParameter) => {
|
|||||||
* This function also updates the corresponding projects to include or exclude the agent ID.
|
* This function also updates the corresponding projects to include or exclude the agent ID.
|
||||||
*
|
*
|
||||||
* @param {Object} params - Parameters for updating the agent's projects.
|
* @param {Object} params - Parameters for updating the agent's projects.
|
||||||
* @param {MongoUser} params.user - Parameters for updating the agent's projects.
|
* @param {IUser} params.user - Parameters for updating the agent's projects.
|
||||||
* @param {string} params.agentId - The ID of the agent to update.
|
* @param {string} params.agentId - The ID of the agent to update.
|
||||||
* @param {string[]} [params.projectIds] - Array of project IDs to add to the agent.
|
* @param {string[]} [params.projectIds] - Array of project IDs to add to the agent.
|
||||||
* @param {string[]} [params.removeProjectIds] - Array of project IDs to remove from the agent.
|
* @param {string[]} [params.removeProjectIds] - Array of project IDs to remove from the agent.
|
||||||
@@ -654,6 +820,14 @@ const generateActionMetadataHash = async (actionIds, actions) => {
|
|||||||
|
|
||||||
return hashHex;
|
return hashHex;
|
||||||
};
|
};
|
||||||
|
/**
|
||||||
|
* Counts the number of promoted agents.
|
||||||
|
* @returns {Promise<number>} - The count of promoted agents
|
||||||
|
*/
|
||||||
|
const countPromotedAgents = async () => {
|
||||||
|
const count = await Agent.countDocuments({ is_promoted: true });
|
||||||
|
return count;
|
||||||
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Load a default agent based on the endpoint
|
* Load a default agent based on the endpoint
|
||||||
@@ -663,6 +837,7 @@ const generateActionMetadataHash = async (actionIds, actions) => {
|
|||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
getAgent,
|
getAgent,
|
||||||
|
getAgents,
|
||||||
loadAgent,
|
loadAgent,
|
||||||
createAgent,
|
createAgent,
|
||||||
updateAgent,
|
updateAgent,
|
||||||
@@ -671,6 +846,8 @@ module.exports = {
   revertAgentVersion,
   updateAgentProjects,
   addAgentResourceFile,
+  getListAgentsByAccess,
   removeAgentResourceFiles,
   generateActionMetadataHash,
+  countPromotedAgents,
 };
@@ -8,12 +8,14 @@ process.env.CREDS_IV = '0123456789abcdef';
 
 jest.mock('~/server/services/Config', () => ({
   getCachedTools: jest.fn(),
+  getMCPServerTools: jest.fn(),
 }));
 
 const mongoose = require('mongoose');
 const { v4: uuidv4 } = require('uuid');
 const { agentSchema } = require('@librechat/data-schemas');
 const { MongoMemoryServer } = require('mongodb-memory-server');
+const { AccessRoleIds, ResourceType, PrincipalType } = require('librechat-data-provider');
 const {
   getAgent,
   loadAgent,
@@ -21,13 +23,16 @@ const {
   updateAgent,
   deleteAgent,
   getListAgents,
+  getListAgentsByAccess,
+  revertAgentVersion,
   updateAgentProjects,
   addAgentResourceFile,
   removeAgentResourceFiles,
   generateActionMetadataHash,
-  revertAgentVersion,
 } = require('./Agent');
-const { getCachedTools } = require('~/server/services/Config');
+const permissionService = require('~/server/services/PermissionService');
+const { getCachedTools, getMCPServerTools } = require('~/server/services/Config');
+const { AclEntry } = require('~/db/models');
 
 /**
  * @type {import('mongoose').Model<import('@librechat/data-schemas').IAgent>}
@@ -407,12 +412,26 @@ describe('models/Agent', () => {
 
   describe('Agent CRUD Operations', () => {
     let mongoServer;
+    let AccessRole;
+
     beforeAll(async () => {
      mongoServer = await MongoMemoryServer.create();
       const mongoUri = mongoServer.getUri();
       Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
       await mongoose.connect(mongoUri);
+
+      // Initialize models
+      const dbModels = require('~/db/models');
+      AccessRole = dbModels.AccessRole;
+
+      // Create necessary access roles for agents
+      await AccessRole.create({
+        accessRoleId: AccessRoleIds.AGENT_OWNER,
+        name: 'Owner',
+        description: 'Full control over agents',
+        resourceType: ResourceType.AGENT,
+        permBits: 15, // VIEW | EDIT | DELETE | SHARE
+      });
     }, 20000);
 
     afterAll(async () => {
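`permBits: 15` reads as a bitmask: the inline comment "VIEW | EDIT | DELETE | SHARE" implies four single-bit flags. A sketch under that assumption — the actual constant values live in the permission service, not in this diff:

```js
// Assumed bit assignments; only "permBits: 15 // VIEW | EDIT | DELETE | SHARE" appears in the diff.
const PermBits = { VIEW: 1, EDIT: 2, DELETE: 4, SHARE: 8 };

// 1 | 2 | 4 | 8 === 15, matching the seeded AGENT_OWNER role above.
const ownerBits = PermBits.VIEW | PermBits.EDIT | PermBits.DELETE | PermBits.SHARE;

// Individual capabilities are then simple bitwise tests:
const canShare = (ownerBits & PermBits.SHARE) !== 0; // true
```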
@@ -468,6 +487,51 @@ describe('models/Agent', () => {
       expect(agentAfterDelete).toBeNull();
     });
 
+    test('should remove ACL entries when deleting an agent', async () => {
+      const agentId = `agent_${uuidv4()}`;
+      const authorId = new mongoose.Types.ObjectId();
+
+      // Create agent
+      const agent = await createAgent({
+        id: agentId,
+        name: 'Agent With Permissions',
+        provider: 'test',
+        model: 'test-model',
+        author: authorId,
+      });
+
+      // Grant permissions (simulating sharing)
+      await permissionService.grantPermission({
+        principalType: PrincipalType.USER,
+        principalId: authorId,
+        resourceType: ResourceType.AGENT,
+        resourceId: agent._id,
+        accessRoleId: AccessRoleIds.AGENT_OWNER,
+        grantedBy: authorId,
+      });
+
+      // Verify ACL entry exists
+      const aclEntriesBefore = await AclEntry.find({
+        resourceType: ResourceType.AGENT,
+        resourceId: agent._id,
+      });
+      expect(aclEntriesBefore).toHaveLength(1);
+
+      // Delete the agent
+      await deleteAgent({ id: agentId });
+
+      // Verify agent is deleted
+      const agentAfterDelete = await getAgent({ id: agentId });
+      expect(agentAfterDelete).toBeNull();
+
+      // Verify ACL entries are removed
+      const aclEntriesAfter = await AclEntry.find({
+        resourceType: ResourceType.AGENT,
+        resourceId: agent._id,
+      });
+      expect(aclEntriesAfter).toHaveLength(0);
+    });
+
     test('should list agents by author', async () => {
       const authorId = new mongoose.Types.ObjectId();
       const otherAuthorId = new mongoose.Types.ObjectId();
@@ -1237,6 +1301,335 @@ describe('models/Agent', () => {
       expect(secondUpdate.versions).toHaveLength(3);
     });
 
+    test('should detect changes in support_contact fields', async () => {
+      const agentId = `agent_${uuidv4()}`;
+      const authorId = new mongoose.Types.ObjectId();
+
+      // Create agent with initial support_contact
+      await createAgent({
+        id: agentId,
+        name: 'Agent with Support Contact',
+        provider: 'test',
+        model: 'test-model',
+        author: authorId,
+        support_contact: {
+          name: 'Initial Support',
+          email: 'initial@support.com',
+        },
+      });
+
+      // Update support_contact name only
+      const firstUpdate = await updateAgent(
+        { id: agentId },
+        {
+          support_contact: {
+            name: 'Updated Support',
+            email: 'initial@support.com',
+          },
+        },
+      );
+
+      expect(firstUpdate.versions).toHaveLength(2);
+      expect(firstUpdate.support_contact.name).toBe('Updated Support');
+      expect(firstUpdate.support_contact.email).toBe('initial@support.com');
+
+      // Update support_contact email only
+      const secondUpdate = await updateAgent(
+        { id: agentId },
+        {
+          support_contact: {
+            name: 'Updated Support',
+            email: 'updated@support.com',
+          },
+        },
+      );
+
+      expect(secondUpdate.versions).toHaveLength(3);
+      expect(secondUpdate.support_contact.email).toBe('updated@support.com');
+
+      // Try to update with same support_contact - should be detected as duplicate but return successfully
+      const duplicateUpdate = await updateAgent(
+        { id: agentId },
+        {
+          support_contact: {
+            name: 'Updated Support',
+            email: 'updated@support.com',
+          },
+        },
+      );
+
+      // Should not create a new version
+      expect(duplicateUpdate.versions).toHaveLength(3);
+      expect(duplicateUpdate.version).toBe(3);
+      expect(duplicateUpdate.support_contact.email).toBe('updated@support.com');
+    });
+
+    test('should handle support_contact from empty to populated', async () => {
+      const agentId = `agent_${uuidv4()}`;
+      const authorId = new mongoose.Types.ObjectId();
+
+      // Create agent without support_contact
+      const agent = await createAgent({
+        id: agentId,
+        name: 'Agent without Support',
+        provider: 'test',
+        model: 'test-model',
+        author: authorId,
+      });
+
+      // Verify support_contact is undefined since it wasn't provided
+      expect(agent.support_contact).toBeUndefined();
+
+      // Update to add support_contact
+      const updated = await updateAgent(
+        { id: agentId },
+        {
+          support_contact: {
+            name: 'New Support Team',
+            email: 'support@example.com',
+          },
+        },
+      );
+
+      expect(updated.versions).toHaveLength(2);
+      expect(updated.support_contact.name).toBe('New Support Team');
+      expect(updated.support_contact.email).toBe('support@example.com');
+    });
+
+    test('should handle support_contact edge cases in isDuplicateVersion', async () => {
+      const agentId = `agent_${uuidv4()}`;
+      const authorId = new mongoose.Types.ObjectId();
+
+      // Create agent with support_contact
+      await createAgent({
+        id: agentId,
+        name: 'Edge Case Agent',
+        provider: 'test',
+        model: 'test-model',
+        author: authorId,
+        support_contact: {
+          name: 'Support',
+          email: 'support@test.com',
+        },
+      });
+
+      // Update to empty support_contact
+      const emptyUpdate = await updateAgent(
+        { id: agentId },
+        {
+          support_contact: {},
+        },
+      );
+
+      expect(emptyUpdate.versions).toHaveLength(2);
+      expect(emptyUpdate.support_contact).toEqual({});
+
+      // Update back to populated support_contact
+      const repopulated = await updateAgent(
+        { id: agentId },
+        {
+          support_contact: {
+            name: 'Support',
+            email: 'support@test.com',
+          },
+        },
+      );
+
+      expect(repopulated.versions).toHaveLength(3);
+
+      // Verify all versions have correct support_contact
+      const finalAgent = await getAgent({ id: agentId });
+      expect(finalAgent.versions[0].support_contact).toEqual({
+        name: 'Support',
+        email: 'support@test.com',
+      });
+      expect(finalAgent.versions[1].support_contact).toEqual({});
+      expect(finalAgent.versions[2].support_contact).toEqual({
+        name: 'Support',
+        email: 'support@test.com',
+      });
+    });
+
+    test('should preserve support_contact in version history', async () => {
+      const agentId = `agent_${uuidv4()}`;
+      const authorId = new mongoose.Types.ObjectId();
+
+      // Create agent
+      await createAgent({
+        id: agentId,
+        name: 'Version History Test',
+        provider: 'test',
+        model: 'test-model',
+        author: authorId,
+        support_contact: {
+          name: 'Initial Contact',
+          email: 'initial@test.com',
+        },
+      });
+
+      // Multiple updates with different support_contact values
+      await updateAgent(
+        { id: agentId },
+        {
+          support_contact: {
+            name: 'Second Contact',
+            email: 'second@test.com',
+          },
+        },
+      );
+
+      await updateAgent(
+        { id: agentId },
+        {
+          support_contact: {
+            name: 'Third Contact',
+            email: 'third@test.com',
+          },
+        },
+      );
+
+      const finalAgent = await getAgent({ id: agentId });
+
+      // Verify version history
+      expect(finalAgent.versions).toHaveLength(3);
+      expect(finalAgent.versions[0].support_contact).toEqual({
+        name: 'Initial Contact',
+        email: 'initial@test.com',
+      });
+      expect(finalAgent.versions[1].support_contact).toEqual({
+        name: 'Second Contact',
+        email: 'second@test.com',
+      });
+      expect(finalAgent.versions[2].support_contact).toEqual({
+        name: 'Third Contact',
+        email: 'third@test.com',
+      });
+
+      // Current state should match last version
+      expect(finalAgent.support_contact).toEqual({
+        name: 'Third Contact',
+        email: 'third@test.com',
+      });
+    });
+
+    test('should handle partial support_contact updates', async () => {
+      const agentId = `agent_${uuidv4()}`;
+      const authorId = new mongoose.Types.ObjectId();
+
+      // Create agent with full support_contact
+      await createAgent({
+        id: agentId,
+        name: 'Partial Update Test',
+        provider: 'test',
+        model: 'test-model',
+        author: authorId,
+        support_contact: {
+          name: 'Original Name',
+          email: 'original@email.com',
+        },
+      });
+
+      // MongoDB's findOneAndUpdate will replace the entire support_contact object
+      // So we need to verify that partial updates still work correctly
+      const updated = await updateAgent(
+        { id: agentId },
+        {
+          support_contact: {
+            name: 'New Name',
+            email: '', // Empty email
+          },
+        },
+      );
+
+      expect(updated.versions).toHaveLength(2);
+      expect(updated.support_contact.name).toBe('New Name');
+      expect(updated.support_contact.email).toBe('');
+
+      // Verify isDuplicateVersion works with partial changes - should return successfully without creating new version
+      const duplicateUpdate = await updateAgent(
+        { id: agentId },
+        {
+          support_contact: {
+            name: 'New Name',
+            email: '',
+          },
+        },
+      );
+
+      // Should not create a new version since content is the same
+      expect(duplicateUpdate.versions).toHaveLength(2);
+      expect(duplicateUpdate.version).toBe(2);
+      expect(duplicateUpdate.support_contact.name).toBe('New Name');
+      expect(duplicateUpdate.support_contact.email).toBe('');
+    });
+
+    // Edge Cases
+    describe.each([
+      {
+        operation: 'add',
+        name: 'empty file_id',
+        needsAgent: true,
+        params: { tool_resource: 'file_search', file_id: '' },
+        shouldResolve: true,
+      },
+      {
+        operation: 'add',
+        name: 'non-existent agent',
+        needsAgent: false,
+        params: { tool_resource: 'file_search', file_id: 'file123' },
+        shouldResolve: false,
+        error: 'Agent not found for adding resource file',
+      },
+    ])('addAgentResourceFile with $name', ({ needsAgent, params, shouldResolve, error }) => {
+      test(`should ${shouldResolve ? 'resolve' : 'reject'}`, async () => {
+        const agent = needsAgent ? await createBasicAgent() : null;
+        const agent_id = needsAgent ? agent.id : `agent_${uuidv4()}`;
+
+        if (shouldResolve) {
+          await expect(addAgentResourceFile({ agent_id, ...params })).resolves.toBeDefined();
+        } else {
+          await expect(addAgentResourceFile({ agent_id, ...params })).rejects.toThrow(error);
+        }
+      });
+    });
+
+    describe.each([
+      {
+        name: 'empty files array',
+        files: [],
+        needsAgent: true,
+        shouldResolve: true,
+      },
+      {
+        name: 'non-existent tool_resource',
+        files: [{ tool_resource: 'non_existent_tool', file_id: 'file123' }],
+        needsAgent: true,
+        shouldResolve: true,
+      },
+      {
+        name: 'non-existent agent',
+        files: [{ tool_resource: 'file_search', file_id: 'file123' }],
+        needsAgent: false,
+        shouldResolve: false,
+        error: 'Agent not found for removing resource files',
+      },
+    ])('removeAgentResourceFiles with $name', ({ files, needsAgent, shouldResolve, error }) => {
+      test(`should ${shouldResolve ? 'resolve' : 'reject'}`, async () => {
+        const agent = needsAgent ? await createBasicAgent() : null;
+        const agent_id = needsAgent ? agent.id : `agent_${uuidv4()}`;
+
+        if (shouldResolve) {
+          const result = await removeAgentResourceFiles({ agent_id, files });
+          expect(result).toBeDefined();
+          if (agent) {
+            expect(result.id).toBe(agent.id);
+          }
+        } else {
+          await expect(removeAgentResourceFiles({ agent_id, files })).rejects.toThrow(error);
+        }
+      });
+    });
+
   describe('Edge Cases', () => {
     test('should handle extremely large version history', async () => {
       const agentId = `agent_${uuidv4()}`;
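These tests pin down the duplicate-version rule: re-submitting an identical `support_contact` must not append a version, while switching between `{}` and a populated contact must. An illustrative sketch of the field comparison that behavior implies — the real check is `isDuplicateVersion` inside Agent.js, which compares many more fields than this:

```js
// Illustrative only: shows why {name, email} equality gates version creation.
function sameSupportContact(a = {}, b = {}) {
  const keys = new Set([...Object.keys(a), ...Object.keys(b)]);
  for (const key of keys) {
    if (a[key] !== b[key]) {
      return false;
    }
  }
  return true;
}

sameSupportContact({ name: 'New Name', email: '' }, { name: 'New Name', email: '' }); // true -> no new version
sameSupportContact({ name: 'Support' }, {}); // false -> a new version entry is recorded
```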
@@ -1537,6 +1930,16 @@ describe('models/Agent', () => {
       another_tool: {},
     });
 
+    // Mock getMCPServerTools to return tools for each server
+    getMCPServerTools.mockImplementation(async (server) => {
+      if (server === 'server1') {
+        return { tool1_mcp_server1: {} };
+      } else if (server === 'server2') {
+        return { tool2_mcp_server2: {} };
+      }
+      return null;
+    });
+
     const mockReq = {
       user: { id: 'user123' },
       body: {
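`mockImplementation` keys the stub off the argument rather than call order, which keeps the test stable no matter how many times each server is queried. If call order were guaranteed, jest's `mockResolvedValueOnce` could express the same stubbing:

```js
// Order-dependent alternative (only valid if server1 is always queried first):
getMCPServerTools.mockResolvedValueOnce({ tool1_mcp_server1: {} }); // first call
getMCPServerTools.mockResolvedValueOnce({ tool2_mcp_server2: {} }); // second call
```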
@@ -1612,7 +2015,7 @@ describe('models/Agent', () => {
       expect(result.version).toBe(1);
     });
 
-    test('should return null when user is not author and agent has no projectIds', async () => {
+    test('should return agent even when user is not author (permissions checked at route level)', async () => {
       const authorId = new mongoose.Types.ObjectId();
       const userId = new mongoose.Types.ObjectId();
       const agentId = `agent_${uuidv4()}`;
@@ -1633,7 +2036,11 @@ describe('models/Agent', () => {
         model_parameters: { model: 'gpt-4' },
       });
 
-      expect(result).toBeFalsy();
+      // With the new permission system, loadAgent returns the agent regardless of permissions
+      // Permission checks are handled at the route level via middleware
+      expect(result).toBeTruthy();
+      expect(result.id).toBe(agentId);
+      expect(result.name).toBe('Test Agent');
     });
 
     test('should handle ephemeral agent with no MCP servers', async () => {
@@ -1717,6 +2124,14 @@ describe('models/Agent', () => {
 
       getCachedTools.mockResolvedValue(availableTools);
 
+      // Mock getMCPServerTools to return all tools for server1
+      getMCPServerTools.mockImplementation(async (server) => {
+        if (server === 'server1') {
+          return availableTools; // All 100 tools belong to server1
+        }
+        return null;
+      });
+
       const mockReq = {
         user: { id: 'user123' },
         body: {
@@ -1741,7 +2156,7 @@ describe('models/Agent', () => {
       }
     });
 
-    test('should handle loadAgent with agent from different project', async () => {
+    test('should return agent from different project (permissions checked at route level)', async () => {
       const authorId = new mongoose.Types.ObjectId();
       const userId = new mongoose.Types.ObjectId();
       const agentId = `agent_${uuidv4()}`;
@@ -1764,7 +2179,11 @@ describe('models/Agent', () => {
         model_parameters: { model: 'gpt-4' },
       });
 
-      expect(result).toBeFalsy();
+      // With the new permission system, loadAgent returns the agent regardless of permissions
+      // Permission checks are handled at the route level via middleware
+      expect(result).toBeTruthy();
+      expect(result.id).toBe(agentId);
+      expect(result.name).toBe('Project Agent');
     });
   });
 });
@@ -2254,6 +2673,17 @@ describe('models/Agent', () => {
         tool_mcp_server2: {}, // Different server
       });
 
+      // Mock getMCPServerTools to return only tools matching the server
+      getMCPServerTools.mockImplementation(async (server) => {
+        if (server === 'server1') {
+          // Only return tool that correctly matches server1 format
+          return { tool_mcp_server1: {} };
+        } else if (server === 'server2') {
+          return { tool_mcp_server2: {} };
+        }
+        return null;
+      });
+
       const mockReq = {
         user: { id: 'user123' },
         body: {
@@ -2557,6 +2987,299 @@ describe('models/Agent', () => {
   });
 });
+
+describe('Support Contact Field', () => {
+  let mongoServer;
+
+  beforeAll(async () => {
+    mongoServer = await MongoMemoryServer.create();
+    const mongoUri = mongoServer.getUri();
+    Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
+    await mongoose.connect(mongoUri);
+  }, 20000);
+
+  afterAll(async () => {
+    await mongoose.disconnect();
+    await mongoServer.stop();
+  });
+
+  beforeEach(async () => {
+    await Agent.deleteMany({});
+  });
+
+  it('should not create subdocument with ObjectId for support_contact', async () => {
+    const userId = new mongoose.Types.ObjectId();
+    const agentData = {
+      id: 'agent_test_support',
+      name: 'Test Agent',
+      provider: 'openai',
+      model: 'gpt-4',
+      author: userId,
+      support_contact: {
+        name: 'Support Team',
+        email: 'support@example.com',
+      },
+    };
+
+    // Create agent
+    const agent = await createAgent(agentData);
+
+    // Verify support_contact is stored correctly
+    expect(agent.support_contact).toBeDefined();
+    expect(agent.support_contact.name).toBe('Support Team');
+    expect(agent.support_contact.email).toBe('support@example.com');
+
+    // Verify no _id field is created in support_contact
+    expect(agent.support_contact._id).toBeUndefined();
+
+    // Fetch from database to double-check
+    const dbAgent = await Agent.findOne({ id: agentData.id });
+    expect(dbAgent.support_contact).toBeDefined();
+    expect(dbAgent.support_contact.name).toBe('Support Team');
+    expect(dbAgent.support_contact.email).toBe('support@example.com');
+    expect(dbAgent.support_contact._id).toBeUndefined();
+  });
+
+  it('should handle empty support_contact correctly', async () => {
+    const userId = new mongoose.Types.ObjectId();
+    const agentData = {
+      id: 'agent_test_empty_support',
+      name: 'Test Agent',
+      provider: 'openai',
+      model: 'gpt-4',
+      author: userId,
+      support_contact: {},
+    };
+
+    const agent = await createAgent(agentData);
+
+    // Verify empty support_contact is stored as empty object
+    expect(agent.support_contact).toEqual({});
+    expect(agent.support_contact._id).toBeUndefined();
+  });
+
+  it('should handle missing support_contact correctly', async () => {
+    const userId = new mongoose.Types.ObjectId();
+    const agentData = {
+      id: 'agent_test_no_support',
+      name: 'Test Agent',
+      provider: 'openai',
+      model: 'gpt-4',
+      author: userId,
+    };
+
+    const agent = await createAgent(agentData);
+
+    // Verify support_contact is undefined when not provided
+    expect(agent.support_contact).toBeUndefined();
+  });
+
+  describe('getListAgentsByAccess - Security Tests', () => {
+    let userA, userB;
+    let agentA1, agentA2, agentA3;
+
+    beforeEach(async () => {
+      Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
+      await Agent.deleteMany({});
+      await AclEntry.deleteMany({});
+
+      // Create two users
+      userA = new mongoose.Types.ObjectId();
+      userB = new mongoose.Types.ObjectId();
+
+      // Create agents for user A
+      agentA1 = await createAgent({
+        id: `agent_${uuidv4().slice(0, 12)}`,
+        name: 'Agent A1',
+        description: 'User A agent 1',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: userA,
+      });
+
+      agentA2 = await createAgent({
+        id: `agent_${uuidv4().slice(0, 12)}`,
+        name: 'Agent A2',
+        description: 'User A agent 2',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: userA,
+      });
+
+      agentA3 = await createAgent({
+        id: `agent_${uuidv4().slice(0, 12)}`,
+        name: 'Agent A3',
+        description: 'User A agent 3',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: userA,
+      });
+    });
+
+    test('should return empty list when user has no accessible agents (empty accessibleIds)', async () => {
+      // User B has no agents and no shared agents
+      const result = await getListAgentsByAccess({
+        accessibleIds: [],
+        otherParams: {},
+      });
+
+      expect(result.data).toHaveLength(0);
+      expect(result.has_more).toBe(false);
+      expect(result.first_id).toBeNull();
+      expect(result.last_id).toBeNull();
+    });
+
+    test('should not return other users agents when accessibleIds is empty', async () => {
+      // User B trying to list agents with empty accessibleIds should not see User A's agents
+      const result = await getListAgentsByAccess({
+        accessibleIds: [],
+        otherParams: { author: userB },
+      });
+
+      expect(result.data).toHaveLength(0);
+      expect(result.has_more).toBe(false);
+    });
+
+    test('should only return agents in accessibleIds list', async () => {
+      // Give User B access to only one of User A's agents
+      const accessibleIds = [agentA1._id];
+
+      const result = await getListAgentsByAccess({
+        accessibleIds,
+        otherParams: {},
+      });
+
+      expect(result.data).toHaveLength(1);
+      expect(result.data[0].id).toBe(agentA1.id);
+      expect(result.data[0].name).toBe('Agent A1');
+    });
+
+    test('should return multiple accessible agents when provided', async () => {
+      // Give User B access to two of User A's agents
+      const accessibleIds = [agentA1._id, agentA3._id];
+
+      const result = await getListAgentsByAccess({
+        accessibleIds,
+        otherParams: {},
+      });
+
+      expect(result.data).toHaveLength(2);
+      const returnedIds = result.data.map((agent) => agent.id);
+      expect(returnedIds).toContain(agentA1.id);
+      expect(returnedIds).toContain(agentA3.id);
+      expect(returnedIds).not.toContain(agentA2.id);
+    });
+
+    test('should respect other query parameters while enforcing accessibleIds', async () => {
+      // Give access to all agents but filter by name
+      const accessibleIds = [agentA1._id, agentA2._id, agentA3._id];
+
+      const result = await getListAgentsByAccess({
+        accessibleIds,
+        otherParams: { name: 'Agent A2' },
+      });
+
+      expect(result.data).toHaveLength(1);
+      expect(result.data[0].id).toBe(agentA2.id);
+    });
+
+    test('should handle pagination correctly with accessibleIds filter', async () => {
+      // Create more agents
+      const moreAgents = [];
+      for (let i = 4; i <= 10; i++) {
+        const agent = await createAgent({
+          id: `agent_${uuidv4().slice(0, 12)}`,
+          name: `Agent A${i}`,
+          description: `User A agent ${i}`,
+          provider: 'openai',
+          model: 'gpt-4',
+          author: userA,
+        });
+        moreAgents.push(agent);
+      }
+
+      // Give access to all agents
+      const allAgentIds = [agentA1, agentA2, agentA3, ...moreAgents].map((a) => a._id);
+
+      // First page
+      const page1 = await getListAgentsByAccess({
+        accessibleIds: allAgentIds,
+        otherParams: {},
+        limit: 5,
+      });
+
+      expect(page1.data).toHaveLength(5);
+      expect(page1.has_more).toBe(true);
+      expect(page1.after).toBeTruthy();
+
+      // Second page
+      const page2 = await getListAgentsByAccess({
+        accessibleIds: allAgentIds,
+        otherParams: {},
+        limit: 5,
+        after: page1.after,
+      });
+
+      expect(page2.data).toHaveLength(5);
+      expect(page2.has_more).toBe(false);
+
+      // Verify no overlap between pages
+      const page1Ids = page1.data.map((a) => a.id);
+      const page2Ids = page2.data.map((a) => a.id);
+      const intersection = page1Ids.filter((id) => page2Ids.includes(id));
+      expect(intersection).toHaveLength(0);
+    });
+
+    test('should return empty list when accessibleIds contains non-existent IDs', async () => {
+      // Try with non-existent agent IDs
+      const fakeIds = [new mongoose.Types.ObjectId(), new mongoose.Types.ObjectId()];
+
+      const result = await getListAgentsByAccess({
+        accessibleIds: fakeIds,
+        otherParams: {},
+      });
+
+      expect(result.data).toHaveLength(0);
+      expect(result.has_more).toBe(false);
+    });
+
+    test('should handle undefined accessibleIds as empty array', async () => {
+      // When accessibleIds is undefined, it should be treated as empty array
+      const result = await getListAgentsByAccess({
+        accessibleIds: undefined,
+        otherParams: {},
+      });
+
+      expect(result.data).toHaveLength(0);
+      expect(result.has_more).toBe(false);
+    });
+
+    test('should combine accessibleIds with author filter correctly', async () => {
+      // Create an agent for User B
+      const agentB1 = await createAgent({
+        id: `agent_${uuidv4().slice(0, 12)}`,
+        name: 'Agent B1',
+        description: 'User B agent 1',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: userB,
+      });
+
+      // Give User B access to one of User A's agents
+      const accessibleIds = [agentA1._id, agentB1._id];
+
+      // Filter by author should further restrict the results
+      const result = await getListAgentsByAccess({
+        accessibleIds,
+        otherParams: { author: userB },
+      });
+
+      expect(result.data).toHaveLength(1);
+      expect(result.data[0].id).toBe(agentB1.id);
+      expect(result.data[0].author).toBe(userB.toString());
+    });
+  });
+});
 
 function createBasicAgent(overrides = {}) {
   const defaults = {
     id: `agent_${uuidv4()}`,
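Taken together, the security tests document the contract of `getListAgentsByAccess`: the ACL-derived `accessibleIds` list is a hard filter, `otherParams` can only narrow it further, and pagination is cursor-based. A usage sketch inferred directly from those tests:

```js
// Shape inferred from the tests above; accessibleIds would come from AclEntry lookups.
const page1 = await getListAgentsByAccess({
  accessibleIds, // array of agent ObjectIds the caller is allowed to see
  otherParams: { author: userB }, // optional extra query filters
  limit: 5,
});

if (page1.has_more) {
  const page2 = await getListAgentsByAccess({
    accessibleIds,
    otherParams: {},
    limit: 5,
    after: page1.after, // cursor returned by the previous page
  });
}
```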
@@ -1,4 +1,4 @@
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');
 
 const options = [
   {
@@ -1,6 +1,5 @@
 const { logger } = require('@librechat/data-schemas');
 const { createTempChatExpirationDate } = require('@librechat/api');
-const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');
 const { getMessages, deleteMessages } = require('./Message');
 const { Conversation } = require('~/db/models');
 
@@ -102,8 +101,8 @@ module.exports = {
 
     if (req?.body?.isTemporary) {
       try {
-        const customConfig = await getCustomConfig();
-        update.expiredAt = createTempChatExpirationDate(customConfig);
+        const appConfig = req.config;
+        update.expiredAt = createTempChatExpirationDate(appConfig?.interfaceConfig);
       } catch (err) {
         logger.error('Error creating temporary chat expiration date:', err);
         logger.info(`---\`saveConvo\` context: ${metadata?.context}`);
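The expiration source moves from an async `getCustomConfig()` call to the request-scoped `req.config`, so the save path no longer awaits a config lookup. A sketch of the input shape the new code path expects, with field names as exercised by the specs later in this diff:

```js
// Minimal request shape for the new code path (values are illustrative):
const req = {
  body: { isTemporary: true },
  config: { interfaceConfig: { temporaryChatRetention: 48 } }, // retention in hours
};
// createTempChatExpirationDate(req.config?.interfaceConfig) yields a Date ~48h out;
// a missing req.config falls back to the library default (30 days, per the specs below).
```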
@@ -175,7 +174,7 @@ module.exports = {
 
     if (search) {
       try {
-        const meiliResults = await Conversation.meiliSearch(search);
+        const meiliResults = await Conversation.meiliSearch(search, { filter: `user = "${user}"` });
         const matchingIds = Array.isArray(meiliResults.hits)
           ? meiliResults.hits.map((result) => result.conversationId)
           : [];
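The second argument is an ordinary Meilisearch filter expression; scoping hits by the owning user id keeps one user's full-text search from surfacing another user's conversations. Sketch, assuming `user` is a plain id string as elsewhere in this module:

```js
// The filter string is passed through to Meilisearch's documented filter syntax.
const user = 'user123'; // assumption for illustration
const meiliResults = await Conversation.meiliSearch('quarterly report', {
  filter: `user = "${user}"`,
});
```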
@@ -13,9 +13,8 @@ const {
   saveConvo,
   getConvo,
 } = require('./Conversation');
-jest.mock('~/server/services/Config/getCustomConfig');
+jest.mock('~/server/services/Config/app');
 jest.mock('./Message');
-const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');
 const { getMessages, deleteMessages } = require('./Message');
 
 const { Conversation } = require('~/db/models');
@@ -50,6 +49,11 @@ describe('Conversation Operations', () => {
     mockReq = {
       user: { id: 'user123' },
       body: {},
+      config: {
+        interfaceConfig: {
+          temporaryChatRetention: 24, // Default 24 hours
+        },
+      },
     };
 
     mockConversationData = {
@@ -118,12 +122,8 @@ describe('Conversation Operations', () => {
 
   describe('isTemporary conversation handling', () => {
     it('should save a conversation with expiredAt when isTemporary is true', async () => {
-      // Mock custom config with 24 hour retention
-      getCustomConfig.mockResolvedValue({
-        interface: {
-          temporaryChatRetention: 24,
-        },
-      });
+      // Mock app config with 24 hour retention
+      mockReq.config.interfaceConfig.temporaryChatRetention = 24;
 
       mockReq.body = { isTemporary: true };
 
@@ -167,12 +167,8 @@ describe('Conversation Operations', () => {
     });
 
     it('should use custom retention period from config', async () => {
-      // Mock custom config with 48 hour retention
-      getCustomConfig.mockResolvedValue({
-        interface: {
-          temporaryChatRetention: 48,
-        },
-      });
+      // Mock app config with 48 hour retention
+      mockReq.config.interfaceConfig.temporaryChatRetention = 48;
 
       mockReq.body = { isTemporary: true };
 
@@ -194,12 +190,8 @@ describe('Conversation Operations', () => {
     });
 
     it('should handle minimum retention period (1 hour)', async () => {
-      // Mock custom config with less than minimum retention
-      getCustomConfig.mockResolvedValue({
-        interface: {
-          temporaryChatRetention: 0.5, // Half hour - should be clamped to 1 hour
-        },
-      });
+      // Mock app config with less than minimum retention
+      mockReq.config.interfaceConfig.temporaryChatRetention = 0.5; // Half hour - should be clamped to 1 hour
 
       mockReq.body = { isTemporary: true };
 
@@ -221,12 +213,8 @@ describe('Conversation Operations', () => {
     });
 
    it('should handle maximum retention period (8760 hours)', async () => {
-      // Mock custom config with more than maximum retention
-      getCustomConfig.mockResolvedValue({
-        interface: {
-          temporaryChatRetention: 10000, // Should be clamped to 8760 hours
-        },
-      });
+      // Mock app config with more than maximum retention
+      mockReq.config.interfaceConfig.temporaryChatRetention = 10000; // Should be clamped to 8760 hours
 
       mockReq.body = { isTemporary: true };
 
@@ -247,22 +235,36 @@ describe('Conversation Operations', () => {
       );
     });
 
-    it('should handle getCustomConfig errors gracefully', async () => {
-      // Mock getCustomConfig to throw an error
-      getCustomConfig.mockRejectedValue(new Error('Config service unavailable'));
+    it('should handle missing config gracefully', async () => {
+      // Simulate missing config - should use default retention period
+      delete mockReq.config;
 
       mockReq.body = { isTemporary: true };
 
+      const beforeSave = new Date();
       const result = await saveConvo(mockReq, mockConversationData);
+      const afterSave = new Date();
 
-      // Should still save the conversation but with expiredAt as null
+      // Should still save the conversation with default retention period (30 days)
       expect(result.conversationId).toBe(mockConversationData.conversationId);
-      expect(result.expiredAt).toBeNull();
+      expect(result.expiredAt).toBeDefined();
+      expect(result.expiredAt).toBeInstanceOf(Date);
+
+      // Verify expiredAt is approximately 30 days in the future (720 hours)
+      const expectedExpirationTime = new Date(beforeSave.getTime() + 720 * 60 * 60 * 1000);
+      const actualExpirationTime = new Date(result.expiredAt);
+
+      expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
+        expectedExpirationTime.getTime() - 1000,
+      );
+      expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
+        new Date(afterSave.getTime() + 720 * 60 * 60 * 1000 + 1000).getTime(),
+      );
     });
 
     it('should use default retention when config is not provided', async () => {
-      // Mock getCustomConfig to return empty config
-      getCustomConfig.mockResolvedValue({});
+      // Mock getAppConfig to return empty config
+      mockReq.config = {}; // Empty config
 
       mockReq.body = { isTemporary: true };
 
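The retention tests above imply a clamp-with-default rule: 720 hours (30 days) when no value is configured, and configured values bounded to [1, 8760] hours. A sketch of that rule — the real implementation lives in `createTempChatExpirationDate` in `@librechat/api`, not here:

```js
// Behavior implied by these tests; not the actual library source.
function retentionHours(interfaceConfig) {
  const configured = interfaceConfig?.temporaryChatRetention ?? 720; // default: 30 days
  return Math.min(Math.max(configured, 1), 8760); // clamp to [1 hour, 1 year]
}

retentionHours(undefined); // 720
retentionHours({ temporaryChatRetention: 0.5 }); // 1
retentionHours({ temporaryChatRetention: 10000 }); // 8760
```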
@@ -285,11 +287,7 @@ describe('Conversation Operations', () => {
 
     it('should update expiredAt when saving existing temporary conversation', async () => {
       // First save a temporary conversation
-      getCustomConfig.mockResolvedValue({
-        interface: {
-          temporaryChatRetention: 24,
-        },
-      });
+      mockReq.config.interfaceConfig.temporaryChatRetention = 24;
 
       mockReq.body = { isTemporary: true };
       const firstSave = await saveConvo(mockReq, mockConversationData);
@@ -239,10 +239,46 @@ const updateTagsForConversation = async (user, conversationId, tags) => {
   }
 };
 
+/**
+ * Increments tag counts for existing tags only.
+ * @param {string} user - The user ID.
+ * @param {string[]} tags - Array of tag names to increment
+ * @returns {Promise<void>}
+ */
+const bulkIncrementTagCounts = async (user, tags) => {
+  if (!tags || tags.length === 0) {
+    return;
+  }
+
+  try {
+    const uniqueTags = [...new Set(tags.filter(Boolean))];
+    if (uniqueTags.length === 0) {
+      return;
+    }
+
+    const bulkOps = uniqueTags.map((tag) => ({
+      updateOne: {
+        filter: { user, tag },
+        update: { $inc: { count: 1 } },
+      },
+    }));
+
+    const result = await ConversationTag.bulkWrite(bulkOps);
+    if (result && result.modifiedCount > 0) {
+      logger.debug(
+        `user: ${user} | Incremented tag counts - modified ${result.modifiedCount} tags`,
+      );
+    }
+  } catch (error) {
+    logger.error('[bulkIncrementTagCounts] Error incrementing tag counts', error);
+  }
+};
+
 module.exports = {
   getConversationTags,
   createConversationTag,
   updateConversationTag,
   deleteConversationTag,
+  bulkIncrementTagCounts,
   updateTagsForConversation,
 };
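Because the `bulkWrite` uses `updateOne` with `$inc` and no `upsert`, tags the user does not already have are silently skipped: this helper only bumps counts, it never creates tags. Usage sketch:

```js
// Duplicate and falsy entries are filtered before the bulk update is built.
await bulkIncrementTagCounts('user123', ['work', 'work', null, 'ideas']);
// -> one { $inc: { count: 1 } } per unique truthy tag: 'work' and 'ideas'
```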
@@ -1,7 +1,5 @@
 const { logger } = require('@librechat/data-schemas');
-const { EToolResources, FileContext, Constants } = require('librechat-data-provider');
-const { getProjectByName } = require('./Project');
-const { getAgent } = require('./Agent');
+const { EToolResources, FileContext } = require('librechat-data-provider');
 const { File } = require('~/db/models');
 
 /**
@@ -14,124 +12,17 @@ const findFileById = async (file_id, options = {}) => {
   return await File.findOne({ file_id, ...options }).lean();
 };
 
-/**
- * Checks if a user has access to multiple files through a shared agent (batch operation)
- * @param {string} userId - The user ID to check access for
- * @param {string[]} fileIds - Array of file IDs to check
- * @param {string} agentId - The agent ID that might grant access
- * @returns {Promise<Map<string, boolean>>} Map of fileId to access status
- */
-const hasAccessToFilesViaAgent = async (userId, fileIds, agentId, checkCollaborative = true) => {
-  const accessMap = new Map();
-
-  // Initialize all files as no access
-  fileIds.forEach((fileId) => accessMap.set(fileId, false));
-
-  try {
-    const agent = await getAgent({ id: agentId });
-
-    if (!agent) {
-      return accessMap;
-    }
-
-    // Check if user is the author - if so, grant access to all files
-    if (agent.author.toString() === userId) {
-      fileIds.forEach((fileId) => accessMap.set(fileId, true));
-      return accessMap;
-    }
-
-    // Check if agent is shared with the user via projects
-    if (!agent.projectIds || agent.projectIds.length === 0) {
-      return accessMap;
-    }
-
-    // Check if agent is in global project
-    const globalProject = await getProjectByName(Constants.GLOBAL_PROJECT_NAME, '_id');
-    if (
-      !globalProject ||
-      !agent.projectIds.some((pid) => pid.toString() === globalProject._id.toString())
-    ) {
-      return accessMap;
-    }
-
-    // Agent is globally shared - check if it's collaborative
-    if (checkCollaborative && !agent.isCollaborative) {
-      return accessMap;
-    }
-
-    // Check which files are actually attached
-    const attachedFileIds = new Set();
-    if (agent.tool_resources) {
-      for (const [_resourceType, resource] of Object.entries(agent.tool_resources)) {
-        if (resource?.file_ids && Array.isArray(resource.file_ids)) {
-          resource.file_ids.forEach((fileId) => attachedFileIds.add(fileId));
-        }
-      }
-    }
-
-    // Grant access only to files that are attached to this agent
-    fileIds.forEach((fileId) => {
-      if (attachedFileIds.has(fileId)) {
-        accessMap.set(fileId, true);
-      }
-    });
-
-    return accessMap;
-  } catch (error) {
-    logger.error('[hasAccessToFilesViaAgent] Error checking file access:', error);
-    return accessMap;
-  }
-};
-
 /**
  * Retrieves files matching a given filter, sorted by the most recently updated.
  * @param {Object} filter - The filter criteria to apply.
  * @param {Object} [_sortOptions] - Optional sort parameters.
  * @param {Object|String} [selectFields={ text: 0 }] - Fields to include/exclude in the query results.
  * Default excludes the 'text' field.
- * @param {Object} [options] - Additional options
- * @param {string} [options.userId] - User ID for access control
- * @param {string} [options.agentId] - Agent ID that might grant access to files
  * @returns {Promise<Array<MongoFile>>} A promise that resolves to an array of file documents.
  */
-const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }, options = {}) => {
+const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => {
   const sortOptions = { updatedAt: -1, ..._sortOptions };
-  const files = await File.find(filter).select(selectFields).sort(sortOptions).lean();
-
-  // If userId and agentId are provided, filter files based on access
-  if (options.userId && options.agentId) {
-    // Collect file IDs that need access check
-    const filesToCheck = [];
-    const ownedFiles = [];
-
-    for (const file of files) {
-      if (file.user && file.user.toString() === options.userId) {
-        ownedFiles.push(file);
-      } else {
-        filesToCheck.push(file);
-      }
-    }
-
-    if (filesToCheck.length === 0) {
-      return ownedFiles;
-    }
-
-    // Batch check access for all non-owned files
-    const fileIds = filesToCheck.map((f) => f.file_id);
-    const accessMap = await hasAccessToFilesViaAgent(
-      options.userId,
-      fileIds,
-      options.agentId,
-      false,
-    );
-
-    // Filter files based on access
-    const accessibleFiles = filesToCheck.filter((file) => accessMap.get(file.file_id));
-
-    return [...ownedFiles, ...accessibleFiles];
-  }
-
-  return files;
+  return await File.find(filter).select(selectFields).sort(sortOptions).lean();
 };
 
 /**
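With the project-based check deleted, `getFiles` no longer performs access control at all; the replacement `hasAccessToFilesViaAgent` lives in the permissions service and takes an options object, as the spec changes further down in this diff show:

```js
// Call shape taken from the File spec below; the ACL internals are not shown in this diff.
const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');

const accessMap = await hasAccessToFilesViaAgent({
  userId,
  role: SystemRoles.USER,
  fileIds, // array of file_id strings
  agentId: agent.id, // the agent's custom UUID, not its Mongo _id
});
// accessMap.get(fileId) -> boolean, as with the removed implementation
```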
@@ -151,7 +42,7 @@ const getToolFilesByIds = async (fileIds, toolResourceSet) => {
     $or: [],
   };
 
-  if (toolResourceSet.has(EToolResources.ocr)) {
+  if (toolResourceSet.has(EToolResources.context)) {
     filter.$or.push({ text: { $exists: true, $ne: null }, context: FileContext.agents });
   }
   if (toolResourceSet.has(EToolResources.file_search)) {
@@ -285,5 +176,4 @@ module.exports = {
   deleteFiles,
   deleteFileByFilter,
   batchUpdateFiles,
-  hasAccessToFilesViaAgent,
 };
@@ -1,17 +1,23 @@
 const mongoose = require('mongoose');
 const { v4: uuidv4 } = require('uuid');
-const { fileSchema } = require('@librechat/data-schemas');
-const { agentSchema } = require('@librechat/data-schemas');
-const { projectSchema } = require('@librechat/data-schemas');
+const { createModels } = require('@librechat/data-schemas');
 const { MongoMemoryServer } = require('mongodb-memory-server');
-const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
+const {
+  SystemRoles,
+  ResourceType,
+  AccessRoleIds,
+  PrincipalType,
+} = require('librechat-data-provider');
+const { grantPermission } = require('~/server/services/PermissionService');
 const { getFiles, createFile } = require('./File');
-const { getProjectByName } = require('./Project');
+const { seedDefaultRoles } = require('~/models');
 const { createAgent } = require('./Agent');
 
 let File;
 let Agent;
-let Project;
+let AclEntry;
+let User;
+let modelsToCleanup = [];
 
 describe('File Access Control', () => {
   let mongoServer;
@@ -19,13 +25,41 @@ describe('File Access Control', () => {
   beforeAll(async () => {
     mongoServer = await MongoMemoryServer.create();
     const mongoUri = mongoServer.getUri();
-    File = mongoose.models.File || mongoose.model('File', fileSchema);
-    Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
-    Project = mongoose.models.Project || mongoose.model('Project', projectSchema);
     await mongoose.connect(mongoUri);
+
+    // Initialize all models
+    const models = createModels(mongoose);
+
+    // Track which models we're adding
+    modelsToCleanup = Object.keys(models);
+
+    // Register models on mongoose.models so methods can access them
+    const dbModels = require('~/db/models');
+    Object.assign(mongoose.models, dbModels);
+
+    File = dbModels.File;
+    Agent = dbModels.Agent;
+    AclEntry = dbModels.AclEntry;
+    User = dbModels.User;
+
+    // Seed default roles
+    await seedDefaultRoles();
   });
 
   afterAll(async () => {
+    // Clean up all collections before disconnecting
+    const collections = mongoose.connection.collections;
+    for (const key in collections) {
+      await collections[key].deleteMany({});
+    }
+
+    // Clear only the models we added
+    for (const modelName of modelsToCleanup) {
+      if (mongoose.models[modelName]) {
+        delete mongoose.models[modelName];
+      }
+    }
+
     await mongoose.disconnect();
     await mongoServer.stop();
   });
@@ -33,16 +67,33 @@ describe('File Access Control', () => {
   beforeEach(async () => {
     await File.deleteMany({});
     await Agent.deleteMany({});
-    await Project.deleteMany({});
+    await AclEntry.deleteMany({});
+    await User.deleteMany({});
+    // Don't delete AccessRole as they are seeded defaults needed for tests
   });
 
   describe('hasAccessToFilesViaAgent', () => {
     it('should efficiently check access for multiple files at once', async () => {
-      const userId = new mongoose.Types.ObjectId().toString();
-      const authorId = new mongoose.Types.ObjectId().toString();
+      const userId = new mongoose.Types.ObjectId();
+      const authorId = new mongoose.Types.ObjectId();
       const agentId = uuidv4();
       const fileIds = [uuidv4(), uuidv4(), uuidv4(), uuidv4()];
 
+      // Create users
+      await User.create({
+        _id: userId,
+        email: 'user@example.com',
+        emailVerified: true,
+        provider: 'local',
+      });
+
+      await User.create({
+        _id: authorId,
+        email: 'author@example.com',
+        emailVerified: true,
+        provider: 'local',
+      });
+
       // Create files
       for (const fileId of fileIds) {
         await createFile({
@@ -54,13 +105,12 @@ describe('File Access Control', () => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Create agent with only first two files attached
|
// Create agent with only first two files attached
|
||||||
await createAgent({
|
const agent = await createAgent({
|
||||||
id: agentId,
|
id: agentId,
|
||||||
name: 'Test Agent',
|
name: 'Test Agent',
|
||||||
author: authorId,
|
author: authorId,
|
||||||
model: 'gpt-4',
|
model: 'gpt-4',
|
||||||
provider: 'openai',
|
provider: 'openai',
|
||||||
isCollaborative: true,
|
|
||||||
tool_resources: {
|
tool_resources: {
|
||||||
file_search: {
|
file_search: {
|
||||||
file_ids: [fileIds[0], fileIds[1]],
|
file_ids: [fileIds[0], fileIds[1]],
|
||||||
@@ -68,15 +118,24 @@ describe('File Access Control', () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
// Get or create global project
|
// Grant EDIT permission to user on the agent
|
||||||
const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, '_id');
|
await grantPermission({
|
||||||
|
principalType: PrincipalType.USER,
|
||||||
// Share agent globally
|
principalId: userId,
|
||||||
await Agent.updateOne({ id: agentId }, { $push: { projectIds: globalProject._id } });
|
resourceType: ResourceType.AGENT,
|
||||||
|
resourceId: agent._id,
|
||||||
|
accessRoleId: AccessRoleIds.AGENT_EDITOR,
|
||||||
|
grantedBy: authorId,
|
||||||
|
});
|
||||||
|
|
||||||
// Check access for all files
|
// Check access for all files
|
||||||
const { hasAccessToFilesViaAgent } = require('./File');
|
const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
|
||||||
const accessMap = await hasAccessToFilesViaAgent(userId, fileIds, agentId);
|
const accessMap = await hasAccessToFilesViaAgent({
|
||||||
|
userId: userId,
|
||||||
|
role: SystemRoles.USER,
|
||||||
|
fileIds,
|
||||||
|
agentId: agent.id, // Use agent.id which is the custom UUID
|
||||||
|
});
|
||||||
|
|
||||||
// Should have access only to the first two files
|
// Should have access only to the first two files
|
||||||
expect(accessMap.get(fileIds[0])).toBe(true);
|
expect(accessMap.get(fileIds[0])).toBe(true);
|
||||||
@@ -86,10 +145,18 @@ describe('File Access Control', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should grant access to all files when user is the agent author', async () => {
|
it('should grant access to all files when user is the agent author', async () => {
|
||||||
const authorId = new mongoose.Types.ObjectId().toString();
|
const authorId = new mongoose.Types.ObjectId();
|
||||||
const agentId = uuidv4();
|
const agentId = uuidv4();
|
||||||
const fileIds = [uuidv4(), uuidv4(), uuidv4()];
|
const fileIds = [uuidv4(), uuidv4(), uuidv4()];
|
||||||
|
|
||||||
|
// Create author user
|
||||||
|
await User.create({
|
||||||
|
_id: authorId,
|
||||||
|
email: 'author@example.com',
|
||||||
|
emailVerified: true,
|
||||||
|
provider: 'local',
|
||||||
|
});
|
||||||
|
|
||||||
// Create agent
|
// Create agent
|
||||||
await createAgent({
|
await createAgent({
|
||||||
id: agentId,
|
id: agentId,
|
||||||
@@ -105,8 +172,13 @@ describe('File Access Control', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Check access as the author
|
// Check access as the author
|
||||||
const { hasAccessToFilesViaAgent } = require('./File');
|
const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
|
||||||
const accessMap = await hasAccessToFilesViaAgent(authorId, fileIds, agentId);
|
const accessMap = await hasAccessToFilesViaAgent({
|
||||||
|
userId: authorId,
|
||||||
|
role: SystemRoles.USER,
|
||||||
|
fileIds,
|
||||||
|
agentId,
|
||||||
|
});
|
||||||
|
|
||||||
// Author should have access to all files
|
// Author should have access to all files
|
||||||
expect(accessMap.get(fileIds[0])).toBe(true);
|
expect(accessMap.get(fileIds[0])).toBe(true);
|
||||||
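Note: both tests above reflect the signature change from positional arguments to a single options object; the function still resolves to a Map of file_id → boolean. A small illustrative helper for consuming that Map (hypothetical, not part of the change):

  // Hypothetical caller: keep only the files the map marks accessible.
  // `accessMap` is the Map<string, boolean> returned by hasAccessToFilesViaAgent.
  function pickAccessible(fileIds, accessMap) {
    return fileIds.filter((id) => accessMap.get(id) === true);
  }

  // e.g. pickAccessible(fileIds, accessMap) -> [fileIds[0], fileIds[1]] in the first test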
@@ -115,31 +187,58 @@ describe('File Access Control', () => {
     });

     it('should handle non-existent agent gracefully', async () => {
-      const userId = new mongoose.Types.ObjectId().toString();
+      const userId = new mongoose.Types.ObjectId();
       const fileIds = [uuidv4(), uuidv4()];

-      const { hasAccessToFilesViaAgent } = require('./File');
-      const accessMap = await hasAccessToFilesViaAgent(userId, fileIds, 'non-existent-agent');
+      // Create user
+      await User.create({
+        _id: userId,
+        email: 'user@example.com',
+        emailVerified: true,
+        provider: 'local',
+      });
+
+      const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
+      const accessMap = await hasAccessToFilesViaAgent({
+        userId: userId,
+        role: SystemRoles.USER,
+        fileIds,
+        agentId: 'non-existent-agent',
+      });

       // Should have no access to any files
       expect(accessMap.get(fileIds[0])).toBe(false);
       expect(accessMap.get(fileIds[1])).toBe(false);
     });

-    it('should deny access when agent is not collaborative', async () => {
-      const userId = new mongoose.Types.ObjectId().toString();
-      const authorId = new mongoose.Types.ObjectId().toString();
+    it('should deny access when user only has VIEW permission and needs access for deletion', async () => {
+      const userId = new mongoose.Types.ObjectId();
+      const authorId = new mongoose.Types.ObjectId();
       const agentId = uuidv4();
       const fileIds = [uuidv4(), uuidv4()];

-      // Create agent with files but isCollaborative: false
-      await createAgent({
+      // Create users
+      await User.create({
+        _id: userId,
+        email: 'user@example.com',
+        emailVerified: true,
+        provider: 'local',
+      });
+
+      await User.create({
+        _id: authorId,
+        email: 'author@example.com',
+        emailVerified: true,
+        provider: 'local',
+      });
+
+      // Create agent with files
+      const agent = await createAgent({
         id: agentId,
-        name: 'Non-Collaborative Agent',
+        name: 'View-Only Agent',
         author: authorId,
         model: 'gpt-4',
         provider: 'openai',
-        isCollaborative: false,
         tool_resources: {
           file_search: {
             file_ids: fileIds,
@@ -147,20 +246,88 @@ describe('File Access Control', () => {
           },
         },
       });

-      // Get or create global project
-      const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, '_id');
-      // Share agent globally
-      await Agent.updateOne({ id: agentId }, { $push: { projectIds: globalProject._id } });
+      // Grant only VIEW permission to user on the agent
+      await grantPermission({
+        principalType: PrincipalType.USER,
+        principalId: userId,
+        resourceType: ResourceType.AGENT,
+        resourceId: agent._id,
+        accessRoleId: AccessRoleIds.AGENT_VIEWER,
+        grantedBy: authorId,
+      });

       // Check access for files
-      const { hasAccessToFilesViaAgent } = require('./File');
-      const accessMap = await hasAccessToFilesViaAgent(userId, fileIds, agentId);
+      const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
+      const accessMap = await hasAccessToFilesViaAgent({
+        userId: userId,
+        role: SystemRoles.USER,
+        fileIds,
+        agentId,
+        isDelete: true,
+      });

-      // Should have no access to any files when isCollaborative is false
+      // Should have no access to any files when only VIEW permission
       expect(accessMap.get(fileIds[0])).toBe(false);
       expect(accessMap.get(fileIds[1])).toBe(false);
     });
+
+    it('should grant access when user has VIEW permission', async () => {
+      const userId = new mongoose.Types.ObjectId();
+      const authorId = new mongoose.Types.ObjectId();
+      const agentId = uuidv4();
+      const fileIds = [uuidv4(), uuidv4()];
+
+      // Create users
+      await User.create({
+        _id: userId,
+        email: 'user@example.com',
+        emailVerified: true,
+        provider: 'local',
+      });
+
+      await User.create({
+        _id: authorId,
+        email: 'author@example.com',
+        emailVerified: true,
+        provider: 'local',
+      });
+
+      // Create agent with files
+      const agent = await createAgent({
+        id: agentId,
+        name: 'View-Only Agent',
+        author: authorId,
+        model: 'gpt-4',
+        provider: 'openai',
+        tool_resources: {
+          file_search: {
+            file_ids: fileIds,
+          },
+        },
+      });
+
+      // Grant only VIEW permission to user on the agent
+      await grantPermission({
+        principalType: PrincipalType.USER,
+        principalId: userId,
+        resourceType: ResourceType.AGENT,
+        resourceId: agent._id,
+        accessRoleId: AccessRoleIds.AGENT_VIEWER,
+        grantedBy: authorId,
+      });
+
+      // Check access for files
+      const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
+      const accessMap = await hasAccessToFilesViaAgent({
+        userId: userId,
+        role: SystemRoles.USER,
+        fileIds,
+        agentId,
+      });
+
+      expect(accessMap.get(fileIds[0])).toBe(true);
+      expect(accessMap.get(fileIds[1])).toBe(true);
+    });
   });

   describe('getFiles with agent access control', () => {
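Note: the deny-on-delete test above passes isDelete: true, so a VIEW-only grant is expected to fail while the plain check succeeds. Assuming permissions are bit flags, as the PermissionBits usage elsewhere in this comparison suggests, the underlying check is presumably of this shape (illustrative values, not the service's actual code):

  // Illustrative bit flags; librechat-data-provider defines the real values.
  const VIEW = 1, EDIT = 2, DELETE = 4, SHARE = 8;

  // A grant satisfies a check only if every required bit is set.
  function hasPermission(grantedBits, requiredBits) {
    return (grantedBits & requiredBits) === requiredBits;
  }

  const viewerBits = VIEW;
  console.log(hasPermission(viewerBits, VIEW));   // true  -> plain access granted
  console.log(hasPermission(viewerBits, DELETE)); // false -> why isDelete is denied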
@@ -172,18 +339,28 @@ describe('File Access Control', () => {
       const sharedFileId = `file_${uuidv4()}`;
       const inaccessibleFileId = `file_${uuidv4()}`;

-      // Create/get global project using getProjectByName which will upsert
-      const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME);
+      // Create users
+      await User.create({
+        _id: userId,
+        email: 'user@example.com',
+        emailVerified: true,
+        provider: 'local',
+      });
+
+      await User.create({
+        _id: authorId,
+        email: 'author@example.com',
+        emailVerified: true,
+        provider: 'local',
+      });

       // Create agent with shared file
-      await createAgent({
+      const agent = await createAgent({
         id: agentId,
         name: 'Shared Agent',
         provider: 'test',
         model: 'test-model',
         author: authorId,
-        projectIds: [globalProject._id],
-        isCollaborative: true,
         tool_resources: {
           file_search: {
             file_ids: [sharedFileId],
@@ -191,6 +368,16 @@ describe('File Access Control', () => {
           },
         },
       });
+
+      // Grant EDIT permission to user on the agent
+      await grantPermission({
+        principalType: PrincipalType.USER,
+        principalId: userId,
+        resourceType: ResourceType.AGENT,
+        resourceId: agent._id,
+        accessRoleId: AccessRoleIds.AGENT_EDITOR,
+        grantedBy: authorId,
+      });

       // Create files
       await createFile({
         file_id: ownedFileId,
@@ -220,14 +407,22 @@ describe('File Access Control', () => {
         bytes: 300,
       });

-      // Get files with access control
-      const files = await getFiles(
+      // Get all files first
+      const allFiles = await getFiles(
         { file_id: { $in: [ownedFileId, sharedFileId, inaccessibleFileId] } },
         null,
         { text: 0 },
-        { userId: userId.toString(), agentId },
       );
+
+      // Then filter by access control
+      const { filterFilesByAgentAccess } = require('~/server/services/Files/permissions');
+      const files = await filterFilesByAgentAccess({
+        files: allFiles,
+        userId: userId,
+        role: SystemRoles.USER,
+        agentId,
+      });

       expect(files).toHaveLength(2);
       expect(files.map((f) => f.file_id)).toContain(ownedFileId);
       expect(files.map((f) => f.file_id)).toContain(sharedFileId);
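Note: the updated test fetches first and filters second, instead of passing { userId, agentId } into the getFiles query. A sketch of that two-step shape, with a hypothetical async callback standing in for the ACL lookup that filterFilesByAgentAccess performs:

  // Hypothetical stand-in for filterFilesByAgentAccess: the real service
  // consults ACL entries; this only shows the fetch-then-filter structure.
  async function filterByAccess({ files, canAccess }) {
    // Resolve all access checks in parallel, then keep the matching files.
    const results = await Promise.all(files.map((f) => canAccess(f.file_id)));
    return files.filter((_, i) => results[i]);
  }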
@@ -261,4 +456,166 @@ describe('File Access Control', () => {
       expect(files).toHaveLength(2);
     });
   });
+
+  describe('Role-based file permissions', () => {
+    it('should optimize permission checks when role is provided', async () => {
+      const userId = new mongoose.Types.ObjectId();
+      const authorId = new mongoose.Types.ObjectId();
+      const agentId = uuidv4();
+      const fileIds = [uuidv4(), uuidv4()];
+
+      // Create users
+      await User.create({
+        _id: userId,
+        email: 'user@example.com',
+        emailVerified: true,
+        provider: 'local',
+        role: 'ADMIN', // User has ADMIN role
+      });
+
+      await User.create({
+        _id: authorId,
+        email: 'author@example.com',
+        emailVerified: true,
+        provider: 'local',
+      });
+
+      // Create files
+      for (const fileId of fileIds) {
+        await createFile({
+          file_id: fileId,
+          user: authorId,
+          filename: `${fileId}.txt`,
+          filepath: `/uploads/${fileId}.txt`,
+          type: 'text/plain',
+          bytes: 100,
+        });
+      }
+
+      // Create agent with files
+      const agent = await createAgent({
+        id: agentId,
+        name: 'Test Agent',
+        author: authorId,
+        model: 'gpt-4',
+        provider: 'openai',
+        tool_resources: {
+          file_search: {
+            file_ids: fileIds,
+          },
+        },
+      });
+
+      // Grant permission to ADMIN role
+      await grantPermission({
+        principalType: PrincipalType.ROLE,
+        principalId: 'ADMIN',
+        resourceType: ResourceType.AGENT,
+        resourceId: agent._id,
+        accessRoleId: AccessRoleIds.AGENT_EDITOR,
+        grantedBy: authorId,
+      });
+
+      // Check access with role provided (should avoid DB query)
+      const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
+      const accessMapWithRole = await hasAccessToFilesViaAgent({
+        userId: userId,
+        role: 'ADMIN',
+        fileIds,
+        agentId: agent.id,
+      });
+
+      // User should have access through their ADMIN role
+      expect(accessMapWithRole.get(fileIds[0])).toBe(true);
+      expect(accessMapWithRole.get(fileIds[1])).toBe(true);
+
+      // Check access without role (will query DB to get user's role)
+      const accessMapWithoutRole = await hasAccessToFilesViaAgent({
+        userId: userId,
+        fileIds,
+        agentId: agent.id,
+      });
+
+      // Should have same result
+      expect(accessMapWithoutRole.get(fileIds[0])).toBe(true);
+      expect(accessMapWithoutRole.get(fileIds[1])).toBe(true);
+    });
+
+    it('should deny access when user role changes', async () => {
+      const userId = new mongoose.Types.ObjectId();
+      const authorId = new mongoose.Types.ObjectId();
+      const agentId = uuidv4();
+      const fileId = uuidv4();
+
+      // Create users
+      await User.create({
+        _id: userId,
+        email: 'user@example.com',
+        emailVerified: true,
+        provider: 'local',
+        role: 'EDITOR',
+      });
+
+      await User.create({
+        _id: authorId,
+        email: 'author@example.com',
+        emailVerified: true,
+        provider: 'local',
+      });
+
+      // Create file
+      await createFile({
+        file_id: fileId,
+        user: authorId,
+        filename: 'test.txt',
+        filepath: '/uploads/test.txt',
+        type: 'text/plain',
+        bytes: 100,
+      });
+
+      // Create agent
+      const agent = await createAgent({
+        id: agentId,
+        name: 'Test Agent',
+        author: authorId,
+        model: 'gpt-4',
+        provider: 'openai',
+        tool_resources: {
+          file_search: {
+            file_ids: [fileId],
+          },
+        },
+      });
+
+      // Grant permission to EDITOR role only
+      await grantPermission({
+        principalType: PrincipalType.ROLE,
+        principalId: 'EDITOR',
+        resourceType: ResourceType.AGENT,
+        resourceId: agent._id,
+        accessRoleId: AccessRoleIds.AGENT_EDITOR,
+        grantedBy: authorId,
+      });
+
+      const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
+
+      // Check with EDITOR role - should have access
+      const accessAsEditor = await hasAccessToFilesViaAgent({
+        userId: userId,
+        role: 'EDITOR',
+        fileIds: [fileId],
+        agentId: agent.id,
+      });
+      expect(accessAsEditor.get(fileId)).toBe(true);
+
+      // Simulate role change to USER - should lose access
+      const accessAsUser = await hasAccessToFilesViaAgent({
+        userId: userId,
+        role: SystemRoles.USER,
+        fileIds: [fileId],
+        agentId: agent.id,
+      });
+      expect(accessAsUser.get(fileId)).toBe(false);
+    });
+  });
 });
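Note: per the comments in the tests above, supplying role lets the permission check use the caller's role directly, while omitting it falls back to a DB query for the user's role. A sketch of that fast path (names illustrative, not the service's internals):

  // When `role` is supplied, skip the User lookup entirely.
  async function resolveRole({ role, userId, findUserById }) {
    if (role != null) {
      return role; // avoids one DB round trip per permission check
    }
    const user = await findUserById(userId);
    return user?.role;
  }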
@@ -1,7 +1,6 @@
 const { z } = require('zod');
 const { logger } = require('@librechat/data-schemas');
 const { createTempChatExpirationDate } = require('@librechat/api');
-const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');
 const { Message } = require('~/db/models');

 const idSchema = z.string().uuid();
@@ -11,7 +10,7 @@ const idSchema = z.string().uuid();
  *
  * @async
  * @function saveMessage
- * @param {Express.Request} req - The request object containing user information.
+ * @param {ServerRequest} req - The request object containing user information.
  * @param {Object} params - The message data object.
  * @param {string} params.endpoint - The endpoint where the message originated.
  * @param {string} params.iconURL - The URL of the sender's icon.
@@ -57,8 +56,8 @@ async function saveMessage(req, params, metadata) {

   if (req?.body?.isTemporary) {
     try {
-      const customConfig = await getCustomConfig();
-      update.expiredAt = createTempChatExpirationDate(customConfig);
+      const appConfig = req.config;
+      update.expiredAt = createTempChatExpirationDate(appConfig?.interfaceConfig);
     } catch (err) {
       logger.error('Error creating temporary chat expiration date:', err);
       logger.info(`---\`saveMessage\` context: ${metadata?.context}`);
@@ -13,8 +13,7 @@ const {
   deleteMessagesSince,
 } = require('./Message');

-jest.mock('~/server/services/Config/getCustomConfig');
-const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');
+jest.mock('~/server/services/Config/app');

 /**
  * @type {import('mongoose').Model<import('@librechat/data-schemas').IMessage>}
@@ -44,6 +43,11 @@ describe('Message Operations', () => {

     mockReq = {
       user: { id: 'user123' },
+      config: {
+        interfaceConfig: {
+          temporaryChatRetention: 24, // Default 24 hours
+        },
+      },
     };

     mockMessageData = {
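Note: since saveMessage now reads req.config rather than awaiting getCustomConfig(), the spec injects configuration directly on the mock request, as in the hunk above. A sketch of a small factory for such request stubs (hypothetical helper, not in the diff):

  // Minimal request stub matching what the updated saveMessage reads:
  // req.config.interfaceConfig drives temporary-chat expiration.
  function makeMockReq({ retentionHours = 24 } = {}) {
    return {
      user: { id: 'user123' },
      body: { isTemporary: true },
      config: { interfaceConfig: { temporaryChatRetention: retentionHours } },
    };
  }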
@@ -326,12 +330,8 @@ describe('Message Operations', () => {
   });

   it('should save a message with expiredAt when isTemporary is true', async () => {
-    // Mock custom config with 24 hour retention
-    getCustomConfig.mockResolvedValue({
-      interface: {
-        temporaryChatRetention: 24,
-      },
-    });
+    // Mock app config with 24 hour retention
+    mockReq.config.interfaceConfig.temporaryChatRetention = 24;

     mockReq.body = { isTemporary: true };

@@ -375,12 +375,8 @@ describe('Message Operations', () => {
   });

   it('should use custom retention period from config', async () => {
-    // Mock custom config with 48 hour retention
-    getCustomConfig.mockResolvedValue({
-      interface: {
-        temporaryChatRetention: 48,
-      },
-    });
+    // Mock app config with 48 hour retention
+    mockReq.config.interfaceConfig.temporaryChatRetention = 48;

     mockReq.body = { isTemporary: true };

@@ -402,12 +398,8 @@ describe('Message Operations', () => {
   });

   it('should handle minimum retention period (1 hour)', async () => {
-    // Mock custom config with less than minimum retention
-    getCustomConfig.mockResolvedValue({
-      interface: {
-        temporaryChatRetention: 0.5, // Half hour - should be clamped to 1 hour
-      },
-    });
+    // Mock app config with less than minimum retention
+    mockReq.config.interfaceConfig.temporaryChatRetention = 0.5; // Half hour - should be clamped to 1 hour

     mockReq.body = { isTemporary: true };

@@ -429,12 +421,8 @@ describe('Message Operations', () => {
   });

   it('should handle maximum retention period (8760 hours)', async () => {
-    // Mock custom config with more than maximum retention
-    getCustomConfig.mockResolvedValue({
-      interface: {
-        temporaryChatRetention: 10000, // Should be clamped to 8760 hours
-      },
-    });
+    // Mock app config with more than maximum retention
+    mockReq.config.interfaceConfig.temporaryChatRetention = 10000; // Should be clamped to 8760 hours

     mockReq.body = { isTemporary: true };

@@ -455,22 +443,36 @@ describe('Message Operations', () => {
     );
   });

-  it('should handle getCustomConfig errors gracefully', async () => {
-    // Mock getCustomConfig to throw an error
-    getCustomConfig.mockRejectedValue(new Error('Config service unavailable'));
+  it('should handle missing config gracefully', async () => {
+    // Simulate missing config - should use default retention period
+    delete mockReq.config;

     mockReq.body = { isTemporary: true };

+    const beforeSave = new Date();
     const result = await saveMessage(mockReq, mockMessageData);
+    const afterSave = new Date();

-    // Should still save the message but with expiredAt as null
+    // Should still save the message with default retention period (30 days)
     expect(result.messageId).toBe('msg123');
-    expect(result.expiredAt).toBeNull();
+    expect(result.expiredAt).toBeDefined();
+    expect(result.expiredAt).toBeInstanceOf(Date);
+
+    // Verify expiredAt is approximately 30 days in the future (720 hours)
+    const expectedExpirationTime = new Date(beforeSave.getTime() + 720 * 60 * 60 * 1000);
+    const actualExpirationTime = new Date(result.expiredAt);
+
+    expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
+      expectedExpirationTime.getTime() - 1000,
+    );
+    expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
+      new Date(afterSave.getTime() + 720 * 60 * 60 * 1000 + 1000).getTime(),
+    );
   });

   it('should use default retention when config is not provided', async () => {
-    // Mock getCustomConfig to return empty config
-    getCustomConfig.mockResolvedValue({});
+    // Mock getAppConfig to return empty config
+    mockReq.config = {}; // Empty config

     mockReq.body = { isTemporary: true };

@@ -493,11 +495,7 @@ describe('Message Operations', () => {

   it('should not update expiredAt on message update', async () => {
     // First save a temporary message
-    getCustomConfig.mockResolvedValue({
-      interface: {
-        temporaryChatRetention: 24,
-      },
-    });
+    mockReq.config.interfaceConfig.temporaryChatRetention = 24;

     mockReq.body = { isTemporary: true };
     const savedMessage = await saveMessage(mockReq, mockMessageData);
@@ -520,11 +518,7 @@ describe('Message Operations', () => {

   it('should preserve expiredAt when saving existing temporary message', async () => {
     // First save a temporary message
-    getCustomConfig.mockResolvedValue({
-      interface: {
-        temporaryChatRetention: 24,
-      },
-    });
+    mockReq.config.interfaceConfig.temporaryChatRetention = 24;

     mockReq.body = { isTemporary: true };
     const firstSave = await saveMessage(mockReq, mockMessageData);
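Note: taken together, these tests pin down the retention contract: values are clamped to the 1–8760 hour range and a missing config falls back to 720 hours (30 days). A sketch of that clamping, assuming this is roughly what createTempChatExpirationDate does internally (not its actual source):

  // Behavior implied by the tests: 0.5 -> 1, 10000 -> 8760,
  // missing/absent config -> 720 hours (30 days).
  function expirationDate(retentionHours, now = new Date()) {
    const hours = Math.min(Math.max(retentionHours ?? 720, 1), 8760);
    return new Date(now.getTime() + hours * 60 * 60 * 1000);
  }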
@@ -1,12 +1,18 @@
 const { ObjectId } = require('mongodb');
 const { logger } = require('@librechat/data-schemas');
-const { SystemRoles, SystemCategories, Constants } = require('librechat-data-provider');
 const {
-  getProjectByName,
-  addGroupIdsToProject,
-  removeGroupIdsFromProject,
+  Constants,
+  SystemRoles,
+  ResourceType,
+  SystemCategories,
+} = require('librechat-data-provider');
+const {
   removeGroupFromAllProjects,
+  removeGroupIdsFromProject,
+  addGroupIdsToProject,
+  getProjectByName,
 } = require('./Project');
+const { removeAllPermissions } = require('~/server/services/PermissionService');
 const { PromptGroup, Prompt } = require('~/db/models');
 const { escapeRegExp } = require('~/server/utils');

@@ -100,10 +106,6 @@ const getAllPromptGroups = async (req, filter) => {
   try {
     const { name, ...query } = filter;

-    if (!query.author) {
-      throw new Error('Author is required');
-    }
-
     let searchShared = true;
     let searchSharedOnly = false;
     if (name) {
@@ -153,10 +155,6 @@ const getPromptGroups = async (req, filter) => {
   const validatedPageNumber = Math.max(parseInt(pageNumber, 10), 1);
   const validatedPageSize = Math.max(parseInt(pageSize, 10), 1);

-  if (!query.author) {
-    throw new Error('Author is required');
-  }
-
   let searchShared = true;
   let searchSharedOnly = false;
   if (name) {
@@ -221,12 +219,16 @@ const getPromptGroups = async (req, filter) => {
  * @returns {Promise<TDeletePromptGroupResponse>}
  */
 const deletePromptGroup = async ({ _id, author, role }) => {
-  const query = { _id, author };
-  const groupQuery = { groupId: new ObjectId(_id), author };
-  if (role === SystemRoles.ADMIN) {
-    delete query.author;
-    delete groupQuery.author;
+  // Build query - with ACL, author is optional
+  const query = { _id };
+  const groupQuery = { groupId: new ObjectId(_id) };
+
+  // Legacy: Add author filter if provided (backward compatibility)
+  if (author && role !== SystemRoles.ADMIN) {
+    query.author = author;
+    groupQuery.author = author;
   }

   const response = await PromptGroup.deleteOne(query);

   if (!response || response.deletedCount === 0) {
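Note: the rewritten deletePromptGroup inverts the old logic; author is now a legacy, optional filter, and admins never constrain on it. The resulting query shapes, sketched as a standalone helper (illustrative, mirroring the hunk above):

  // Query shapes produced by the updated deletePromptGroup:
  //   admin:               { _id }          -> no author constraint
  //   user with author:    { _id, author }  -> legacy back-compat path
  //   user without author: { _id }          -> ACL layer already vetted access
  function buildDeleteQuery({ _id, author, role, adminRole = 'ADMIN' }) {
    const query = { _id };
    if (author && role !== adminRole) {
      query.author = author;
    }
    return query;
  }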
@@ -235,13 +237,140 @@ const deletePromptGroup = async ({ _id, author, role }) => {

   await Prompt.deleteMany(groupQuery);
   await removeGroupFromAllProjects(_id);
+
+  try {
+    await removeAllPermissions({ resourceType: ResourceType.PROMPTGROUP, resourceId: _id });
+  } catch (error) {
+    logger.error('Error removing promptGroup permissions:', error);
+  }
+
   return { message: 'Prompt group deleted successfully' };
 };

+/**
+ * Get prompt groups by accessible IDs with optional cursor-based pagination.
+ * @param {Object} params - The parameters for getting accessible prompt groups.
+ * @param {Array} [params.accessibleIds] - Array of prompt group ObjectIds the user has ACL access to.
+ * @param {Object} [params.otherParams] - Additional query parameters (including author filter).
+ * @param {number} [params.limit] - Number of prompt groups to return (max 100). If not provided, returns all prompt groups.
+ * @param {string} [params.after] - Cursor for pagination - get prompt groups after this cursor. // base64 encoded JSON string with updatedAt and _id.
+ * @returns {Promise<Object>} A promise that resolves to an object containing the prompt groups data and pagination info.
+ */
+async function getListPromptGroupsByAccess({
+  accessibleIds = [],
+  otherParams = {},
+  limit = null,
+  after = null,
+}) {
+  const isPaginated = limit !== null && limit !== undefined;
+  const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;
+
+  // Build base query combining ACL accessible prompt groups with other filters
+  const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };
+
+  // Add cursor condition
+  if (after && typeof after === 'string' && after !== 'undefined' && after !== 'null') {
+    try {
+      const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
+      const { updatedAt, _id } = cursor;
+
+      const cursorCondition = {
+        $or: [
+          { updatedAt: { $lt: new Date(updatedAt) } },
+          { updatedAt: new Date(updatedAt), _id: { $gt: new ObjectId(_id) } },
+        ],
+      };
+
+      // Merge cursor condition with base query
+      if (Object.keys(baseQuery).length > 0) {
+        baseQuery.$and = [{ ...baseQuery }, cursorCondition];
+        // Remove the original conditions from baseQuery to avoid duplication
+        Object.keys(baseQuery).forEach((key) => {
+          if (key !== '$and') delete baseQuery[key];
+        });
+      } else {
+        Object.assign(baseQuery, cursorCondition);
+      }
+    } catch (error) {
+      logger.warn('Invalid cursor:', error.message);
+    }
+  }
+
+  // Build aggregation pipeline
+  const pipeline = [{ $match: baseQuery }, { $sort: { updatedAt: -1, _id: 1 } }];
+
+  // Only apply limit if pagination is requested
+  if (isPaginated) {
+    pipeline.push({ $limit: normalizedLimit + 1 });
+  }
+
+  // Add lookup for production prompt
+  pipeline.push(
+    {
+      $lookup: {
+        from: 'prompts',
+        localField: 'productionId',
+        foreignField: '_id',
+        as: 'productionPrompt',
+      },
+    },
+    { $unwind: { path: '$productionPrompt', preserveNullAndEmptyArrays: true } },
+    {
+      $project: {
+        name: 1,
+        numberOfGenerations: 1,
+        oneliner: 1,
+        category: 1,
+        projectIds: 1,
+        productionId: 1,
+        author: 1,
+        authorName: 1,
+        createdAt: 1,
+        updatedAt: 1,
+        'productionPrompt.prompt': 1,
+      },
+    },
+  );
+
+  const promptGroups = await PromptGroup.aggregate(pipeline).exec();
+
+  const hasMore = isPaginated ? promptGroups.length > normalizedLimit : false;
+  const data = (isPaginated ? promptGroups.slice(0, normalizedLimit) : promptGroups).map(
+    (group) => {
+      if (group.author) {
+        group.author = group.author.toString();
+      }
+      return group;
+    },
+  );
+
+  // Generate next cursor only if paginated
+  let nextCursor = null;
+  if (isPaginated && hasMore && data.length > 0) {
+    const lastGroup = promptGroups[normalizedLimit - 1];
+    nextCursor = Buffer.from(
+      JSON.stringify({
+        updatedAt: lastGroup.updatedAt.toISOString(),
+        _id: lastGroup._id.toString(),
+      }),
+    ).toString('base64');
+  }
+
+  return {
+    object: 'list',
+    data,
+    first_id: data.length > 0 ? data[0]._id.toString() : null,
+    last_id: data.length > 0 ? data[data.length - 1]._id.toString() : null,
+    has_more: hasMore,
+    after: nextCursor,
+  };
+}
+
 module.exports = {
   getPromptGroups,
   deletePromptGroup,
   getAllPromptGroups,
+  getListPromptGroupsByAccess,
   /**
    * Create a prompt and its respective group
    * @param {TCreatePromptRecord} saveData
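Note: getListPromptGroupsByAccess pages with an opaque cursor: base64 of a JSON object holding the last row's updatedAt and _id, matching the $or condition in the hunk above. A round-trip sketch (the _id value is a placeholder):

  // Encode/decode the pagination cursor used above.
  function encodeCursor({ updatedAt, _id }) {
    return Buffer.from(
      JSON.stringify({ updatedAt: updatedAt.toISOString(), _id: String(_id) }),
    ).toString('base64');
  }

  function decodeCursor(after) {
    return JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
  }

  const cursor = encodeCursor({ updatedAt: new Date(), _id: '665f1c2e9b1d8a0012345678' });
  console.log(decodeCursor(cursor)); // { updatedAt: '...', _id: '665f1c2e9b1d8a0012345678' }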
@@ -430,6 +559,16 @@ module.exports = {
       .lean();

     if (remainingPrompts.length === 0) {
+      // Remove all ACL entries for the promptGroup when deleting the last prompt
+      try {
+        await removeAllPermissions({
+          resourceType: ResourceType.PROMPTGROUP,
+          resourceId: groupId,
+        });
+      } catch (error) {
+        logger.error('Error removing promptGroup permissions:', error);
+      }
+
       await PromptGroup.deleteOne({ _id: groupId });
       await removeGroupFromAllProjects(groupId);
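Note: the new spec below composes its access roles by OR-ing permission bits, e.g. the owner role carries VIEW | EDIT | DELETE | SHARE. A quick illustration with placeholder bit values (librechat-data-provider defines the real PermissionBits):

  // How the spec's owner role composes its bits (values illustrative).
  const PermissionBits = { VIEW: 1, EDIT: 2, DELETE: 4, SHARE: 8 };
  const ownerBits =
    PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE;
  console.log(ownerBits.toString(2)); // '1111' -- all four capabilities set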
api/models/Prompt.spec.js (new file, 564 lines)
@@ -0,0 +1,564 @@
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { logger } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
  SystemRoles,
  ResourceType,
  AccessRoleIds,
  PrincipalType,
  PermissionBits,
} = require('librechat-data-provider');

// Mock the config/connect module to prevent connection attempts during tests
jest.mock('../../config/connect', () => jest.fn().mockResolvedValue(true));

const dbModels = require('~/db/models');

// Disable console for tests
logger.silent = true;

let mongoServer;
let Prompt, PromptGroup, AclEntry, AccessRole, User, Group, Project;
let promptFns, permissionService;
let testUsers, testGroups, testRoles;

beforeAll(async () => {
  // Set up MongoDB memory server
  mongoServer = await MongoMemoryServer.create();
  const mongoUri = mongoServer.getUri();
  await mongoose.connect(mongoUri);

  // Initialize models
  Prompt = dbModels.Prompt;
  PromptGroup = dbModels.PromptGroup;
  AclEntry = dbModels.AclEntry;
  AccessRole = dbModels.AccessRole;
  User = dbModels.User;
  Group = dbModels.Group;
  Project = dbModels.Project;

  promptFns = require('~/models/Prompt');
  permissionService = require('~/server/services/PermissionService');

  // Create test data
  await setupTestData();
});

afterAll(async () => {
  await mongoose.disconnect();
  await mongoServer.stop();
  jest.clearAllMocks();
});

async function setupTestData() {
  // Create access roles for promptGroups
  testRoles = {
    viewer: await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
      name: 'Viewer',
      description: 'Can view promptGroups',
      resourceType: ResourceType.PROMPTGROUP,
      permBits: PermissionBits.VIEW,
    }),
    editor: await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
      name: 'Editor',
      description: 'Can view and edit promptGroups',
      resourceType: ResourceType.PROMPTGROUP,
      permBits: PermissionBits.VIEW | PermissionBits.EDIT,
    }),
    owner: await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
      name: 'Owner',
      description: 'Full control over promptGroups',
      resourceType: ResourceType.PROMPTGROUP,
      permBits:
        PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE,
    }),
  };

  // Create test users
  testUsers = {
    owner: await User.create({
      name: 'Prompt Owner',
      email: 'owner@example.com',
      role: SystemRoles.USER,
    }),
    editor: await User.create({
      name: 'Prompt Editor',
      email: 'editor@example.com',
      role: SystemRoles.USER,
    }),
    viewer: await User.create({
      name: 'Prompt Viewer',
      email: 'viewer@example.com',
      role: SystemRoles.USER,
    }),
    admin: await User.create({
      name: 'Admin User',
      email: 'admin@example.com',
      role: SystemRoles.ADMIN,
    }),
    noAccess: await User.create({
      name: 'No Access User',
      email: 'noaccess@example.com',
      role: SystemRoles.USER,
    }),
  };

  // Create test groups
  testGroups = {
    editors: await Group.create({
      name: 'Prompt Editors',
      description: 'Group with editor access',
    }),
    viewers: await Group.create({
      name: 'Prompt Viewers',
      description: 'Group with viewer access',
    }),
  };

  await Project.create({
    name: 'Global',
    description: 'Global project',
    promptGroupIds: [],
  });
}

describe('Prompt ACL Permissions', () => {
  describe('Creating Prompts with Permissions', () => {
    it('should grant owner permissions when creating a prompt', async () => {
      // First create a group
      const testGroup = await PromptGroup.create({
        name: 'Test Group',
        category: 'testing',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new mongoose.Types.ObjectId(),
      });

      const promptData = {
        prompt: {
          prompt: 'Test prompt content',
          name: 'Test Prompt',
          type: 'text',
          groupId: testGroup._id,
        },
        author: testUsers.owner._id,
      };

      await promptFns.savePrompt(promptData);

      // Manually grant permissions as would happen in the route
      await permissionService.grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
        grantedBy: testUsers.owner._id,
      });

      // Check ACL entry
      const aclEntry = await AclEntry.findOne({
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testGroup._id,
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
      });

      expect(aclEntry).toBeTruthy();
      expect(aclEntry.permBits).toBe(testRoles.owner.permBits);
    });
  });

  describe('Accessing Prompts', () => {
    let testPromptGroup;

    beforeEach(async () => {
      // Create a prompt group
      testPromptGroup = await PromptGroup.create({
        name: 'Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      // Create a prompt
      await Prompt.create({
        prompt: 'Test prompt for access control',
        name: 'Access Test Prompt',
        author: testUsers.owner._id,
        groupId: testPromptGroup._id,
        type: 'text',
      });

      // Grant owner permissions
      await permissionService.grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
        grantedBy: testUsers.owner._id,
      });
    });

    afterEach(async () => {
      await Prompt.deleteMany({});
      await PromptGroup.deleteMany({});
      await AclEntry.deleteMany({});
    });

    it('owner should have full access to their prompt', async () => {
      const hasAccess = await permissionService.checkPermission({
        userId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.VIEW,
      });

      expect(hasAccess).toBe(true);

      const canEdit = await permissionService.checkPermission({
        userId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.EDIT,
      });

      expect(canEdit).toBe(true);
    });

    it('user with viewer role should only have view access', async () => {
      // Grant viewer permissions
      await permissionService.grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.viewer._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
        grantedBy: testUsers.owner._id,
      });

      const canView = await permissionService.checkPermission({
        userId: testUsers.viewer._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.VIEW,
      });

      const canEdit = await permissionService.checkPermission({
        userId: testUsers.viewer._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.EDIT,
      });

      expect(canView).toBe(true);
      expect(canEdit).toBe(false);
    });

    it('user without permissions should have no access', async () => {
      const hasAccess = await permissionService.checkPermission({
        userId: testUsers.noAccess._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.VIEW,
      });

      expect(hasAccess).toBe(false);
    });

    it('admin should have access regardless of permissions', async () => {
      // Admin users should work through normal permission system
      // The middleware layer handles admin bypass, not the permission service
      const hasAccess = await permissionService.checkPermission({
        userId: testUsers.admin._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.VIEW,
      });

      // Without explicit permissions, even admin won't have access at this layer
      expect(hasAccess).toBe(false);

      // The actual admin bypass happens in the middleware layer (`canAccessPromptViaGroup`/`canAccessPromptGroupResource`)
      // which checks req.user.role === SystemRoles.ADMIN
    });
  });

  describe('Group-based Access', () => {
    let testPromptGroup;

    beforeEach(async () => {
      // Create a prompt group first
      testPromptGroup = await PromptGroup.create({
        name: 'Group Access Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      await Prompt.create({
        prompt: 'Group access test prompt',
        name: 'Group Test',
        author: testUsers.owner._id,
        groupId: testPromptGroup._id,
        type: 'text',
      });

      // Add users to groups
      await User.findByIdAndUpdate(testUsers.editor._id, {
        $push: { groups: testGroups.editors._id },
      });

      await User.findByIdAndUpdate(testUsers.viewer._id, {
        $push: { groups: testGroups.viewers._id },
      });
    });

    afterEach(async () => {
      await Prompt.deleteMany({});
      await AclEntry.deleteMany({});
      await User.updateMany({}, { $set: { groups: [] } });
    });

    it('group members should inherit group permissions', async () => {
      // Create a prompt group
      const testPromptGroup = await PromptGroup.create({
        name: 'Group Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      const { addUserToGroup } = require('~/models');
      await addUserToGroup(testUsers.editor._id, testGroups.editors._id);

      const prompt = await promptFns.savePrompt({
        author: testUsers.owner._id,
        prompt: {
          prompt: 'Group test prompt',
          name: 'Group Test',
          groupId: testPromptGroup._id,
          type: 'text',
        },
      });

      // Check if savePrompt returned an error
      if (!prompt || !prompt.prompt) {
        throw new Error(`Failed to save prompt: ${prompt?.message || 'Unknown error'}`);
      }

      // Grant edit permissions to the group
      await permissionService.grantPermission({
        principalType: PrincipalType.GROUP,
        principalId: testGroups.editors._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
        grantedBy: testUsers.owner._id,
      });

      // Check if group member has access
      const hasAccess = await permissionService.checkPermission({
        userId: testUsers.editor._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.EDIT,
      });

      expect(hasAccess).toBe(true);

      // Check that non-member doesn't have access
      const nonMemberAccess = await permissionService.checkPermission({
        userId: testUsers.viewer._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.EDIT,
      });

      expect(nonMemberAccess).toBe(false);
    });
  });

  describe('Public Access', () => {
    let publicPromptGroup, privatePromptGroup;

    beforeEach(async () => {
      // Create separate prompt groups for public and private access
      publicPromptGroup = await PromptGroup.create({
        name: 'Public Access Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      privatePromptGroup = await PromptGroup.create({
        name: 'Private Access Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      // Create prompts in their respective groups
      await Prompt.create({
        prompt: 'Public prompt',
        name: 'Public',
        author: testUsers.owner._id,
        groupId: publicPromptGroup._id,
        type: 'text',
      });

      await Prompt.create({
        prompt: 'Private prompt',
        name: 'Private',
        author: testUsers.owner._id,
        groupId: privatePromptGroup._id,
        type: 'text',
      });

      // Grant public view access to publicPromptGroup
      await permissionService.grantPermission({
        principalType: PrincipalType.PUBLIC,
        principalId: null,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: publicPromptGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
        grantedBy: testUsers.owner._id,
      });

      // Grant only owner access to privatePromptGroup
      await permissionService.grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: privatePromptGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
        grantedBy: testUsers.owner._id,
      });
    });

    afterEach(async () => {
      await Prompt.deleteMany({});
      await PromptGroup.deleteMany({});
      await AclEntry.deleteMany({});
    });

    it('public prompt should be accessible to any user', async () => {
      const hasAccess = await permissionService.checkPermission({
        userId: testUsers.noAccess._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: publicPromptGroup._id,
        requiredPermission: PermissionBits.VIEW,
        includePublic: true,
      });

      expect(hasAccess).toBe(true);
    });

    it('private prompt should not be accessible to unauthorized users', async () => {
      const hasAccess = await permissionService.checkPermission({
        userId: testUsers.noAccess._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: privatePromptGroup._id,
        requiredPermission: PermissionBits.VIEW,
        includePublic: true,
      });

      expect(hasAccess).toBe(false);
    });
  });

  describe('Prompt Deletion', () => {
    let testPromptGroup;

    it('should remove ACL entries when prompt is deleted', async () => {
      testPromptGroup = await PromptGroup.create({
        name: 'Deletion Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      const prompt = await promptFns.savePrompt({
        author: testUsers.owner._id,
        prompt: {
          prompt: 'To be deleted',
          name: 'Delete Test',
          groupId: testPromptGroup._id,
          type: 'text',
        },
      });

      // Check if savePrompt returned an error
      if (!prompt || !prompt.prompt) {
        throw new Error(`Failed to save prompt: ${prompt?.message || 'Unknown error'}`);
      }

      const testPromptId = prompt.prompt._id;
      const promptGroupId = testPromptGroup._id;

      // Grant permission
      await permissionService.grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
        grantedBy: testUsers.owner._id,
      });

      // Verify ACL entry exists
      const beforeDelete = await AclEntry.find({
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
      });
      expect(beforeDelete).toHaveLength(1);

      // Delete the prompt
      await promptFns.deletePrompt({
        promptId: testPromptId,
        groupId: promptGroupId,
        author: testUsers.owner._id,
        role: SystemRoles.USER,
      });

      // Verify ACL entries are removed
      const aclEntries = await AclEntry.find({
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
      });

      expect(aclEntries).toHaveLength(0);
    });
  });

  describe('Backwards Compatibility', () => {
    it('should handle prompts without ACL entries gracefully', async () => {
      // Create a prompt group first
      const promptGroup = await PromptGroup.create({
        name: 'Legacy Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      // Create a prompt without ACL entries (legacy prompt)
      const legacyPrompt = await Prompt.create({
        prompt: 'Legacy prompt without ACL',
        name: 'Legacy',
        author: testUsers.owner._id,
        groupId: promptGroup._id,
        type: 'text',
      });

      // The system should handle this gracefully
      const prompt = await promptFns.getPrompt({ _id: legacyPrompt._id });
      expect(prompt).toBeTruthy();
|
||||||
|
expect(prompt._id.toString()).toBe(legacyPrompt._id.toString());
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
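For orientation, the assertions above all reduce to a bitmask test. A minimal sketch of the idea, with illustrative bit values (the real constants come from librechat-data-provider and may differ):

// Hypothetical bit values for illustration only
const PermissionBits = { VIEW: 1, EDIT: 2, DELETE: 4, SHARE: 8 };

// Roles are just combinations of bits
const ownerBits =
  PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE; // 15
const viewerBits = PermissionBits.VIEW; // 1

// A check passes only when every required bit is present in the grant
function hasPermission(grantedBits, requiredBits) {
  return (grantedBits & requiredBits) === requiredBits;
}

hasPermission(viewerBits, PermissionBits.EDIT); // false — mirrors the nonMemberAccess assertion
hasPermission(ownerBits, PermissionBits.EDIT); // true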
api/models/PromptGroupMigration.spec.js (new file, 280 lines added)
@@ -0,0 +1,280 @@

const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { logger } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
  Constants,
  ResourceType,
  AccessRoleIds,
  PrincipalType,
  PrincipalModel,
  PermissionBits,
} = require('librechat-data-provider');

// Mock the config/connect module to prevent connection attempts during tests
jest.mock('../../config/connect', () => jest.fn().mockResolvedValue(true));

// Disable console for tests
logger.silent = true;

describe('PromptGroup Migration Script', () => {
  let mongoServer;
  let Prompt, PromptGroup, AclEntry, AccessRole, User, Project;
  let migrateToPromptGroupPermissions;
  let testOwner, testProject;
  let ownerRole, viewerRole;

  beforeAll(async () => {
    // Set up MongoDB memory server
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);

    // Initialize models
    const dbModels = require('~/db/models');
    Prompt = dbModels.Prompt;
    PromptGroup = dbModels.PromptGroup;
    AclEntry = dbModels.AclEntry;
    AccessRole = dbModels.AccessRole;
    User = dbModels.User;
    Project = dbModels.Project;

    // Create test user
    testOwner = await User.create({
      name: 'Test Owner',
      email: 'owner@test.com',
      role: 'USER',
    });

    // Create test project with the proper name
    const projectName = Constants.GLOBAL_PROJECT_NAME || 'instance';
    testProject = await Project.create({
      name: projectName,
      description: 'Global project',
      promptGroupIds: [],
    });

    // Create promptGroup access roles
    ownerRole = await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
      name: 'Owner',
      description: 'Full control over promptGroups',
      resourceType: ResourceType.PROMPTGROUP,
      permBits:
        PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE,
    });

    viewerRole = await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
      name: 'Viewer',
      description: 'Can view promptGroups',
      resourceType: ResourceType.PROMPTGROUP,
      permBits: PermissionBits.VIEW,
    });

    await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
      name: 'Editor',
      description: 'Can view and edit promptGroups',
      resourceType: ResourceType.PROMPTGROUP,
      permBits: PermissionBits.VIEW | PermissionBits.EDIT,
    });

    // Import migration function
    const migration = require('../../config/migrate-prompt-permissions');
    migrateToPromptGroupPermissions = migration.migrateToPromptGroupPermissions;
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    // Clean up before each test
    await Prompt.deleteMany({});
    await PromptGroup.deleteMany({});
    await AclEntry.deleteMany({});
    // Reset the project's promptGroupIds array
    testProject.promptGroupIds = [];
    await testProject.save();
  });

  it('should categorize promptGroups correctly in dry run', async () => {
    // Create global prompt group (in Global project)
    const globalPromptGroup = await PromptGroup.create({
      name: 'Global Group',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    // Create private prompt group (not in any project)
    await PromptGroup.create({
      name: 'Private Group',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    // Add global group to project's promptGroupIds array
    testProject.promptGroupIds = [globalPromptGroup._id];
    await testProject.save();

    const result = await migrateToPromptGroupPermissions({ dryRun: true });

    expect(result.dryRun).toBe(true);
    expect(result.summary.total).toBe(2);
    expect(result.summary.globalViewAccess).toBe(1);
    expect(result.summary.privateGroups).toBe(1);
  });

  it('should grant appropriate permissions during migration', async () => {
    // Create prompt groups
    const globalPromptGroup = await PromptGroup.create({
      name: 'Global Group',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    const privatePromptGroup = await PromptGroup.create({
      name: 'Private Group',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    // Add global group to project's promptGroupIds array
    testProject.promptGroupIds = [globalPromptGroup._id];
    await testProject.save();

    const result = await migrateToPromptGroupPermissions({ dryRun: false });

    expect(result.migrated).toBe(2);
    expect(result.errors).toBe(0);
    expect(result.ownerGrants).toBe(2);
    expect(result.publicViewGrants).toBe(1);

    // Check global promptGroup permissions
    const globalOwnerEntry = await AclEntry.findOne({
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: globalPromptGroup._id,
      principalType: PrincipalType.USER,
      principalId: testOwner._id,
    });
    expect(globalOwnerEntry).toBeTruthy();
    expect(globalOwnerEntry.permBits).toBe(ownerRole.permBits);

    const globalPublicEntry = await AclEntry.findOne({
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: globalPromptGroup._id,
      principalType: PrincipalType.PUBLIC,
    });
    expect(globalPublicEntry).toBeTruthy();
    expect(globalPublicEntry.permBits).toBe(viewerRole.permBits);

    // Check private promptGroup permissions
    const privateOwnerEntry = await AclEntry.findOne({
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: privatePromptGroup._id,
      principalType: PrincipalType.USER,
      principalId: testOwner._id,
    });
    expect(privateOwnerEntry).toBeTruthy();
    expect(privateOwnerEntry.permBits).toBe(ownerRole.permBits);

    const privatePublicEntry = await AclEntry.findOne({
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: privatePromptGroup._id,
      principalType: PrincipalType.PUBLIC,
    });
    expect(privatePublicEntry).toBeNull();
  });

  it('should skip promptGroups that already have ACL entries', async () => {
    // Create prompt groups
    const promptGroup1 = await PromptGroup.create({
      name: 'Group 1',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    const promptGroup2 = await PromptGroup.create({
      name: 'Group 2',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    // Grant permission to one promptGroup manually (simulating it already has ACL)
    await AclEntry.create({
      principalType: PrincipalType.USER,
      principalId: testOwner._id,
      principalModel: PrincipalModel.USER,
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: promptGroup1._id,
      permBits: ownerRole.permBits,
      roleId: ownerRole._id,
      grantedBy: testOwner._id,
      grantedAt: new Date(),
    });

    const result = await migrateToPromptGroupPermissions({ dryRun: false });

    // Should only migrate promptGroup2, skip promptGroup1
    expect(result.migrated).toBe(1);
    expect(result.errors).toBe(0);

    // Verify promptGroup2 now has permissions
    const group2Entry = await AclEntry.findOne({
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: promptGroup2._id,
    });
    expect(group2Entry).toBeTruthy();
  });

  it('should handle promptGroups with prompts correctly', async () => {
    // Create a promptGroup with some prompts
    const promptGroup = await PromptGroup.create({
      name: 'Group with Prompts',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    // Create some prompts in this group
    await Prompt.create({
      prompt: 'First prompt',
      author: testOwner._id,
      groupId: promptGroup._id,
      type: 'text',
    });

    await Prompt.create({
      prompt: 'Second prompt',
      author: testOwner._id,
      groupId: promptGroup._id,
      type: 'text',
    });

    const result = await migrateToPromptGroupPermissions({ dryRun: false });

    expect(result.migrated).toBe(1);
    expect(result.errors).toBe(0);

    // Verify the promptGroup has permissions
    const groupEntry = await AclEntry.findOne({
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: promptGroup._id,
    });
    expect(groupEntry).toBeTruthy();

    // Verify no prompt-level permissions were created
    const promptEntries = await AclEntry.find({
      resourceType: 'prompt',
    });
    expect(promptEntries).toHaveLength(0);
  });
});
@@ -2,7 +2,6 @@ const {
   CacheKeys,
   SystemRoles,
   roleDefaults,
-  PermissionTypes,
   permissionsSchema,
   removeNullishValues,
 } = require('librechat-data-provider');
@@ -17,7 +16,7 @@ const { Role } = require('~/db/models');
  *
  * @param {string} roleName - The name of the role to find or create.
  * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
- * @returns {Promise<Object>} A plain object representing the role document.
+ * @returns {Promise<IRole>} Role document.
  */
 const getRoleByName = async function (roleName, fieldsToSelect = null) {
   const cache = getLogStores(CacheKeys.ROLES);
@@ -73,8 +72,9 @@ const updateRoleByName = async function (roleName, updates) {
  * Updates access permissions for a specific role and multiple permission types.
  * @param {string} roleName - The role to update.
  * @param {Object.<PermissionTypes, Object.<Permissions, boolean>>} permissionsUpdate - Permissions to update and their values.
+ * @param {IRole} [roleData] - Optional role data to use instead of fetching from the database.
  */
-async function updateAccessPermissions(roleName, permissionsUpdate) {
+async function updateAccessPermissions(roleName, permissionsUpdate, roleData) {
   // Filter and clean the permission updates based on our schema definition.
   const updates = {};
   for (const [permissionType, permissions] of Object.entries(permissionsUpdate)) {
@@ -87,7 +87,7 @@ async function updateAccessPermissions(roleName, permissionsUpdate) {
   }

   try {
-    const role = await getRoleByName(roleName);
+    const role = roleData ?? (await getRoleByName(roleName));
     if (!role) {
       return;
     }
@@ -114,7 +114,6 @@ async function updateAccessPermissions(roleName, permissionsUpdate) {
     }
   }

-  // Process the current updates
   for (const [permissionType, permissions] of Object.entries(updates)) {
     const currentTypePermissions = currentPermissions[permissionType] || {};
     updatedPermissions[permissionType] = { ...currentTypePermissions };
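The new optional roleData parameter lets a caller that already holds the role document skip the cache/database lookup inside updateAccessPermissions. A sketch of the two call shapes this enables (the permissionsUpdate contents are illustrative):

// Fetch-on-demand, as before
await updateAccessPermissions(SystemRoles.USER, permissionsUpdate);

// Reuse a role already in hand, avoiding a second lookup
const role = await getRoleByName(SystemRoles.USER);
await updateAccessPermissions(SystemRoles.USER, permissionsUpdate, role);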
@@ -1,5 +1,4 @@
 const { logger } = require('@librechat/data-schemas');
-const { getBalanceConfig } = require('~/server/services/Config');
 const { getMultiplier, getCacheMultiplier } = require('./tx');
 const { Transaction, Balance } = require('~/db/models');

@@ -187,20 +186,23 @@ async function createAutoRefillTransaction(txData) {

 /**
  * Static method to create a transaction and update the balance
- * @param {txData} txData - Transaction data.
+ * @param {txData} _txData - Transaction data.
  */
-async function createTransaction(txData) {
+async function createTransaction(_txData) {
+  const { balance, transactions, ...txData } = _txData;
   if (txData.rawAmount != null && isNaN(txData.rawAmount)) {
     return;
   }

+  if (transactions?.enabled === false) {
+    return;
+  }
+
   const transaction = new Transaction(txData);
   transaction.endpointTokenConfig = txData.endpointTokenConfig;
   calculateTokenValue(transaction);

   await transaction.save();

-  const balance = await getBalanceConfig();
   if (!balance?.enabled) {
     return;
   }
@@ -221,9 +223,14 @@ async function createTransaction(txData) {

 /**
  * Static method to create a structured transaction and update the balance
- * @param {txData} txData - Transaction data.
+ * @param {txData} _txData - Transaction data.
  */
-async function createStructuredTransaction(txData) {
+async function createStructuredTransaction(_txData) {
+  const { balance, transactions, ...txData } = _txData;
+  if (transactions?.enabled === false) {
+    return;
+  }
+
   const transaction = new Transaction({
     ...txData,
     endpointTokenConfig: txData.endpointTokenConfig,
@@ -233,7 +240,6 @@ async function createStructuredTransaction(txData) {

   await transaction.save();

-  const balance = await getBalanceConfig();
   if (!balance?.enabled) {
     return;
   }
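Both factory functions now read their feature flags from the transaction payload itself instead of calling getBalanceConfig(), so each call decides independently whether to persist a transaction and whether to touch the balance. A sketch of the resulting call shape (field names are those destructured in the diff; the surrounding values are illustrative):

await createTransaction({
  user: userId,
  conversationId,
  model: 'gpt-4',
  context: 'message',
  tokenType: 'prompt',
  rawAmount: -100,
  transactions: { enabled: true }, // { enabled: false } returns early, saving nothing
  balance: { enabled: true }, // { enabled: false } saves the transaction but skips the balance update
});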
@@ -1,13 +1,9 @@
 const mongoose = require('mongoose');
 const { MongoMemoryServer } = require('mongodb-memory-server');
 const { spendTokens, spendStructuredTokens } = require('./spendTokens');
-const { getBalanceConfig } = require('~/server/services/Config');
 const { getMultiplier, getCacheMultiplier } = require('./tx');
-const { createTransaction } = require('./Transaction');
+const { createTransaction, createStructuredTransaction } = require('./Transaction');
-const { Balance } = require('~/db/models');
+const { Balance, Transaction } = require('~/db/models');

-// Mock the custom config module so we can control the balance flag.
-jest.mock('~/server/services/Config');
-
 let mongoServer;
 beforeAll(async () => {
@@ -23,8 +19,6 @@ afterAll(async () => {

 beforeEach(async () => {
   await mongoose.connection.dropDatabase();
-  // Default: enable balance updates in tests.
-  getBalanceConfig.mockResolvedValue({ enabled: true });
 });

 describe('Regular Token Spending Tests', () => {
@@ -41,6 +35,7 @@ describe('Regular Token Spending Tests', () => {
       model,
       context: 'test',
       endpointTokenConfig: null,
+      balance: { enabled: true },
     };

     const tokenUsage = {
@@ -74,6 +69,7 @@ describe('Regular Token Spending Tests', () => {
       model,
       context: 'test',
       endpointTokenConfig: null,
+      balance: { enabled: true },
     };

     const tokenUsage = {
@@ -104,6 +100,7 @@ describe('Regular Token Spending Tests', () => {
       model,
       context: 'test',
       endpointTokenConfig: null,
+      balance: { enabled: true },
     };

     const tokenUsage = {};
@@ -128,6 +125,7 @@ describe('Regular Token Spending Tests', () => {
       model,
       context: 'test',
       endpointTokenConfig: null,
+      balance: { enabled: true },
     };

     const tokenUsage = { promptTokens: 100 };
@@ -143,8 +141,7 @@ describe('Regular Token Spending Tests', () => {
   });

   test('spendTokens should not update balance when balance feature is disabled', async () => {
-    // Arrange: Override the config to disable balance updates.
+    // Arrange: Balance config is now passed directly in txData
-    getBalanceConfig.mockResolvedValue({ balance: { enabled: false } });
     const userId = new mongoose.Types.ObjectId();
     const initialBalance = 10000000;
     await Balance.create({ user: userId, tokenCredits: initialBalance });
@@ -156,6 +153,7 @@ describe('Regular Token Spending Tests', () => {
       model,
       context: 'test',
       endpointTokenConfig: null,
+      balance: { enabled: false },
     };

     const tokenUsage = {
@@ -186,6 +184,7 @@ describe('Structured Token Spending Tests', () => {
       model,
       context: 'message',
       endpointTokenConfig: null,
+      balance: { enabled: true },
     };

     const tokenUsage = {
@@ -239,6 +238,7 @@ describe('Structured Token Spending Tests', () => {
       conversationId: 'test-convo',
       model,
       context: 'message',
+      balance: { enabled: true },
     };

     const tokenUsage = {
@@ -271,6 +271,7 @@ describe('Structured Token Spending Tests', () => {
       conversationId: 'test-convo',
       model,
       context: 'message',
+      balance: { enabled: true },
     };

     const tokenUsage = {
@@ -302,6 +303,7 @@ describe('Structured Token Spending Tests', () => {
       conversationId: 'test-convo',
       model,
       context: 'message',
+      balance: { enabled: true },
     };

     const tokenUsage = {};
@@ -328,6 +330,7 @@ describe('Structured Token Spending Tests', () => {
       conversationId: 'test-convo',
       model,
       context: 'incomplete',
+      balance: { enabled: true },
     };

     const tokenUsage = {
@@ -364,6 +367,7 @@ describe('NaN Handling Tests', () => {
       endpointTokenConfig: null,
       rawAmount: NaN,
       tokenType: 'prompt',
+      balance: { enabled: true },
     };

     // Act
@@ -375,3 +379,188 @@ describe('NaN Handling Tests', () => {
     expect(balance.tokenCredits).toBe(initialBalance);
   });
 });
+
+describe('Transactions Config Tests', () => {
+  test('createTransaction should not save when transactions.enabled is false', async () => {
+    // Arrange
+    const userId = new mongoose.Types.ObjectId();
+    const initialBalance = 10000000;
+    await Balance.create({ user: userId, tokenCredits: initialBalance });
+
+    const model = 'gpt-3.5-turbo';
+    const txData = {
+      user: userId,
+      conversationId: 'test-conversation-id',
+      model,
+      context: 'test',
+      endpointTokenConfig: null,
+      rawAmount: -100,
+      tokenType: 'prompt',
+      transactions: { enabled: false },
+    };
+
+    // Act
+    const result = await createTransaction(txData);
+
+    // Assert: No transaction should be created
+    expect(result).toBeUndefined();
+    const transactions = await Transaction.find({ user: userId });
+    expect(transactions).toHaveLength(0);
+    const balance = await Balance.findOne({ user: userId });
+    expect(balance.tokenCredits).toBe(initialBalance);
+  });
+
+  test('createTransaction should save when transactions.enabled is true', async () => {
+    // Arrange
+    const userId = new mongoose.Types.ObjectId();
+    const initialBalance = 10000000;
+    await Balance.create({ user: userId, tokenCredits: initialBalance });
+
+    const model = 'gpt-3.5-turbo';
+    const txData = {
+      user: userId,
+      conversationId: 'test-conversation-id',
+      model,
+      context: 'test',
+      endpointTokenConfig: null,
+      rawAmount: -100,
+      tokenType: 'prompt',
+      transactions: { enabled: true },
+      balance: { enabled: true },
+    };
+
+    // Act
+    const result = await createTransaction(txData);
+
+    // Assert: Transaction should be created
+    expect(result).toBeDefined();
+    expect(result.balance).toBeLessThan(initialBalance);
+    const transactions = await Transaction.find({ user: userId });
+    expect(transactions).toHaveLength(1);
+    expect(transactions[0].rawAmount).toBe(-100);
+  });
+
+  test('createTransaction should save when balance.enabled is true even if transactions config is missing', async () => {
+    // Arrange
+    const userId = new mongoose.Types.ObjectId();
+    const initialBalance = 10000000;
+    await Balance.create({ user: userId, tokenCredits: initialBalance });
+
+    const model = 'gpt-3.5-turbo';
+    const txData = {
+      user: userId,
+      conversationId: 'test-conversation-id',
+      model,
+      context: 'test',
+      endpointTokenConfig: null,
+      rawAmount: -100,
+      tokenType: 'prompt',
+      balance: { enabled: true },
+      // No transactions config provided
+    };
+
+    // Act
+    const result = await createTransaction(txData);
+
+    // Assert: Transaction should be created (backward compatibility)
+    expect(result).toBeDefined();
+    expect(result.balance).toBeLessThan(initialBalance);
+    const transactions = await Transaction.find({ user: userId });
+    expect(transactions).toHaveLength(1);
+  });
+
+  test('createTransaction should save transaction but not update balance when balance is disabled but transactions enabled', async () => {
+    // Arrange
+    const userId = new mongoose.Types.ObjectId();
+    const initialBalance = 10000000;
+    await Balance.create({ user: userId, tokenCredits: initialBalance });
+
+    const model = 'gpt-3.5-turbo';
+    const txData = {
+      user: userId,
+      conversationId: 'test-conversation-id',
+      model,
+      context: 'test',
+      endpointTokenConfig: null,
+      rawAmount: -100,
+      tokenType: 'prompt',
+      transactions: { enabled: true },
+      balance: { enabled: false },
+    };
+
+    // Act
+    const result = await createTransaction(txData);
+
+    // Assert: Transaction should be created but balance unchanged
+    expect(result).toBeUndefined();
+    const transactions = await Transaction.find({ user: userId });
+    expect(transactions).toHaveLength(1);
+    expect(transactions[0].rawAmount).toBe(-100);
+    const balance = await Balance.findOne({ user: userId });
+    expect(balance.tokenCredits).toBe(initialBalance);
+  });
+
+  test('createStructuredTransaction should not save when transactions.enabled is false', async () => {
+    // Arrange
+    const userId = new mongoose.Types.ObjectId();
+    const initialBalance = 10000000;
+    await Balance.create({ user: userId, tokenCredits: initialBalance });
+
+    const model = 'claude-3-5-sonnet';
+    const txData = {
+      user: userId,
+      conversationId: 'test-conversation-id',
+      model,
+      context: 'message',
+      tokenType: 'prompt',
+      inputTokens: -10,
+      writeTokens: -100,
+      readTokens: -5,
+      transactions: { enabled: false },
+    };
+
+    // Act
+    const result = await createStructuredTransaction(txData);
+
+    // Assert: No transaction should be created
+    expect(result).toBeUndefined();
+    const transactions = await Transaction.find({ user: userId });
+    expect(transactions).toHaveLength(0);
+    const balance = await Balance.findOne({ user: userId });
+    expect(balance.tokenCredits).toBe(initialBalance);
+  });
+
+  test('createStructuredTransaction should save transaction but not update balance when balance is disabled but transactions enabled', async () => {
+    // Arrange
+    const userId = new mongoose.Types.ObjectId();
+    const initialBalance = 10000000;
+    await Balance.create({ user: userId, tokenCredits: initialBalance });
+
+    const model = 'claude-3-5-sonnet';
+    const txData = {
+      user: userId,
+      conversationId: 'test-conversation-id',
+      model,
+      context: 'message',
+      tokenType: 'prompt',
+      inputTokens: -10,
+      writeTokens: -100,
+      readTokens: -5,
+      transactions: { enabled: true },
+      balance: { enabled: false },
+    };
+
+    // Act
+    const result = await createStructuredTransaction(txData);
+
+    // Assert: Transaction should be created but balance unchanged
+    expect(result).toBeUndefined();
+    const transactions = await Transaction.find({ user: userId });
+    expect(transactions).toHaveLength(1);
+    expect(transactions[0].inputTokens).toBe(-10);
+    expect(transactions[0].writeTokens).toBe(-100);
+    expect(transactions[0].readTokens).toBe(-5);
+    const balance = await Balance.findOne({ user: userId });
+    expect(balance.tokenCredits).toBe(initialBalance);
+  });
+});
@@ -118,7 +118,7 @@ const addIntervalToDate = (date, value, unit) => {
  * @async
  * @function
  * @param {Object} params - The function parameters.
- * @param {Express.Request} params.req - The Express request object.
+ * @param {ServerRequest} params.req - The Express request object.
  * @param {Express.Response} params.res - The Express response object.
  * @param {Object} params.txData - The transaction data.
  * @param {string} params.txData.user - The user ID or identifier.
@@ -1,47 +1,9 @@
 const mongoose = require('mongoose');
+const { buildTree } = require('librechat-data-provider');
 const { MongoMemoryServer } = require('mongodb-memory-server');
 const { getMessages, bulkSaveMessages } = require('./Message');
 const { Message } = require('~/db/models');

-// Original version of buildTree function
-function buildTree({ messages, fileMap }) {
-  if (messages === null) {
-    return null;
-  }
-
-  const messageMap = {};
-  const rootMessages = [];
-  const childrenCount = {};
-
-  messages.forEach((message) => {
-    const parentId = message.parentMessageId ?? '';
-    childrenCount[parentId] = (childrenCount[parentId] || 0) + 1;
-
-    const extendedMessage = {
-      ...message,
-      children: [],
-      depth: 0,
-      siblingIndex: childrenCount[parentId] - 1,
-    };
-
-    if (message.files && fileMap) {
-      extendedMessage.files = message.files.map((file) => fileMap[file.file_id ?? ''] ?? file);
-    }
-
-    messageMap[message.messageId] = extendedMessage;
-
-    const parentMessage = messageMap[parentId];
-    if (parentMessage) {
-      parentMessage.children.push(extendedMessage);
-      extendedMessage.depth = parentMessage.depth + 1;
-    } else {
-      rootMessages.push(extendedMessage);
-    }
-  });
-
-  return rootMessages;
-}
-
 let mongod;
 beforeAll(async () => {
   mongod = await MongoMemoryServer.create();
@@ -22,9 +22,17 @@ const {
 } = require('./Message');
 const { getConvoTitle, getConvo, saveConvo, deleteConvos } = require('./Conversation');
 const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset');
+const { File } = require('~/db/models');
+
+const seedDatabase = async () => {
+  await methods.initializeRoles();
+  await methods.seedDefaultRoles();
+  await methods.ensureDefaultCategories();
+};

 module.exports = {
   ...methods,
+  seedDatabase,
   comparePassword,
   findFileById,
   createFile,
@@ -51,4 +59,6 @@ module.exports = {
   getPresets,
   savePreset,
   deletePresets,
+
+  Files: File,
 };
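The exported seedDatabase helper groups the three idempotent seeding steps so a server entry point can run them with one call. An assumed usage sketch (the actual call site is not part of this diff; connectDb is a hypothetical helper):

const { seedDatabase } = require('~/models');

async function onStartup() {
  await connectDb(); // hypothetical connection step
  await seedDatabase(); // initializes roles, default roles, and default categories
}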
api/models/interface.js (new file, 24 lines added)
@@ -0,0 +1,24 @@

const { logger } = require('@librechat/data-schemas');
const { updateInterfacePermissions: updateInterfacePerms } = require('@librechat/api');
const { getRoleByName, updateAccessPermissions } = require('./Role');

/**
 * Update interface permissions based on app configuration.
 * Must be done independently from loading the app config.
 * @param {AppConfig} appConfig
 */
async function updateInterfacePermissions(appConfig) {
  try {
    await updateInterfacePerms({
      appConfig,
      getRoleByName,
      updateAccessPermissions,
    });
  } catch (error) {
    logger.error('Error updating interface permissions:', error);
  }
}

module.exports = {
  updateInterfacePermissions,
};
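This wrapper supplies the model-layer getRoleByName and updateAccessPermissions to the shared implementation in @librechat/api, and logs rather than rethrows so a failed permissions sync cannot crash startup. Assumed usage once the app config has been resolved (loadAppConfig is hypothetical):

const { updateInterfacePermissions } = require('~/models/interface');

const appConfig = await loadAppConfig(); // hypothetical config loader
await updateInterfacePermissions(appConfig);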
@@ -1,17 +1,11 @@
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');
 const { createTransaction, createStructuredTransaction } = require('./Transaction');
 /**
  * Creates up to two transactions to record the spending of tokens.
  *
  * @function
  * @async
- * @param {Object} txData - Transaction data.
+ * @param {txData} txData - Transaction data.
- * @param {mongoose.Schema.Types.ObjectId} txData.user - The user ID.
- * @param {String} txData.conversationId - The ID of the conversation.
- * @param {String} txData.model - The model name.
- * @param {String} txData.context - The context in which the transaction is made.
- * @param {EndpointTokenConfig} [txData.endpointTokenConfig] - The current endpoint token config.
- * @param {String} [txData.valueKey] - The value key (optional).
 * @param {Object} tokenUsage - The number of tokens used.
 * @param {Number} tokenUsage.promptTokens - The number of prompt tokens used.
 * @param {Number} tokenUsage.completionTokens - The number of completion tokens used.
@@ -69,13 +63,7 @@ const spendTokens = async (txData, tokenUsage) => {
  *
  * @function
  * @async
- * @param {Object} txData - Transaction data.
+ * @param {txData} txData - Transaction data.
- * @param {mongoose.Schema.Types.ObjectId} txData.user - The user ID.
- * @param {String} txData.conversationId - The ID of the conversation.
- * @param {String} txData.model - The model name.
- * @param {String} txData.context - The context in which the transaction is made.
- * @param {EndpointTokenConfig} [txData.endpointTokenConfig] - The current endpoint token config.
- * @param {String} [txData.valueKey] - The value key (optional).
 * @param {Object} tokenUsage - The number of tokens used.
 * @param {Object} tokenUsage.promptTokens - The number of prompt tokens used.
 * @param {Number} tokenUsage.promptTokens.input - The number of input tokens.
@@ -5,7 +5,6 @@ const { createTransaction, createAutoRefillTransaction } = require('./Transactio

 require('~/db/models');

-// Mock the logger to prevent console output during tests
 jest.mock('~/config', () => ({
   logger: {
     debug: jest.fn(),
@@ -13,10 +12,6 @@ jest.mock('~/config', () => ({
   },
 }));

-// Mock the Config service
-const { getBalanceConfig } = require('~/server/services/Config');
-jest.mock('~/server/services/Config');
-
 describe('spendTokens', () => {
   let mongoServer;
   let userId;
@@ -44,8 +39,7 @@ describe('spendTokens', () => {
     // Create a new user ID for each test
     userId = new mongoose.Types.ObjectId();

-    // Mock the balance config to be enabled by default
+    // Balance config is now passed directly in txData
-    getBalanceConfig.mockResolvedValue({ enabled: true });
   });

   it('should create transactions for both prompt and completion tokens', async () => {
@@ -60,6 +54,7 @@ describe('spendTokens', () => {
       conversationId: 'test-convo',
       model: 'gpt-3.5-turbo',
       context: 'test',
+      balance: { enabled: true },
     };
     const tokenUsage = {
       promptTokens: 100,
@@ -98,6 +93,7 @@ describe('spendTokens', () => {
       conversationId: 'test-convo',
       model: 'gpt-3.5-turbo',
       context: 'test',
+      balance: { enabled: true },
     };
     const tokenUsage = {
       promptTokens: 100,
@@ -127,6 +123,7 @@ describe('spendTokens', () => {
       conversationId: 'test-convo',
       model: 'gpt-3.5-turbo',
       context: 'test',
+      balance: { enabled: true },
     };
     const tokenUsage = {};

@@ -138,8 +135,7 @@ describe('spendTokens', () => {
   });

   it('should not update balance when the balance feature is disabled', async () => {
-    // Override configuration: disable balance updates
+    // Balance is now passed directly in txData
-    getBalanceConfig.mockResolvedValue({ enabled: false });
     // Create a balance for the user
     await Balance.create({
       user: userId,
@@ -151,6 +147,7 @@ describe('spendTokens', () => {
       conversationId: 'test-convo',
       model: 'gpt-3.5-turbo',
       context: 'test',
+      balance: { enabled: false },
     };
     const tokenUsage = {
       promptTokens: 100,
@@ -180,6 +177,7 @@ describe('spendTokens', () => {
       conversationId: 'test-convo',
       model: 'gpt-4', // Using a more expensive model
       context: 'test',
+      balance: { enabled: true },
     };

     // Spending more tokens than the user has balance for
@@ -233,6 +231,7 @@ describe('spendTokens', () => {
       conversationId: 'test-convo-1',
       model: 'gpt-4',
       context: 'test',
+      balance: { enabled: true },
     };

     const tokenUsage1 = {
@@ -252,6 +251,7 @@ describe('spendTokens', () => {
       conversationId: 'test-convo-2',
       model: 'gpt-4',
       context: 'test',
+      balance: { enabled: true },
     };

     const tokenUsage2 = {
@@ -292,6 +292,7 @@ describe('spendTokens', () => {
       tokenType: 'completion',
       rawAmount: -100,
       context: 'test',
+      balance: { enabled: true },
     });

     console.log('Direct Transaction.create result:', directResult);
@@ -316,6 +317,7 @@ describe('spendTokens', () => {
       conversationId: `test-convo-${model}`,
       model,
       context: 'test',
+      balance: { enabled: true },
     };

     const tokenUsage = {
@@ -352,6 +354,7 @@ describe('spendTokens', () => {
       conversationId: 'test-convo-1',
       model: 'claude-3-5-sonnet',
       context: 'test',
+      balance: { enabled: true },
     };

     const tokenUsage1 = {
@@ -375,6 +378,7 @@ describe('spendTokens', () => {
       conversationId: 'test-convo-2',
       model: 'claude-3-5-sonnet',
       context: 'test',
+      balance: { enabled: true },
     };

     const tokenUsage2 = {
@@ -426,6 +430,7 @@ describe('spendTokens', () => {
       conversationId: 'test-convo',
       model: 'claude-3-5-sonnet', // Using a model that supports structured tokens
       context: 'test',
+      balance: { enabled: true },
     };

     // Spending more tokens than the user has balance for
@@ -505,6 +510,7 @@ describe('spendTokens', () => {
       conversationId,
       user: userId,
       model: usage.model,
+      balance: { enabled: true },
     };

     // Calculate expected spend for this transaction
@@ -617,6 +623,7 @@ describe('spendTokens', () => {
       tokenType: 'credits',
       context: 'concurrent-refill-test',
       rawAmount: refillAmount,
+      balance: { enabled: true },
     }),
   );
 }
@@ -683,6 +690,7 @@ describe('spendTokens', () => {
       conversationId: 'test-convo',
       model: 'claude-3-5-sonnet',
       context: 'test',
+      balance: { enabled: true },
     };
     const tokenUsage = {
       promptTokens: {
@@ -1,4 +1,4 @@
-const { matchModelName } = require('../utils/tokens');
+const { matchModelName } = require('@librechat/api');
 const defaultRate = 6;

 /**
@@ -111,8 +111,8 @@ const tokenValues = Object.assign(
     'claude-': { prompt: 0.8, completion: 2.4 },
     'command-r-plus': { prompt: 3, completion: 15 },
     'command-r': { prompt: 0.5, completion: 1.5 },
-    'deepseek-reasoner': { prompt: 0.55, completion: 2.19 },
+    'deepseek-reasoner': { prompt: 0.28, completion: 0.42 },
-    deepseek: { prompt: 0.14, completion: 0.28 },
+    deepseek: { prompt: 0.28, completion: 0.42 },
     /* cohere doesn't have rates for the older command models,
     so this was from https://artificialanalysis.ai/models/command-light/providers */
     command: { prompt: 0.38, completion: 0.38 },
@@ -124,7 +124,8 @@ const tokenValues = Object.assign(
     'gemini-2.0-flash': { prompt: 0.1, completion: 0.4 },
     'gemini-2.0': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
     'gemini-2.5-pro': { prompt: 1.25, completion: 10 },
-    'gemini-2.5-flash': { prompt: 0.15, completion: 3.5 },
+    'gemini-2.5-flash': { prompt: 0.3, completion: 2.5 },
+    'gemini-2.5-flash-lite': { prompt: 0.1, completion: 0.4 },
     'gemini-2.5': { prompt: 0, completion: 0 }, // Free for a period of time
     'gemini-1.5-flash-8b': { prompt: 0.075, completion: 0.3 },
     'gemini-1.5-flash': { prompt: 0.15, completion: 0.6 },
@@ -151,8 +152,19 @@ const tokenValues = Object.assign(
     'ministral-8b': { prompt: 0.1, completion: 0.1 },
     'ministral-3b': { prompt: 0.04, completion: 0.04 },
     // GPT-OSS models
+    'gpt-oss': { prompt: 0.05, completion: 0.2 },
+    'gpt-oss:20b': { prompt: 0.05, completion: 0.2 },
     'gpt-oss-20b': { prompt: 0.05, completion: 0.2 },
+    'gpt-oss:120b': { prompt: 0.15, completion: 0.6 },
     'gpt-oss-120b': { prompt: 0.15, completion: 0.6 },
+    // GLM models (Zhipu AI)
+    glm4: { prompt: 0.1, completion: 0.1 },
+    'glm-4': { prompt: 0.1, completion: 0.1 },
+    'glm-4-32b': { prompt: 0.1, completion: 0.1 },
+    'glm-4.5': { prompt: 0.35, completion: 1.55 },
+    'glm-4.5v': { prompt: 0.6, completion: 1.8 },
+    'glm-4.5-air': { prompt: 0.14, completion: 0.86 },
+    'glm-4.6': { prompt: 0.5, completion: 1.75 },
   },
   bedrockValues,
 );
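Reading the table: each entry pairs a prompt and completion rate per million tokens (the linked pricing pages quote USD per 1M), matched to a model name by getValueKey. Under that assumption, a rough cost estimate looks like this sketch:

// Illustrative only: treats the multiplier as a per-1M-token rate
function estimateCost(model, promptTokens, completionTokens) {
  const promptRate = getMultiplier({ model, tokenType: 'prompt' });
  const completionRate = getMultiplier({ model, tokenType: 'completion' });
  return (promptTokens * promptRate + completionTokens * completionRate) / 1e6;
}

// e.g. 'glm-4.6' at { prompt: 0.5, completion: 1.75 }:
// estimateCost('glm-4.6', 1000, 500) = (500 + 875) / 1e6 = 0.001375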
@@ -184,6 +184,16 @@ describe('getValueKey', () => {
     expect(getValueKey('claude-3.5-haiku-turbo')).toBe('claude-3.5-haiku');
     expect(getValueKey('claude-3.5-haiku-0125')).toBe('claude-3.5-haiku');
   });
+
+  it('should return expected value keys for "gpt-oss" models', () => {
+    expect(getValueKey('openai/gpt-oss-120b')).toBe('gpt-oss-120b');
+    expect(getValueKey('openai/gpt-oss:120b')).toBe('gpt-oss:120b');
+    expect(getValueKey('openai/gpt-oss-570b')).toBe('gpt-oss');
+    expect(getValueKey('gpt-oss-570b')).toBe('gpt-oss');
+    expect(getValueKey('groq/gpt-oss-1080b')).toBe('gpt-oss');
+    expect(getValueKey('gpt-oss-20b')).toBe('gpt-oss-20b');
+    expect(getValueKey('oai/gpt-oss:20b')).toBe('gpt-oss:20b');
+  });
 });

 describe('getMultiplier', () => {
@@ -394,6 +404,18 @@ describe('getMultiplier', () => {
       expect(getMultiplier({ model: key, tokenType: 'completion' })).toBe(expectedCompletion);
     });
   });
+
+  it('should return correct multipliers for GLM models', () => {
+    const models = ['glm-4.6', 'glm-4.5v', 'glm-4.5-air', 'glm-4.5', 'glm-4-32b', 'glm-4', 'glm4'];
+    models.forEach((key) => {
+      const expectedPrompt = tokenValues[key].prompt;
+      const expectedCompletion = tokenValues[key].completion;
+      expect(getMultiplier({ valueKey: key, tokenType: 'prompt' })).toBe(expectedPrompt);
+      expect(getMultiplier({ valueKey: key, tokenType: 'completion' })).toBe(expectedCompletion);
+      expect(getMultiplier({ model: key, tokenType: 'prompt' })).toBe(expectedPrompt);
+      expect(getMultiplier({ model: key, tokenType: 'completion' })).toBe(expectedCompletion);
+    });
+  });
 });

 describe('AWS Bedrock Model Tests', () => {
@@ -571,6 +593,9 @@ describe('getCacheMultiplier', () => {

 describe('Google Model Tests', () => {
   const googleModels = [
+    'gemini-2.5-pro',
+    'gemini-2.5-flash',
+    'gemini-2.5-flash-lite',
     'gemini-2.5-pro-preview-05-06',
     'gemini-2.5-flash-preview-04-17',
     'gemini-2.5-exp',
@@ -611,6 +636,9 @@ describe('Google Model Tests', () => {

   it('should map to the correct model keys', () => {
     const expected = {
+      'gemini-2.5-pro': 'gemini-2.5-pro',
+      'gemini-2.5-flash': 'gemini-2.5-flash',
+      'gemini-2.5-flash-lite': 'gemini-2.5-flash-lite',
       'gemini-2.5-pro-preview-05-06': 'gemini-2.5-pro',
       'gemini-2.5-flash-preview-04-17': 'gemini-2.5-flash',
       'gemini-2.5-exp': 'gemini-2.5',
@@ -766,6 +794,110 @@ describe('Grok Model Tests - Pricing', () => {
   });
 });
+
+describe('GLM Model Tests', () => {
+  it('should return expected value keys for GLM models', () => {
+    expect(getValueKey('glm-4.6')).toBe('glm-4.6');
+    expect(getValueKey('glm-4.5')).toBe('glm-4.5');
+    expect(getValueKey('glm-4.5v')).toBe('glm-4.5v');
+    expect(getValueKey('glm-4.5-air')).toBe('glm-4.5-air');
+    expect(getValueKey('glm-4-32b')).toBe('glm-4-32b');
+    expect(getValueKey('glm-4')).toBe('glm-4');
+    expect(getValueKey('glm4')).toBe('glm4');
+  });
+
+  it('should match GLM model variations with provider prefixes', () => {
+    expect(getValueKey('z-ai/glm-4.6')).toBe('glm-4.6');
+    expect(getValueKey('z-ai/glm-4.5')).toBe('glm-4.5');
+    expect(getValueKey('z-ai/glm-4.5-air')).toBe('glm-4.5-air');
+    expect(getValueKey('z-ai/glm-4.5v')).toBe('glm-4.5v');
+    expect(getValueKey('z-ai/glm-4-32b')).toBe('glm-4-32b');
+
+    expect(getValueKey('zai/glm-4.6')).toBe('glm-4.6');
+    expect(getValueKey('zai/glm-4.5')).toBe('glm-4.5');
+    expect(getValueKey('zai/glm-4.5-air')).toBe('glm-4.5-air');
+    expect(getValueKey('zai/glm-4.5v')).toBe('glm-4.5v');

+    expect(getValueKey('zai-org/GLM-4.6')).toBe('glm-4.6');
+    expect(getValueKey('zai-org/GLM-4.5')).toBe('glm-4.5');
+    expect(getValueKey('zai-org/GLM-4.5-Air')).toBe('glm-4.5-air');
+    expect(getValueKey('zai-org/GLM-4.5V')).toBe('glm-4.5v');
+    expect(getValueKey('zai-org/GLM-4-32B-0414')).toBe('glm-4-32b');
+  });
+
+  it('should match GLM model variations with suffixes', () => {
+    expect(getValueKey('glm-4.6-fp8')).toBe('glm-4.6');
+    expect(getValueKey('zai-org/GLM-4.6-FP8')).toBe('glm-4.6');
+    expect(getValueKey('zai-org/GLM-4.5-Air-FP8')).toBe('glm-4.5-air');
+  });
+
+  it('should prioritize more specific GLM model patterns', () => {
|
expect(getValueKey('glm-4.5-air-something')).toBe('glm-4.5-air');
|
||||||
|
expect(getValueKey('glm-4.5-something')).toBe('glm-4.5');
|
||||||
|
expect(getValueKey('glm-4.5v-something')).toBe('glm-4.5v');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return correct multipliers for all GLM models', () => {
|
||||||
|
expect(getMultiplier({ model: 'glm-4.6', tokenType: 'prompt' })).toBe(
|
||||||
|
tokenValues['glm-4.6'].prompt,
|
||||||
|
);
|
||||||
|
expect(getMultiplier({ model: 'glm-4.6', tokenType: 'completion' })).toBe(
|
||||||
|
tokenValues['glm-4.6'].completion,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(getMultiplier({ model: 'glm-4.5v', tokenType: 'prompt' })).toBe(
|
||||||
|
tokenValues['glm-4.5v'].prompt,
|
||||||
|
);
|
||||||
|
expect(getMultiplier({ model: 'glm-4.5v', tokenType: 'completion' })).toBe(
|
||||||
|
tokenValues['glm-4.5v'].completion,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(getMultiplier({ model: 'glm-4.5-air', tokenType: 'prompt' })).toBe(
|
||||||
|
tokenValues['glm-4.5-air'].prompt,
|
||||||
|
);
|
||||||
|
expect(getMultiplier({ model: 'glm-4.5-air', tokenType: 'completion' })).toBe(
|
||||||
|
tokenValues['glm-4.5-air'].completion,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(getMultiplier({ model: 'glm-4.5', tokenType: 'prompt' })).toBe(
|
||||||
|
tokenValues['glm-4.5'].prompt,
|
||||||
|
);
|
||||||
|
expect(getMultiplier({ model: 'glm-4.5', tokenType: 'completion' })).toBe(
|
||||||
|
tokenValues['glm-4.5'].completion,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(getMultiplier({ model: 'glm-4-32b', tokenType: 'prompt' })).toBe(
|
||||||
|
tokenValues['glm-4-32b'].prompt,
|
||||||
|
);
|
||||||
|
expect(getMultiplier({ model: 'glm-4-32b', tokenType: 'completion' })).toBe(
|
||||||
|
tokenValues['glm-4-32b'].completion,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(getMultiplier({ model: 'glm-4', tokenType: 'prompt' })).toBe(
|
||||||
|
tokenValues['glm-4'].prompt,
|
||||||
|
);
|
||||||
|
expect(getMultiplier({ model: 'glm-4', tokenType: 'completion' })).toBe(
|
||||||
|
tokenValues['glm-4'].completion,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(getMultiplier({ model: 'glm4', tokenType: 'prompt' })).toBe(tokenValues['glm4'].prompt);
|
||||||
|
expect(getMultiplier({ model: 'glm4', tokenType: 'completion' })).toBe(
|
||||||
|
tokenValues['glm4'].completion,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return correct multipliers for GLM models with provider prefixes', () => {
|
||||||
|
expect(getMultiplier({ model: 'z-ai/glm-4.6', tokenType: 'prompt' })).toBe(
|
||||||
|
tokenValues['glm-4.6'].prompt,
|
||||||
|
);
|
||||||
|
expect(getMultiplier({ model: 'zai/glm-4.5-air', tokenType: 'completion' })).toBe(
|
||||||
|
tokenValues['glm-4.5-air'].completion,
|
||||||
|
);
|
||||||
|
expect(getMultiplier({ model: 'zai-org/GLM-4.5V', tokenType: 'prompt' })).toBe(
|
||||||
|
tokenValues['glm-4.5v'].prompt,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
describe('Claude Model Tests', () => {
|
describe('Claude Model Tests', () => {
|
||||||
it('should return correct prompt and completion rates for Claude 4 models', () => {
|
it('should return correct prompt and completion rates for Claude 4 models', () => {
|
||||||
expect(getMultiplier({ model: 'claude-sonnet-4', tokenType: 'prompt' })).toBe(
|
expect(getMultiplier({ model: 'claude-sonnet-4', tokenType: 'prompt' })).toBe(
|
||||||
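The multiplier tests above pin down the numbers that billing math depends on. A hedged usage sketch of how a caller could turn those multipliers into a cost estimate, assuming the rates are USD per million tokens (the units are not stated in this diff):

const estimateCostUSD = ({ model, promptTokens, completionTokens }) => {
  const promptRate = getMultiplier({ model, tokenType: 'prompt' });
  const completionRate = getMultiplier({ model, tokenType: 'completion' });
  return (promptTokens * promptRate + completionTokens * completionRate) / 1e6;
};

// estimateCostUSD({ model: 'glm-4.5-air', promptTokens: 10000, completionTokens: 2000 })
//   => (10000 * 0.14 + 2000 * 0.86) / 1e6 ≈ 0.00312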
@@ -3,7 +3,7 @@ const bcrypt = require('bcryptjs');
 /**
  * Compares the provided password with the user's password.
  *
- * @param {MongoUser} user - The user to compare the password for.
+ * @param {IUser} user - The user to compare the password for.
  * @param {string} candidatePassword - The password to test against the user's password.
  * @returns {Promise<boolean>} A promise that resolves to a boolean indicating if the password matches.
  */
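A minimal sketch of a comparePassword consistent with the corrected JSDoc (the function body is outside this hunk); bcryptjs extracts the salt from the stored hash itself:

const bcrypt = require('bcryptjs');

const comparePassword = async (user, candidatePassword) => {
  if (!user?.password) {
    return false;
  }
  return bcrypt.compare(candidatePassword, user.password);
};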
@@ -1,6 +1,6 @@
 {
   "name": "@librechat/backend",
-  "version": "v0.8.0-rc2",
+  "version": "v0.8.0",
   "description": "",
   "scripts": {
     "start": "echo 'please run this from the root directory'",
@@ -47,15 +47,15 @@
     "@langchain/core": "^0.3.62",
     "@langchain/google-genai": "^0.2.13",
     "@langchain/google-vertexai": "^0.2.13",
-    "@langchain/openai": "^0.5.18",
     "@langchain/textsplitters": "^0.1.0",
-    "@librechat/agents": "^2.4.75",
+    "@librechat/agents": "^2.4.85",
     "@librechat/api": "*",
     "@librechat/data-schemas": "*",
+    "@microsoft/microsoft-graph-client": "^3.0.7",
     "@modelcontextprotocol/sdk": "^1.17.1",
     "@node-saml/passport-saml": "^5.1.0",
     "@waylaidwanderer/fetch-event-source": "^3.0.1",
-    "axios": "^1.8.2",
+    "axios": "^1.12.1",
     "bcryptjs": "^2.4.3",
     "compression": "^1.8.1",
     "connect-redis": "^8.1.0",
@@ -93,10 +93,9 @@
     "multer": "^2.0.2",
     "nanoid": "^3.3.7",
     "node-fetch": "^2.7.0",
-    "nodemailer": "^6.9.15",
+    "nodemailer": "^7.0.9",
     "ollama": "^0.5.0",
     "openai": "^5.10.1",
-    "openai-chat-tokens": "^0.2.8",
    "openid-client": "^6.5.0",
     "passport": "^0.6.0",
     "passport-apple": "^2.0.2",
@@ -1,6 +1,6 @@
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');
 
-// WeakMap to hold temporary data associated with requests
+/** WeakMap to hold temporary data associated with requests */
 const requestDataMap = new WeakMap();
 
 const FinalizationRegistry = global.FinalizationRegistry || null;
@@ -23,7 +23,7 @@ const clientRegistry = FinalizationRegistry
     } else {
       logger.debug('[FinalizationRegistry] Cleaning up client');
     }
-  } catch (e) {
+  } catch {
     // Ignore errors
   }
 })
@@ -55,6 +55,9 @@ function disposeClient(client) {
   if (client.responseMessageId) {
     client.responseMessageId = null;
   }
+  if (client.parentMessageId) {
+    client.parentMessageId = null;
+  }
   if (client.message_file_map) {
     client.message_file_map = null;
   }
@@ -334,7 +337,7 @@ function disposeClient(client) {
       }
     }
     client.options = null;
-  } catch (e) {
+  } catch {
     // Ignore errors during disposal
   }
 }
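The hunks above extend an existing WeakMap + FinalizationRegistry cleanup pattern. A standalone sketch of that pattern, with names chosen for illustration: per-request data is keyed weakly so it can never outlive its request, and the registry callback fires after a client is garbage-collected (the global guard keeps runtimes without FinalizationRegistry working):

const requestDataMap = new WeakMap();

const clientRegistry = global.FinalizationRegistry
  ? new FinalizationRegistry((heldValue) => {
      try {
        // heldValue is what register() stored; the client object itself is already gone
        console.debug('[FinalizationRegistry] Cleaning up client', heldValue);
      } catch {
        // Ignore errors
      }
    })
  : null;

function trackClient(req, client) {
  requestDataMap.set(req, { client });
  clientRegistry?.register(client, { userId: req.user?.id });
}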
@@ -1,8 +1,8 @@
 const cookies = require('cookie');
 const jwt = require('jsonwebtoken');
 const openIdClient = require('openid-client');
-const { isEnabled } = require('@librechat/api');
 const { logger } = require('@librechat/data-schemas');
+const { isEnabled, findOpenIDUser } = require('@librechat/api');
 const {
   requestPasswordReset,
   setOpenIDAuthTokens,
@@ -11,6 +11,8 @@ const {
   registerUser,
 } = require('~/server/services/AuthService');
 const { findUser, getUserById, deleteAllUserSessions, findSession } = require('~/models');
+const { getGraphApiToken } = require('~/server/services/GraphTokenService');
+const { getOAuthReconnectionManager } = require('~/config');
 const { getOpenIdConfig } = require('~/strategies');
 
 const registrationController = async (req, res) => {
@@ -70,11 +72,17 @@ const refreshController = async (req, res) => {
       const openIdConfig = getOpenIdConfig();
       const tokenset = await openIdClient.refreshTokenGrant(openIdConfig, refreshToken);
       const claims = tokenset.claims();
-      const user = await findUser({ email: claims.email });
-      if (!user) {
+      const { user, error } = await findOpenIDUser({
+        findUser,
+        email: claims.email,
+        openidId: claims.sub,
+        idOnTheSource: claims.oid,
+        strategyName: 'refreshController',
+      });
+      if (error || !user) {
         return res.status(401).redirect('/login');
       }
-      const token = setOpenIDAuthTokens(tokenset, res);
+      const token = setOpenIDAuthTokens(tokenset, res, user._id.toString());
       return res.status(200).send({ token, user });
     } catch (error) {
       logger.error('[refreshController] OpenID token refresh error', error);
@@ -83,7 +91,7 @@ const refreshController = async (req, res) => {
   }
   try {
     const payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
-    const user = await getUserById(payload.id, '-password -__v -totpSecret');
+    const user = await getUserById(payload.id, '-password -__v -totpSecret -backupCodes');
     if (!user) {
       return res.status(401).redirect('/login');
     }
@@ -95,14 +103,25 @@ const refreshController = async (req, res) => {
       return res.status(200).send({ token, user });
     }
 
-    // Find the session with the hashed refresh token
-    const session = await findSession({
-      userId: userId,
-      refreshToken: refreshToken,
-    });
+    /** Session with the hashed refresh token */
+    const session = await findSession(
+      {
+        userId: userId,
+        refreshToken: refreshToken,
+      },
+      { lean: false },
+    );
 
     if (session && session.expiration > new Date()) {
-      const token = await setAuthTokens(userId, res, session._id);
+      const token = await setAuthTokens(userId, res, session);
+
+      // trigger OAuth MCP server reconnection asynchronously (best effort)
+      void getOAuthReconnectionManager()
+        .reconnectServers(userId)
+        .catch((err) => {
+          logger.error('Error reconnecting OAuth MCP servers:', err);
+        });
+
       res.status(200).send({ token, user });
     } else if (req?.query?.retry) {
       // Retrying from a refresh token request that failed (401)
@@ -113,14 +132,59 @@ const refreshController = async (req, res) => {
       res.status(401).send('Refresh token expired or not found for this user');
     }
   } catch (err) {
-    logger.error(`[refreshController] Refresh token: ${refreshToken}`, err);
+    logger.error(`[refreshController] Invalid refresh token:`, err);
    res.status(403).send('Invalid refresh token');
   }
 };
 
+const graphTokenController = async (req, res) => {
+  try {
+    // Validate user is authenticated via Entra ID
+    if (!req.user.openidId || req.user.provider !== 'openid') {
+      return res.status(403).json({
+        message: 'Microsoft Graph access requires Entra ID authentication',
+      });
+    }
+
+    // Check if OpenID token reuse is active (required for on-behalf-of flow)
+    if (!isEnabled(process.env.OPENID_REUSE_TOKENS)) {
+      return res.status(403).json({
+        message: 'SharePoint integration requires OpenID token reuse to be enabled',
+      });
+    }
+
+    // Extract access token from Authorization header
+    const authHeader = req.headers.authorization;
+    if (!authHeader || !authHeader.startsWith('Bearer ')) {
+      return res.status(401).json({
+        message: 'Valid authorization token required',
+      });
+    }
+
+    // Get scopes from query parameters
+    const scopes = req.query.scopes;
+    if (!scopes) {
+      return res.status(400).json({
+        message: 'Graph API scopes are required as query parameter',
+      });
+    }
+
+    const accessToken = authHeader.substring(7); // Remove 'Bearer ' prefix
+    const tokenResponse = await getGraphApiToken(req.user, accessToken, scopes);
+
+    res.json(tokenResponse);
+  } catch (error) {
+    logger.error('[graphTokenController] Failed to obtain Graph API token:', error);
+    res.status(500).json({
+      message: 'Failed to obtain Microsoft Graph token',
+    });
+  }
+};
+
 module.exports = {
   refreshController,
   registrationController,
   resetPasswordController,
   resetPasswordRequestController,
+  graphTokenController,
 };
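A hypothetical client-side call against the new graphTokenController; the route it is mounted on is not part of this diff, so '/api/auth/graph-token' below is an assumed path for illustration only:

async function fetchGraphToken(jwt, scopes = 'Sites.Read.All') {
  const res = await fetch(`/api/auth/graph-token?scopes=${encodeURIComponent(scopes)}`, {
    headers: { Authorization: `Bearer ${jwt}` },
  });
  if (!res.ok) {
    throw new Error(`Graph token request failed: ${res.status}`);
  }
  // Shape is whatever getGraphApiToken(req.user, accessToken, scopes) resolves to
  return res.json();
}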
@@ -1,7 +1,7 @@
+const { logger } = require('@librechat/data-schemas');
 const { CacheKeys } = require('librechat-data-provider');
 const { loadDefaultModels, loadConfigModels } = require('~/server/services/Config');
 const { getLogStores } = require('~/cache');
-const { logger } = require('~/config');
 
 /**
  * @param {ServerRequest} req
@@ -1,27 +0,0 @@
-const { CacheKeys } = require('librechat-data-provider');
-const { loadOverrideConfig } = require('~/server/services/Config');
-const { getLogStores } = require('~/cache');
-
-async function overrideController(req, res) {
-  const cache = getLogStores(CacheKeys.CONFIG_STORE);
-  let overrideConfig = await cache.get(CacheKeys.OVERRIDE_CONFIG);
-  if (overrideConfig) {
-    res.send(overrideConfig);
-    return;
-  } else if (overrideConfig === false) {
-    res.send(false);
-    return;
-  }
-  overrideConfig = await loadOverrideConfig();
-  const { endpointsConfig, modelsConfig } = overrideConfig;
-  if (endpointsConfig) {
-    await cache.set(CacheKeys.ENDPOINT_CONFIG, endpointsConfig);
-  }
-  if (modelsConfig) {
-    await cache.set(CacheKeys.MODELS_CONFIG, modelsConfig);
-  }
-  await cache.set(CacheKeys.OVERRIDE_CONFIG, overrideConfig);
-  res.send(JSON.stringify(overrideConfig));
-}
-
-module.exports = overrideController;
484 api/server/controllers/PermissionsController.js (new file)
@@ -0,0 +1,484 @@
+/**
+ * @import { TUpdateResourcePermissionsRequest, TUpdateResourcePermissionsResponse } from 'librechat-data-provider'
+ */
+
+const mongoose = require('mongoose');
+const { logger } = require('@librechat/data-schemas');
+const { ResourceType, PrincipalType } = require('librechat-data-provider');
+const {
+  bulkUpdateResourcePermissions,
+  ensureGroupPrincipalExists,
+  getEffectivePermissions,
+  ensurePrincipalExists,
+  getAvailableRoles,
+} = require('~/server/services/PermissionService');
+const { AclEntry } = require('~/db/models');
+const {
+  searchPrincipals: searchLocalPrincipals,
+  sortPrincipalsByRelevance,
+  calculateRelevanceScore,
+} = require('~/models');
+const {
+  entraIdPrincipalFeatureEnabled,
+  searchEntraIdPrincipals,
+} = require('~/server/services/GraphApiService');
+
+/**
+ * Generic controller for resource permission endpoints
+ * Delegates validation and logic to PermissionService
+ */
+
+/**
+ * Validates that the resourceType is one of the supported enum values
+ * @param {string} resourceType - The resource type to validate
+ * @throws {Error} If resourceType is not valid
+ */
+const validateResourceType = (resourceType) => {
+  const validTypes = Object.values(ResourceType);
+  if (!validTypes.includes(resourceType)) {
+    throw new Error(`Invalid resourceType: ${resourceType}. Valid types: ${validTypes.join(', ')}`);
+  }
+};
+
+/**
+ * Bulk update permissions for a resource (grant, update, remove)
+ * @route PUT /api/{resourceType}/{resourceId}/permissions
+ * @param {Object} req - Express request object
+ * @param {Object} req.params - Route parameters
+ * @param {string} req.params.resourceType - Resource type (e.g., 'agent')
+ * @param {string} req.params.resourceId - Resource ID
+ * @param {TUpdateResourcePermissionsRequest} req.body - Request body
+ * @param {Object} res - Express response object
+ * @returns {Promise<TUpdateResourcePermissionsResponse>} Updated permissions response
+ */
+const updateResourcePermissions = async (req, res) => {
+  try {
+    const { resourceType, resourceId } = req.params;
+    validateResourceType(resourceType);
+
+    /** @type {TUpdateResourcePermissionsRequest} */
+    const { updated, removed, public: isPublic, publicAccessRoleId } = req.body;
+    const { id: userId } = req.user;
+
+    // Prepare principals for the service call
+    const updatedPrincipals = [];
+    const revokedPrincipals = [];
+
+    // Add updated principals
+    if (updated && Array.isArray(updated)) {
+      updatedPrincipals.push(...updated);
+    }
+
+    // Add public permission if enabled
+    if (isPublic && publicAccessRoleId) {
+      updatedPrincipals.push({
+        type: PrincipalType.PUBLIC,
+        id: null,
+        accessRoleId: publicAccessRoleId,
+      });
+    }
+
+    // Prepare authentication context for enhanced group member fetching
+    const useEntraId = entraIdPrincipalFeatureEnabled(req.user);
+    const authHeader = req.headers.authorization;
+    const accessToken =
+      authHeader && authHeader.startsWith('Bearer ') ? authHeader.substring(7) : null;
+    const authContext =
+      useEntraId && accessToken
+        ? {
+            accessToken,
+            sub: req.user.openidId,
+          }
+        : null;
+
+    // Ensure updated principals exist in the database before processing permissions
+    const validatedPrincipals = [];
+    for (const principal of updatedPrincipals) {
+      try {
+        let principalId;
+
+        if (principal.type === PrincipalType.PUBLIC) {
+          principalId = null; // Public principals don't need database records
+        } else if (principal.type === PrincipalType.ROLE) {
+          principalId = principal.id; // Role principals use role name as ID
+        } else if (principal.type === PrincipalType.USER) {
+          principalId = await ensurePrincipalExists(principal);
+        } else if (principal.type === PrincipalType.GROUP) {
+          // Pass authContext to enable member fetching for Entra ID groups when available
+          principalId = await ensureGroupPrincipalExists(principal, authContext);
+        } else {
+          logger.error(`Unsupported principal type: ${principal.type}`);
+          continue; // Skip invalid principal types
+        }
+
+        // Update the principal with the validated ID for ACL operations
+        validatedPrincipals.push({
+          ...principal,
+          id: principalId,
+        });
+      } catch (error) {
+        logger.error('Error ensuring principal exists:', {
+          principal: {
+            type: principal.type,
+            id: principal.id,
+            name: principal.name,
+            source: principal.source,
+          },
+          error: error.message,
+        });
+        // Continue with other principals instead of failing the entire operation
+        continue;
+      }
+    }
+
+    // Add removed principals
+    if (removed && Array.isArray(removed)) {
+      revokedPrincipals.push(...removed);
+    }
+
+    // If public is disabled, add public to revoked list
+    if (!isPublic) {
+      revokedPrincipals.push({
+        type: PrincipalType.PUBLIC,
+        id: null,
+      });
+    }
+
+    const results = await bulkUpdateResourcePermissions({
+      resourceType,
+      resourceId,
+      updatedPrincipals: validatedPrincipals,
+      revokedPrincipals,
+      grantedBy: userId,
+    });
+
+    /** @type {TUpdateResourcePermissionsResponse} */
+    const response = {
+      message: 'Permissions updated successfully',
+      results: {
+        principals: results.granted,
+        public: isPublic || false,
+        publicAccessRoleId: isPublic ? publicAccessRoleId : undefined,
+      },
+    };
+
+    res.status(200).json(response);
+  } catch (error) {
+    logger.error('Error updating resource permissions:', error);
+    res.status(400).json({
+      error: 'Failed to update permissions',
+      details: error.message,
+    });
+  }
+};
+
+/**
+ * Get principals with their permission roles for a resource (UI-friendly format)
+ * Uses efficient aggregation pipeline to join User/Group data in single query
+ * @route GET /api/permissions/{resourceType}/{resourceId}
+ */
+const getResourcePermissions = async (req, res) => {
+  try {
+    const { resourceType, resourceId } = req.params;
+    validateResourceType(resourceType);
+
+    // Use aggregation pipeline for efficient single-query data retrieval
+    const results = await AclEntry.aggregate([
+      // Match ACL entries for this resource
+      {
+        $match: {
+          resourceType,
+          resourceId: mongoose.Types.ObjectId.isValid(resourceId)
+            ? mongoose.Types.ObjectId.createFromHexString(resourceId)
+            : resourceId,
+        },
+      },
+      // Lookup AccessRole information
+      {
+        $lookup: {
+          from: 'accessroles',
+          localField: 'roleId',
+          foreignField: '_id',
+          as: 'role',
+        },
+      },
+      // Lookup User information (for user principals)
+      {
+        $lookup: {
+          from: 'users',
+          localField: 'principalId',
+          foreignField: '_id',
+          as: 'userInfo',
+        },
+      },
+      // Lookup Group information (for group principals)
+      {
+        $lookup: {
+          from: 'groups',
+          localField: 'principalId',
+          foreignField: '_id',
+          as: 'groupInfo',
+        },
+      },
+      // Project final structure
+      {
+        $project: {
+          principalType: 1,
+          principalId: 1,
+          accessRoleId: { $arrayElemAt: ['$role.accessRoleId', 0] },
+          userInfo: { $arrayElemAt: ['$userInfo', 0] },
+          groupInfo: { $arrayElemAt: ['$groupInfo', 0] },
+        },
+      },
+    ]);
+
+    const principals = [];
+    let publicPermission = null;
+
+    // Process aggregation results
+    for (const result of results) {
+      if (result.principalType === PrincipalType.PUBLIC) {
+        publicPermission = {
+          public: true,
+          publicAccessRoleId: result.accessRoleId,
+        };
+      } else if (result.principalType === PrincipalType.USER && result.userInfo) {
+        principals.push({
+          type: PrincipalType.USER,
+          id: result.userInfo._id.toString(),
+          name: result.userInfo.name || result.userInfo.username,
+          email: result.userInfo.email,
+          avatar: result.userInfo.avatar,
+          source: !result.userInfo._id ? 'entra' : 'local',
+          idOnTheSource: result.userInfo.idOnTheSource || result.userInfo._id.toString(),
+          accessRoleId: result.accessRoleId,
+        });
+      } else if (result.principalType === PrincipalType.GROUP && result.groupInfo) {
+        principals.push({
+          type: PrincipalType.GROUP,
+          id: result.groupInfo._id.toString(),
+          name: result.groupInfo.name,
+          email: result.groupInfo.email,
+          description: result.groupInfo.description,
+          avatar: result.groupInfo.avatar,
+          source: result.groupInfo.source || 'local',
+          idOnTheSource: result.groupInfo.idOnTheSource || result.groupInfo._id.toString(),
+          accessRoleId: result.accessRoleId,
+        });
+      } else if (result.principalType === PrincipalType.ROLE) {
+        principals.push({
+          type: PrincipalType.ROLE,
+          /** Role name as ID */
+          id: result.principalId,
+          /** Display the role name */
+          name: result.principalId,
+          description: `System role: ${result.principalId}`,
+          accessRoleId: result.accessRoleId,
+        });
+      }
+    }
+
+    // Return response in format expected by frontend
+    const response = {
+      resourceType,
+      resourceId,
+      principals,
+      public: publicPermission?.public || false,
+      ...(publicPermission?.publicAccessRoleId && {
+        publicAccessRoleId: publicPermission.publicAccessRoleId,
+      }),
+    };
+
+    res.status(200).json(response);
+  } catch (error) {
+    logger.error('Error getting resource permissions principals:', error);
+    res.status(500).json({
+      error: 'Failed to get permissions principals',
+      details: error.message,
+    });
+  }
+};
+
+/**
+ * Get available roles for a resource type
+ * @route GET /api/{resourceType}/roles
+ */
+const getResourceRoles = async (req, res) => {
+  try {
+    const { resourceType } = req.params;
+    validateResourceType(resourceType);
+
+    const roles = await getAvailableRoles({ resourceType });
+
+    res.status(200).json(
+      roles.map((role) => ({
+        accessRoleId: role.accessRoleId,
+        name: role.name,
+        description: role.description,
+        permBits: role.permBits,
+      })),
+    );
+  } catch (error) {
+    logger.error('Error getting resource roles:', error);
+    res.status(500).json({
+      error: 'Failed to get roles',
+      details: error.message,
+    });
+  }
+};
+
+/**
+ * Get user's effective permission bitmask for a resource
+ * @route GET /api/{resourceType}/{resourceId}/effective
+ */
+const getUserEffectivePermissions = async (req, res) => {
+  try {
+    const { resourceType, resourceId } = req.params;
+    validateResourceType(resourceType);
+
+    const { id: userId } = req.user;
+
+    const permissionBits = await getEffectivePermissions({
+      userId,
+      role: req.user.role,
+      resourceType,
+      resourceId,
+    });
+
+    res.status(200).json({
+      permissionBits,
+    });
+  } catch (error) {
+    logger.error('Error getting user effective permissions:', error);
+    res.status(500).json({
+      error: 'Failed to get effective permissions',
+      details: error.message,
+    });
+  }
+};
+
+/**
+ * Search for users and groups to grant permissions
+ * Supports hybrid local database + Entra ID search when configured
+ * @route GET /api/permissions/search-principals
+ */
+const searchPrincipals = async (req, res) => {
+  try {
+    const { q: query, limit = 20, types } = req.query;
+
+    if (!query || query.trim().length === 0) {
+      return res.status(400).json({
+        error: 'Query parameter "q" is required and must not be empty',
+      });
+    }
+
+    if (query.trim().length < 2) {
+      return res.status(400).json({
+        error: 'Query must be at least 2 characters long',
+      });
+    }
+
+    const searchLimit = Math.min(Math.max(1, parseInt(limit) || 10), 50);
+
+    let typeFilters = null;
+    if (types) {
+      const typesArray = Array.isArray(types) ? types : types.split(',');
+      const validTypes = typesArray.filter((t) =>
+        [PrincipalType.USER, PrincipalType.GROUP, PrincipalType.ROLE].includes(t),
+      );
+      typeFilters = validTypes.length > 0 ? validTypes : null;
+    }
+
+    const localResults = await searchLocalPrincipals(query.trim(), searchLimit, typeFilters);
+    let allPrincipals = [...localResults];
+
+    const useEntraId = entraIdPrincipalFeatureEnabled(req.user);
+
+    if (useEntraId && localResults.length < searchLimit) {
+      try {
+        let graphType = 'all';
+        if (typeFilters && typeFilters.length === 1) {
+          const graphTypeMap = {
+            [PrincipalType.USER]: 'users',
+            [PrincipalType.GROUP]: 'groups',
+          };
+          const mappedType = graphTypeMap[typeFilters[0]];
+          if (mappedType) {
+            graphType = mappedType;
+          }
+        }
+
+        const authHeader = req.headers.authorization;
+        const accessToken =
+          authHeader && authHeader.startsWith('Bearer ') ? authHeader.substring(7) : null;
+
+        if (accessToken) {
+          const graphResults = await searchEntraIdPrincipals(
+            accessToken,
+            req.user.openidId,
+            query.trim(),
+            graphType,
+            searchLimit - localResults.length,
+          );
+
+          const localEmails = new Set(
+            localResults.map((p) => p.email?.toLowerCase()).filter(Boolean),
+          );
+          const localGroupSourceIds = new Set(
+            localResults.map((p) => p.idOnTheSource).filter(Boolean),
+          );
+
+          for (const principal of graphResults) {
+            const isDuplicateByEmail =
+              principal.email && localEmails.has(principal.email.toLowerCase());
+            const isDuplicateBySourceId =
+              principal.idOnTheSource && localGroupSourceIds.has(principal.idOnTheSource);
+
+            if (!isDuplicateByEmail && !isDuplicateBySourceId) {
+              allPrincipals.push(principal);
+            }
+          }
+        }
+      } catch (graphError) {
+        logger.warn('Graph API search failed, falling back to local results:', graphError.message);
+      }
+    }
+    const scoredResults = allPrincipals.map((item) => ({
+      ...item,
+      _searchScore: calculateRelevanceScore(item, query.trim()),
+    }));
+
+    const finalResults = sortPrincipalsByRelevance(scoredResults)
+      .slice(0, searchLimit)
+      .map((result) => {
+        const { _searchScore, ...resultWithoutScore } = result;
+        return resultWithoutScore;
+      });
+
+    res.status(200).json({
+      query: query.trim(),
+      limit: searchLimit,
+      types: typeFilters,
+      results: finalResults,
+      count: finalResults.length,
+      sources: {
+        local: finalResults.filter((r) => r.source === 'local').length,
+        entra: finalResults.filter((r) => r.source === 'entra').length,
+      },
+    });
+  } catch (error) {
+    logger.error('Error searching principals:', error);
+    res.status(500).json({
+      error: 'Failed to search principals',
+      details: error.message,
+    });
+  }
+};
+
+module.exports = {
+  updateResourcePermissions,
+  getResourcePermissions,
+  getResourceRoles,
+  getUserEffectivePermissions,
+  searchPrincipals,
+};
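An illustrative payload for the bulk-update endpoint handled by updateResourcePermissions above (PUT /api/{resourceType}/{resourceId}/permissions); the IDs, role IDs, and lowercase principal-type strings are invented for the example — real values come from PrincipalType and the AccessRole collection:

const body = {
  updated: [
    { type: 'user', id: '665f1b2c3d4e5f6a7b8c9d0e', accessRoleId: 'agent_editor' },
    { type: 'group', id: null, source: 'entra', idOnTheSource: 'entra-group-guid', accessRoleId: 'agent_viewer' },
  ],
  removed: [{ type: 'user', id: '665f1b2c3d4e5f6a7b8c9d0f' }],
  public: true,
  publicAccessRoleId: 'agent_viewer',
};

// await fetch(`/api/agents/${agentId}/permissions`, {
//   method: 'PUT',
//   headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${jwt}` },
//   body: JSON.stringify(body),
// });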
@@ -1,14 +1,9 @@
 const { logger } = require('@librechat/data-schemas');
-const { CacheKeys, Constants } = require('librechat-data-provider');
-const {
-  getToolkitKey,
-  checkPluginAuth,
-  filterUniquePlugins,
-  convertMCPToolsToPlugins,
-} = require('@librechat/api');
-const { getCustomConfig, getCachedTools } = require('~/server/services/Config');
+const { CacheKeys } = require('librechat-data-provider');
+const { getToolkitKey, checkPluginAuth, filterUniquePlugins } = require('@librechat/api');
+const { getCachedTools, setCachedTools } = require('~/server/services/Config');
 const { availableTools, toolkits } = require('~/app/clients/tools');
-const { getMCPManager, getFlowStateManager } = require('~/config');
+const { getAppConfig } = require('~/server/services/Config');
 const { getLogStores } = require('~/cache');
 
 const getAvailablePluginsController = async (req, res) => {
@@ -20,8 +15,10 @@ const getAvailablePluginsController = async (req, res) => {
     return;
   }
 
+  const appConfig = await getAppConfig({ role: req.user?.role });
   /** @type {{ filteredTools: string[], includedTools: string[] }} */
-  const { filteredTools = [], includedTools = [] } = req.app.locals;
+  const { filteredTools = [], includedTools = [] } = appConfig;
+  /** @type {import('@librechat/api').LCManifestTool[]} */
   const pluginManifest = availableTools;
 
   const uniquePlugins = filterUniquePlugins(pluginManifest);
@@ -47,45 +44,6 @@ const getAvailablePluginsController = async (req, res) => {
   }
 };
 
-function createServerToolsCallback() {
-  /**
-   * @param {string} serverName
-   * @param {TPlugin[] | null} serverTools
-   */
-  return async function (serverName, serverTools) {
-    try {
-      const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS);
-      if (!serverName || !mcpToolsCache) {
-        return;
-      }
-      await mcpToolsCache.set(serverName, serverTools);
-      logger.debug(`MCP tools for ${serverName} added to cache.`);
-    } catch (error) {
-      logger.error('Error retrieving MCP tools from cache:', error);
-    }
-  };
-}
-
-function createGetServerTools() {
-  /**
-   * Retrieves cached server tools
-   * @param {string} serverName
-   * @returns {Promise<TPlugin[] | null>}
-   */
-  return async function (serverName) {
-    try {
-      const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS);
-      if (!mcpToolsCache) {
-        return null;
-      }
-      return await mcpToolsCache.get(serverName);
-    } catch (error) {
-      logger.error('Error retrieving MCP tools from cache:', error);
-      return null;
-    }
-  };
-}
-
 /**
  * Retrieves and returns a list of available tools, either from a cache or by reading a plugin manifest file.
  *
@@ -101,37 +59,35 @@ function createGetServerTools() {
 const getAvailableTools = async (req, res) => {
   try {
     const userId = req.user?.id;
-    const customConfig = await getCustomConfig();
+    if (!userId) {
+      logger.warn('[getAvailableTools] User ID not found in request');
+      return res.status(401).json({ message: 'Unauthorized' });
+    }
     const cache = getLogStores(CacheKeys.CONFIG_STORE);
     const cachedToolsArray = await cache.get(CacheKeys.TOOLS);
-    const cachedUserTools = await getCachedTools({ userId });
-    const userPlugins = convertMCPToolsToPlugins({ functionTools: cachedUserTools, customConfig });
 
-    if (cachedToolsArray != null && userPlugins != null) {
-      const dedupedTools = filterUniquePlugins([...userPlugins, ...cachedToolsArray]);
-      res.status(200).json(dedupedTools);
+    const appConfig = req.config ?? (await getAppConfig({ role: req.user?.role }));
+
+    // Return early if we have cached tools
+    if (cachedToolsArray != null) {
+      res.status(200).json(cachedToolsArray);
       return;
     }
 
-    // If not in cache, build from manifest
-    let pluginManifest = availableTools;
-    if (customConfig?.mcpServers != null) {
-      const mcpManager = getMCPManager();
-      const flowsCache = getLogStores(CacheKeys.FLOWS);
-      const flowManager = flowsCache ? getFlowStateManager(flowsCache) : null;
-      const serverToolsCallback = createServerToolsCallback();
-      const getServerTools = createGetServerTools();
-      const mcpTools = await mcpManager.loadManifestTools({
-        flowManager,
-        serverToolsCallback,
-        getServerTools,
-      });
-      pluginManifest = [...mcpTools, ...pluginManifest];
+    /** @type {Record<string, FunctionTool> | null} Get tool definitions to filter which tools are actually available */
+    let toolDefinitions = await getCachedTools();
+
+    if (toolDefinitions == null && appConfig?.availableTools != null) {
+      logger.warn('[getAvailableTools] Tool cache was empty, re-initializing from app config');
+      await setCachedTools(appConfig.availableTools);
+      toolDefinitions = appConfig.availableTools;
     }
 
-    /** @type {TPlugin[]} */
-    const uniquePlugins = filterUniquePlugins(pluginManifest);
+    /** @type {import('@librechat/api').LCManifestTool[]} */
+    let pluginManifest = availableTools;
+
+    /** @type {TPlugin[]} Deduplicate and authenticate plugins */
+    const uniquePlugins = filterUniquePlugins(pluginManifest);
     const authenticatedPlugins = uniquePlugins.map((plugin) => {
       if (checkPluginAuth(plugin)) {
         return { ...plugin, authenticated: true };
@@ -140,14 +96,13 @@ const getAvailableTools = async (req, res) => {
       }
     });
 
-    const toolDefinitions = (await getCachedTools({ includeGlobal: true })) || {};
-
+    /** Filter plugins based on availability */
     const toolsOutput = [];
     for (const plugin of authenticatedPlugins) {
-      const isToolDefined = toolDefinitions[plugin.pluginKey] !== undefined;
+      const isToolDefined = toolDefinitions?.[plugin.pluginKey] !== undefined;
       const isToolkit =
         plugin.toolkit === true &&
-        Object.keys(toolDefinitions).some(
+        Object.keys(toolDefinitions ?? {}).some(
          (key) => getToolkitKey({ toolkits, toolName: key }) === plugin.pluginKey,
         );
 
@@ -155,44 +110,13 @@ const getAvailableTools = async (req, res) => {
         continue;
       }
-
-      const toolToAdd = { ...plugin };
-
-      if (!plugin.pluginKey.includes(Constants.mcp_delimiter)) {
-        toolsOutput.push(toolToAdd);
-        continue;
-      }
-
-      const parts = plugin.pluginKey.split(Constants.mcp_delimiter);
-      const serverName = parts[parts.length - 1];
-      const serverConfig = customConfig?.mcpServers?.[serverName];
-
-      if (!serverConfig?.customUserVars) {
-        toolsOutput.push(toolToAdd);
-        continue;
-      }
-
-      const customVarKeys = Object.keys(serverConfig.customUserVars);
-
-      if (customVarKeys.length === 0) {
-        toolToAdd.authConfig = [];
-        toolToAdd.authenticated = true;
-      } else {
-        toolToAdd.authConfig = Object.entries(serverConfig.customUserVars).map(([key, value]) => ({
-          authField: key,
-          label: value.title || key,
-          description: value.description || '',
-        }));
-        toolToAdd.authenticated = false;
-      }
-
-      toolsOutput.push(toolToAdd);
+      toolsOutput.push(plugin);
     }
 
     const finalTools = filterUniquePlugins(toolsOutput);
     await cache.set(CacheKeys.TOOLS, finalTools);
 
-    const dedupedTools = filterUniquePlugins([...userPlugins, ...finalTools]);
-
-    res.status(200).json(dedupedTools);
+    res.status(200).json(finalTools);
   } catch (error) {
     logger.error('[getAvailableTools]', error);
     res.status(500).json({ message: error.message });
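The refactored getAvailableTools now follows a cache-first flow: serve the cached manifest when present, otherwise rebuild tool definitions from app config and re-seed the cache. A distilled sketch of that flow with stand-in function bodies (names mirror the controller; behavior is simplified):

async function getToolsCacheFirst({ cache, getCachedTools, setCachedTools, appConfig, build }) {
  const cached = await cache.get('TOOLS');
  if (cached != null) {
    return cached; // fast path: no manifest work at all
  }
  let toolDefinitions = await getCachedTools();
  if (toolDefinitions == null && appConfig?.availableTools != null) {
    await setCachedTools(appConfig.availableTools); // re-seed an emptied cache
    toolDefinitions = appConfig.availableTools;
  }
  const finalTools = build(toolDefinitions);
  await cache.set('TOOLS', finalTools);
  return finalTools;
}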
@@ -1,30 +1,25 @@
-const { Constants } = require('librechat-data-provider');
-const { getCustomConfig, getCachedTools } = require('~/server/services/Config');
+const { getCachedTools, getAppConfig } = require('~/server/services/Config');
 const { getLogStores } = require('~/cache');
 
-// Mock the dependencies
 jest.mock('@librechat/data-schemas', () => ({
   logger: {
     debug: jest.fn(),
     error: jest.fn(),
+    warn: jest.fn(),
   },
 }));
 
 jest.mock('~/server/services/Config', () => ({
-  getCustomConfig: jest.fn(),
   getCachedTools: jest.fn(),
+  getAppConfig: jest.fn().mockResolvedValue({
+    filteredTools: [],
+    includedTools: [],
+  }),
+  setCachedTools: jest.fn(),
 }));
 
-jest.mock('~/server/services/ToolService', () => ({
-  getToolkitKey: jest.fn(),
-}));
-
-jest.mock('~/config', () => ({
-  getMCPManager: jest.fn(() => ({
-    loadManifestTools: jest.fn().mockResolvedValue([]),
-  })),
-  getFlowStateManager: jest.fn(),
-}));
+// loadAndFormatTools mock removed - no longer used in PluginController
+// getMCPManager mock removed - no longer used in PluginController
 
 jest.mock('~/app/clients/tools', () => ({
   availableTools: [],
@@ -35,71 +30,87 @@ jest.mock('~/cache', () => ({
   getLogStores: jest.fn(),
 }));
 
-jest.mock('@librechat/api', () => ({
-  getToolkitKey: jest.fn(),
-  checkPluginAuth: jest.fn(),
-  filterUniquePlugins: jest.fn(),
-  convertMCPToolsToPlugins: jest.fn(),
-}));
-
-// Import the actual module with the function we want to test
 const { getAvailableTools, getAvailablePluginsController } = require('./PluginController');
-const {
-  filterUniquePlugins,
-  checkPluginAuth,
-  convertMCPToolsToPlugins,
-  getToolkitKey,
-} = require('@librechat/api');
 
 describe('PluginController', () => {
   let mockReq, mockRes, mockCache;
 
   beforeEach(() => {
     jest.clearAllMocks();
-    mockReq = { user: { id: 'test-user-id' } };
+    mockReq = {
+      user: { id: 'test-user-id' },
+      config: {
+        filteredTools: [],
+        includedTools: [],
+      },
+    };
     mockRes = { status: jest.fn().mockReturnThis(), json: jest.fn() };
     mockCache = { get: jest.fn(), set: jest.fn() };
     getLogStores.mockReturnValue(mockCache);
+
+    // Clear availableTools and toolkits arrays before each test
+    require('~/app/clients/tools').availableTools.length = 0;
+    require('~/app/clients/tools').toolkits.length = 0;
+
+    // Reset getCachedTools mock to ensure clean state
+    getCachedTools.mockReset();
+
+    // Reset getAppConfig mock to ensure clean state with default values
+    getAppConfig.mockReset();
+    getAppConfig.mockResolvedValue({
+      filteredTools: [],
+      includedTools: [],
+    });
   });
 
   describe('getAvailablePluginsController', () => {
-    beforeEach(() => {
-      mockReq.app = { locals: { filteredTools: [], includedTools: [] } };
-    });
-
     it('should use filterUniquePlugins to remove duplicate plugins', async () => {
+      // Add plugins with duplicates to availableTools
       const mockPlugins = [
        { name: 'Plugin1', pluginKey: 'key1', description: 'First' },
+        { name: 'Plugin1', pluginKey: 'key1', description: 'First duplicate' },
         { name: 'Plugin2', pluginKey: 'key2', description: 'Second' },
       ];
+
+      require('~/app/clients/tools').availableTools.push(...mockPlugins);
+
       mockCache.get.mockResolvedValue(null);
-      filterUniquePlugins.mockReturnValue(mockPlugins);
-      checkPluginAuth.mockReturnValue(true);
+      // Configure getAppConfig to return the expected config
+      getAppConfig.mockResolvedValueOnce({
+        filteredTools: [],
+        includedTools: [],
+      });
 
       await getAvailablePluginsController(mockReq, mockRes);
 
-      expect(filterUniquePlugins).toHaveBeenCalled();
       expect(mockRes.status).toHaveBeenCalledWith(200);
-      // The response includes authenticated: true for each plugin when checkPluginAuth returns true
-      expect(mockRes.json).toHaveBeenCalledWith([
-        { name: 'Plugin1', pluginKey: 'key1', description: 'First', authenticated: true },
-        { name: 'Plugin2', pluginKey: 'key2', description: 'Second', authenticated: true },
-      ]);
+      const responseData = mockRes.json.mock.calls[0][0];
+      // The real filterUniquePlugins should have removed the duplicate
+      expect(responseData).toHaveLength(2);
+      expect(responseData[0].pluginKey).toBe('key1');
+      expect(responseData[1].pluginKey).toBe('key2');
     });
 
     it('should use checkPluginAuth to verify plugin authentication', async () => {
+      // checkPluginAuth returns false for plugins without authConfig
+      // so authenticated property won't be added
       const mockPlugin = { name: 'Plugin1', pluginKey: 'key1', description: 'First' };
+
+      require('~/app/clients/tools').availableTools.push(mockPlugin);
       mockCache.get.mockResolvedValue(null);
-      filterUniquePlugins.mockReturnValue([mockPlugin]);
-      checkPluginAuth.mockReturnValueOnce(true);
+      // Configure getAppConfig to return the expected config
+      getAppConfig.mockResolvedValueOnce({
+        filteredTools: [],
+        includedTools: [],
+      });
 
       await getAvailablePluginsController(mockReq, mockRes);
 
-      expect(checkPluginAuth).toHaveBeenCalledWith(mockPlugin);
       const responseData = mockRes.json.mock.calls[0][0];
-      expect(responseData[0].authenticated).toBe(true);
+      // The real checkPluginAuth returns false for plugins without authConfig, so authenticated property is not added
+      expect(responseData[0].authenticated).toBeUndefined();
     });
 
     it('should return cached plugins when available', async () => {
@@ -111,8 +122,7 @@ describe('PluginController', () => {
 
       await getAvailablePluginsController(mockReq, mockRes);
 
-      expect(filterUniquePlugins).not.toHaveBeenCalled();
-      expect(checkPluginAuth).not.toHaveBeenCalled();
+      // When cache is hit, we return immediately without processing
       expect(mockRes.json).toHaveBeenCalledWith(cachedPlugins);
     });
 
@@ -122,10 +132,14 @@ describe('PluginController', () => {
         { name: 'Plugin2', pluginKey: 'key2', description: 'Second' },
       ];
 
-      mockReq.app.locals.includedTools = ['key1'];
+      require('~/app/clients/tools').availableTools.push(...mockPlugins);
       mockCache.get.mockResolvedValue(null);
-      filterUniquePlugins.mockReturnValue(mockPlugins);
-      checkPluginAuth.mockReturnValue(false);
+      // Configure getAppConfig to return config with includedTools
+      getAppConfig.mockResolvedValueOnce({
+        filteredTools: [],
+        includedTools: ['key1'],
+      });
 
       await getAvailablePluginsController(mockReq, mockRes);
 
@@ -136,73 +150,77 @@ describe('PluginController', () => {
   });
 
   describe('getAvailableTools', () => {
-    it('should use convertMCPToolsToPlugins for user-specific MCP tools', async () => {
+    it('should use filterUniquePlugins to deduplicate combined tools', async () => {
       const mockUserTools = {
-        [`tool1${Constants.mcp_delimiter}server1`]: {
-          function: { name: 'tool1', description: 'Tool 1' },
+        'user-tool': {
+          type: 'function',
+          function: {
+            name: 'user-tool',
+            description: 'User tool',
+            parameters: { type: 'object', properties: {} },
+          },
         },
       };
-      const mockConvertedPlugins = [
-        {
-          name: 'tool1',
-          pluginKey: `tool1${Constants.mcp_delimiter}server1`,
-          description: 'Tool 1',
-        },
-      ];
 
-      mockCache.get.mockResolvedValue(null);
-      getCachedTools.mockResolvedValueOnce(mockUserTools);
-      convertMCPToolsToPlugins.mockReturnValue(mockConvertedPlugins);
-      filterUniquePlugins.mockImplementation((plugins) => plugins);
-      getCustomConfig.mockResolvedValue(null);
-
-      await getAvailableTools(mockReq, mockRes);
-
-      expect(convertMCPToolsToPlugins).toHaveBeenCalledWith({
-        functionTools: mockUserTools,
-        customConfig: null,
-      });
-    });
-
-    it('should use filterUniquePlugins to deduplicate combined tools', async () => {
+      const mockCachedPlugins = [
+        { name: 'user-tool', pluginKey: 'user-tool', description: 'Duplicate user tool' },
|
|
||||||
const mockUserPlugins = [
|
|
||||||
{ name: 'UserTool', pluginKey: 'user-tool', description: 'User tool' },
|
|
||||||
];
|
|
||||||
const mockManifestPlugins = [
|
|
||||||
{ name: 'ManifestTool', pluginKey: 'manifest-tool', description: 'Manifest tool' },
|
{ name: 'ManifestTool', pluginKey: 'manifest-tool', description: 'Manifest tool' },
|
||||||
];
|
];
|
||||||
|
|
||||||
mockCache.get.mockResolvedValue(mockManifestPlugins);
|
mockCache.get.mockResolvedValue(mockCachedPlugins);
|
||||||
getCachedTools.mockResolvedValueOnce({});
|
getCachedTools.mockResolvedValueOnce(mockUserTools);
|
||||||
convertMCPToolsToPlugins.mockReturnValue(mockUserPlugins);
|
mockReq.config = {
|
||||||
filterUniquePlugins.mockReturnValue([...mockUserPlugins, ...mockManifestPlugins]);
|
mcpConfig: null,
|
||||||
getCustomConfig.mockResolvedValue(null);
|
paths: { structuredTools: '/mock/path' },
|
||||||
|
};
|
||||||
|
|
||||||
await getAvailableTools(mockReq, mockRes);
|
await getAvailableTools(mockReq, mockRes);
|
||||||
|
|
||||||
// Should be called to deduplicate the combined array
|
expect(mockRes.status).toHaveBeenCalledWith(200);
|
||||||
expect(filterUniquePlugins).toHaveBeenLastCalledWith([
|
const responseData = mockRes.json.mock.calls[0][0];
|
||||||
...mockUserPlugins,
|
expect(Array.isArray(responseData)).toBe(true);
|
||||||
...mockManifestPlugins,
|
// The real filterUniquePlugins should have deduplicated tools with same pluginKey
|
||||||
]);
|
const userToolCount = responseData.filter((tool) => tool.pluginKey === 'user-tool').length;
|
||||||
|
expect(userToolCount).toBe(1);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should use checkPluginAuth to verify authentication status', async () => {
|
it('should use checkPluginAuth to verify authentication status', async () => {
|
||||||
const mockPlugin = { name: 'Tool1', pluginKey: 'tool1', description: 'Tool 1' };
|
// Add a plugin to availableTools that will be checked
|
||||||
|
const mockPlugin = {
|
||||||
|
name: 'Tool1',
|
||||||
|
pluginKey: 'tool1',
|
||||||
|
description: 'Tool 1',
|
||||||
|
// No authConfig means checkPluginAuth returns false
|
||||||
|
};
|
||||||
|
|
||||||
|
require('~/app/clients/tools').availableTools.push(mockPlugin);
|
||||||
|
|
||||||
mockCache.get.mockResolvedValue(null);
|
mockCache.get.mockResolvedValue(null);
|
||||||
getCachedTools.mockResolvedValue({});
|
// getCachedTools returns the tool definitions
|
||||||
convertMCPToolsToPlugins.mockReturnValue([]);
|
getCachedTools.mockResolvedValueOnce({
|
||||||
filterUniquePlugins.mockReturnValue([mockPlugin]);
|
tool1: {
|
||||||
checkPluginAuth.mockReturnValue(true);
|
type: 'function',
|
||||||
getCustomConfig.mockResolvedValue(null);
|
function: {
|
||||||
|
name: 'tool1',
|
||||||
// Mock getCachedTools second call to return tool definitions
|
description: 'Tool 1',
|
||||||
getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({ tool1: true });
|
parameters: {},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
mockReq.config = {
|
||||||
|
mcpConfig: null,
|
||||||
|
paths: { structuredTools: '/mock/path' },
|
||||||
|
};
|
||||||
|
|
||||||
await getAvailableTools(mockReq, mockRes);
|
await getAvailableTools(mockReq, mockRes);
|
||||||
|
|
||||||
expect(checkPluginAuth).toHaveBeenCalledWith(mockPlugin);
|
expect(mockRes.status).toHaveBeenCalledWith(200);
|
||||||
|
const responseData = mockRes.json.mock.calls[0][0];
|
||||||
|
expect(Array.isArray(responseData)).toBe(true);
|
||||||
|
const tool = responseData.find((t) => t.pluginKey === 'tool1');
|
||||||
|
expect(tool).toBeDefined();
|
||||||
|
// The real checkPluginAuth returns false for plugins without authConfig, so authenticated property is not added
|
||||||
|
expect(tool.authenticated).toBeUndefined();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should use getToolkitKey for toolkit validation', async () => {
|
it('should use getToolkitKey for toolkit validation', async () => {
|
||||||
@@ -213,144 +231,43 @@ describe('PluginController', () => {
|
|||||||
toolkit: true,
|
toolkit: true,
|
||||||
};
|
};
|
||||||
|
|
||||||
mockCache.get.mockResolvedValue(null);
|
require('~/app/clients/tools').availableTools.push(mockToolkit);
|
||||||
getCachedTools.mockResolvedValue({});
|
|
||||||
convertMCPToolsToPlugins.mockReturnValue([]);
|
|
||||||
filterUniquePlugins.mockReturnValue([mockToolkit]);
|
|
||||||
checkPluginAuth.mockReturnValue(false);
|
|
||||||
getToolkitKey.mockReturnValue('toolkit1');
|
|
||||||
getCustomConfig.mockResolvedValue(null);
|
|
||||||
|
|
||||||
// Mock getCachedTools second call to return tool definitions
|
// Mock toolkits to have a mapping
|
||||||
getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({
|
require('~/app/clients/tools').toolkits.push({
|
||||||
toolkit1_function: true,
|
name: 'Toolkit1',
|
||||||
|
pluginKey: 'toolkit1',
|
||||||
|
tools: ['toolkit1_function'],
|
||||||
});
|
});
|
||||||
|
|
||||||
await getAvailableTools(mockReq, mockRes);
|
|
||||||
|
|
||||||
expect(getToolkitKey).toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('plugin.icon behavior', () => {
|
|
||||||
const callGetAvailableToolsWithMCPServer = async (mcpServers) => {
|
|
||||||
mockCache.get.mockResolvedValue(null);
|
mockCache.get.mockResolvedValue(null);
|
||||||
getCustomConfig.mockResolvedValue({ mcpServers });
|
// getCachedTools returns the tool definitions
|
||||||
|
|
||||||
const functionTools = {
|
|
||||||
[`test-tool${Constants.mcp_delimiter}test-server`]: {
|
|
||||||
function: { name: 'test-tool', description: 'A test tool' },
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
const mockConvertedPlugin = {
|
|
||||||
name: 'test-tool',
|
|
||||||
pluginKey: `test-tool${Constants.mcp_delimiter}test-server`,
|
|
||||||
description: 'A test tool',
|
|
||||||
icon: mcpServers['test-server']?.iconPath,
|
|
||||||
authenticated: true,
|
|
||||||
authConfig: [],
|
|
||||||
};
|
|
||||||
|
|
||||||
getCachedTools.mockResolvedValueOnce(functionTools);
|
|
||||||
convertMCPToolsToPlugins.mockReturnValue([mockConvertedPlugin]);
|
|
||||||
filterUniquePlugins.mockImplementation((plugins) => plugins);
|
|
||||||
checkPluginAuth.mockReturnValue(true);
|
|
||||||
getToolkitKey.mockReturnValue(undefined);
|
|
||||||
|
|
||||||
getCachedTools.mockResolvedValueOnce({
|
getCachedTools.mockResolvedValueOnce({
|
||||||
[`test-tool${Constants.mcp_delimiter}test-server`]: true,
|
toolkit1_function: {
|
||||||
|
type: 'function',
|
||||||
|
function: {
|
||||||
|
name: 'toolkit1_function',
|
||||||
|
description: 'Toolkit function',
|
||||||
|
parameters: {},
|
||||||
|
},
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
mockReq.config = {
|
||||||
|
mcpConfig: null,
|
||||||
|
paths: { structuredTools: '/mock/path' },
|
||||||
|
};
|
||||||
|
|
||||||
await getAvailableTools(mockReq, mockRes);
|
await getAvailableTools(mockReq, mockRes);
|
||||||
|
|
||||||
|
expect(mockRes.status).toHaveBeenCalledWith(200);
|
||||||
const responseData = mockRes.json.mock.calls[0][0];
|
const responseData = mockRes.json.mock.calls[0][0];
|
||||||
return responseData.find((tool) => tool.name === 'test-tool');
|
expect(Array.isArray(responseData)).toBe(true);
|
||||||
};
|
const toolkit = responseData.find((t) => t.pluginKey === 'toolkit1');
|
||||||
|
expect(toolkit).toBeDefined();
|
||||||
it('should set plugin.icon when iconPath is defined', async () => {
|
|
||||||
const mcpServers = {
|
|
||||||
'test-server': {
|
|
||||||
iconPath: '/path/to/icon.png',
|
|
||||||
},
|
|
||||||
};
|
|
||||||
const testTool = await callGetAvailableToolsWithMCPServer(mcpServers);
|
|
||||||
expect(testTool.icon).toBe('/path/to/icon.png');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should set plugin.icon to undefined when iconPath is not defined', async () => {
|
|
||||||
const mcpServers = {
|
|
||||||
'test-server': {},
|
|
||||||
};
|
|
||||||
const testTool = await callGetAvailableToolsWithMCPServer(mcpServers);
|
|
||||||
expect(testTool.icon).toBeUndefined();
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('helper function integration', () => {
|
describe('helper function integration', () => {
|
||||||
it('should properly handle MCP tools with custom user variables', async () => {
|
|
||||||
const customConfig = {
|
|
||||||
mcpServers: {
|
|
||||||
'test-server': {
|
|
||||||
customUserVars: {
|
|
||||||
API_KEY: { title: 'API Key', description: 'Your API key' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
// We need to test the actual flow where MCP manager tools are included
|
|
||||||
const mcpManagerTools = [
|
|
||||||
{
|
|
||||||
name: 'tool1',
|
|
||||||
pluginKey: `tool1${Constants.mcp_delimiter}test-server`,
|
|
||||||
description: 'Tool 1',
|
|
||||||
authenticated: true,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
// Mock the MCP manager to return tools
|
|
||||||
const mockMCPManager = {
|
|
||||||
loadManifestTools: jest.fn().mockResolvedValue(mcpManagerTools),
|
|
||||||
};
|
|
||||||
require('~/config').getMCPManager.mockReturnValue(mockMCPManager);
|
|
||||||
|
|
||||||
mockCache.get.mockResolvedValue(null);
|
|
||||||
getCustomConfig.mockResolvedValue(customConfig);
|
|
||||||
|
|
||||||
// First call returns user tools (empty in this case)
|
|
||||||
getCachedTools.mockResolvedValueOnce({});
|
|
||||||
|
|
||||||
// Mock convertMCPToolsToPlugins to return empty array for user tools
|
|
||||||
convertMCPToolsToPlugins.mockReturnValue([]);
|
|
||||||
|
|
||||||
// Mock filterUniquePlugins to pass through
|
|
||||||
filterUniquePlugins.mockImplementation((plugins) => plugins || []);
|
|
||||||
|
|
||||||
// Mock checkPluginAuth
|
|
||||||
checkPluginAuth.mockReturnValue(true);
|
|
||||||
|
|
||||||
// Second call returns tool definitions
|
|
||||||
getCachedTools.mockResolvedValueOnce({
|
|
||||||
[`tool1${Constants.mcp_delimiter}test-server`]: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
await getAvailableTools(mockReq, mockRes);
|
|
||||||
|
|
||||||
const responseData = mockRes.json.mock.calls[0][0];
|
|
||||||
|
|
||||||
// Find the MCP tool in the response
|
|
||||||
const mcpTool = responseData.find(
|
|
||||||
(tool) => tool.pluginKey === `tool1${Constants.mcp_delimiter}test-server`,
|
|
||||||
);
|
|
||||||
|
|
||||||
// The actual implementation adds authConfig and sets authenticated to false when customUserVars exist
|
|
||||||
expect(mcpTool).toBeDefined();
|
|
||||||
expect(mcpTool.authConfig).toEqual([
|
|
||||||
{ authField: 'API_KEY', label: 'API Key', description: 'Your API key' },
|
|
||||||
]);
|
|
||||||
expect(mcpTool.authenticated).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle error cases gracefully', async () => {
|
it('should handle error cases gracefully', async () => {
|
||||||
mockCache.get.mockRejectedValue(new Error('Cache error'));
|
mockCache.get.mockRejectedValue(new Error('Cache error'));
|
||||||
|
|
||||||
@@ -372,64 +289,47 @@ describe('PluginController', () => {
|
|||||||
|
|
||||||
it('should handle null cachedTools and cachedUserTools', async () => {
|
it('should handle null cachedTools and cachedUserTools', async () => {
|
||||||
mockCache.get.mockResolvedValue(null);
|
mockCache.get.mockResolvedValue(null);
|
||||||
getCachedTools.mockResolvedValue(null);
|
// getCachedTools returns empty object instead of null
|
||||||
convertMCPToolsToPlugins.mockReturnValue(undefined);
|
getCachedTools.mockResolvedValueOnce({});
|
||||||
filterUniquePlugins.mockImplementation((plugins) => plugins || []);
|
mockReq.config = {
|
||||||
getCustomConfig.mockResolvedValue(null);
|
mcpConfig: null,
|
||||||
|
paths: { structuredTools: '/mock/path' },
|
||||||
|
};
|
||||||
|
|
||||||
await getAvailableTools(mockReq, mockRes);
|
await getAvailableTools(mockReq, mockRes);
|
||||||
|
|
||||||
expect(convertMCPToolsToPlugins).toHaveBeenCalledWith({
|
// Should handle null values gracefully
|
||||||
functionTools: null,
|
expect(mockRes.status).toHaveBeenCalledWith(200);
|
||||||
customConfig: null,
|
expect(mockRes.json).toHaveBeenCalledWith([]);
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle when getCachedTools returns undefined', async () => {
|
it('should handle when getCachedTools returns undefined', async () => {
|
||||||
mockCache.get.mockResolvedValue(null);
|
mockCache.get.mockResolvedValue(null);
|
||||||
getCachedTools.mockResolvedValue(undefined);
|
mockReq.config = {
|
||||||
convertMCPToolsToPlugins.mockReturnValue(undefined);
|
mcpConfig: null,
|
||||||
filterUniquePlugins.mockImplementation((plugins) => plugins || []);
|
paths: { structuredTools: '/mock/path' },
|
||||||
getCustomConfig.mockResolvedValue(null);
|
|
||||||
checkPluginAuth.mockReturnValue(false);
|
|
||||||
|
|
||||||
// Mock getCachedTools to return undefined for both calls
|
|
||||||
getCachedTools.mockReset();
|
|
||||||
getCachedTools.mockResolvedValueOnce(undefined).mockResolvedValueOnce(undefined);
|
|
||||||
|
|
||||||
await getAvailableTools(mockReq, mockRes);
|
|
||||||
|
|
||||||
expect(convertMCPToolsToPlugins).toHaveBeenCalledWith({
|
|
||||||
functionTools: undefined,
|
|
||||||
customConfig: null,
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle cachedToolsArray and userPlugins both being defined', async () => {
|
|
||||||
const cachedTools = [{ name: 'CachedTool', pluginKey: 'cached-tool', description: 'Cached' }];
|
|
||||||
const userTools = {
|
|
||||||
'user-tool': { function: { name: 'user-tool', description: 'User tool' } },
|
|
||||||
};
|
};
|
||||||
const userPlugins = [{ name: 'UserTool', pluginKey: 'user-tool', description: 'User tool' }];
|
|
||||||
|
|
||||||
mockCache.get.mockResolvedValue(cachedTools);
|
// Mock getCachedTools to return undefined
|
||||||
getCachedTools.mockResolvedValue(userTools);
|
getCachedTools.mockReset();
|
||||||
convertMCPToolsToPlugins.mockReturnValue(userPlugins);
|
getCachedTools.mockResolvedValueOnce(undefined);
|
||||||
filterUniquePlugins.mockReturnValue([...userPlugins, ...cachedTools]);
|
|
||||||
|
|
||||||
await getAvailableTools(mockReq, mockRes);
|
await getAvailableTools(mockReq, mockRes);
|
||||||
|
|
||||||
|
// Should handle undefined values gracefully
|
||||||
expect(mockRes.status).toHaveBeenCalledWith(200);
|
expect(mockRes.status).toHaveBeenCalledWith(200);
|
||||||
expect(mockRes.json).toHaveBeenCalledWith([...userPlugins, ...cachedTools]);
|
expect(mockRes.json).toHaveBeenCalledWith([]);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle empty toolDefinitions object', async () => {
|
it('should handle empty toolDefinitions object', async () => {
|
||||||
mockCache.get.mockResolvedValue(null);
|
mockCache.get.mockResolvedValue(null);
|
||||||
getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({});
|
// Reset getCachedTools to ensure clean state
|
||||||
convertMCPToolsToPlugins.mockReturnValue([]);
|
getCachedTools.mockReset();
|
||||||
filterUniquePlugins.mockImplementation((plugins) => plugins || []);
|
getCachedTools.mockResolvedValue({});
|
||||||
getCustomConfig.mockResolvedValue(null);
|
mockReq.config = {}; // No mcpConfig at all
|
||||||
checkPluginAuth.mockReturnValue(true);
|
|
||||||
|
// Ensure no plugins are available
|
||||||
|
require('~/app/clients/tools').availableTools.length = 0;
|
||||||
|
|
||||||
await getAvailableTools(mockReq, mockRes);
|
await getAvailableTools(mockReq, mockRes);
|
||||||
|
|
||||||
@@ -437,54 +337,13 @@ describe('PluginController', () => {
|
|||||||
expect(mockRes.json).toHaveBeenCalledWith([]);
|
expect(mockRes.json).toHaveBeenCalledWith([]);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle MCP tools without customUserVars', async () => {
|
it('should handle undefined filteredTools and includedTools', async () => {
|
||||||
const customConfig = {
|
mockReq.config = {};
|
||||||
mcpServers: {
|
|
||||||
'test-server': {
|
|
||||||
// No customUserVars defined
|
|
||||||
},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
const mockUserTools = {
|
|
||||||
[`tool1${Constants.mcp_delimiter}test-server`]: {
|
|
||||||
function: { name: 'tool1', description: 'Tool 1' },
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
mockCache.get.mockResolvedValue(null);
|
mockCache.get.mockResolvedValue(null);
|
||||||
getCustomConfig.mockResolvedValue(customConfig);
|
|
||||||
getCachedTools.mockResolvedValueOnce(mockUserTools);
|
|
||||||
|
|
||||||
const mockPlugin = {
|
// Configure getAppConfig to return config with undefined properties
|
||||||
name: 'tool1',
|
// The controller will use default values [] for filteredTools and includedTools
|
||||||
pluginKey: `tool1${Constants.mcp_delimiter}test-server`,
|
getAppConfig.mockResolvedValueOnce({});
|
||||||
description: 'Tool 1',
|
|
||||||
authenticated: true,
|
|
||||||
authConfig: [],
|
|
||||||
};
|
|
||||||
|
|
||||||
convertMCPToolsToPlugins.mockReturnValue([mockPlugin]);
|
|
||||||
filterUniquePlugins.mockImplementation((plugins) => plugins);
|
|
||||||
checkPluginAuth.mockReturnValue(true);
|
|
||||||
|
|
||||||
getCachedTools.mockResolvedValueOnce({
|
|
||||||
[`tool1${Constants.mcp_delimiter}test-server`]: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
await getAvailableTools(mockReq, mockRes);
|
|
||||||
|
|
||||||
const responseData = mockRes.json.mock.calls[0][0];
|
|
||||||
expect(responseData[0].authenticated).toBe(true);
|
|
||||||
// The actual implementation doesn't set authConfig on tools without customUserVars
|
|
||||||
expect(responseData[0].authConfig).toEqual([]);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle req.app.locals with undefined filteredTools and includedTools', async () => {
|
|
||||||
mockReq.app = { locals: {} };
|
|
||||||
mockCache.get.mockResolvedValue(null);
|
|
||||||
filterUniquePlugins.mockReturnValue([]);
|
|
||||||
checkPluginAuth.mockReturnValue(false);
|
|
||||||
|
|
||||||
await getAvailablePluginsController(mockReq, mockRes);
|
await getAvailablePluginsController(mockReq, mockRes);
|
||||||
|
|
||||||
@@ -500,21 +359,135 @@ describe('PluginController', () => {
|
|||||||
toolkit: true,
|
toolkit: true,
|
||||||
};
|
};
|
||||||
|
|
||||||
mockCache.get.mockResolvedValue(null);
|
// No need to mock app.locals anymore as it's not used
|
||||||
getCachedTools.mockResolvedValue({});
|
|
||||||
convertMCPToolsToPlugins.mockReturnValue([]);
|
|
||||||
filterUniquePlugins.mockReturnValue([mockToolkit]);
|
|
||||||
checkPluginAuth.mockReturnValue(false);
|
|
||||||
getToolkitKey.mockReturnValue(undefined);
|
|
||||||
getCustomConfig.mockResolvedValue(null);
|
|
||||||
|
|
||||||
// Mock getCachedTools second call to return null
|
// Add the toolkit to availableTools
|
||||||
getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce(null);
|
require('~/app/clients/tools').availableTools.push(mockToolkit);
|
||||||
|
|
||||||
|
mockCache.get.mockResolvedValue(null);
|
||||||
|
// getCachedTools returns empty object to avoid null reference error
|
||||||
|
getCachedTools.mockResolvedValueOnce({});
|
||||||
|
mockReq.config = {
|
||||||
|
mcpConfig: null,
|
||||||
|
paths: { structuredTools: '/mock/path' },
|
||||||
|
};
|
||||||
|
|
||||||
await getAvailableTools(mockReq, mockRes);
|
await getAvailableTools(mockReq, mockRes);
|
||||||
|
|
||||||
// Should handle null toolDefinitions gracefully
|
// Should handle null toolDefinitions gracefully
|
||||||
expect(mockRes.status).toHaveBeenCalledWith(200);
|
expect(mockRes.status).toHaveBeenCalledWith(200);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should handle undefined toolDefinitions when checking isToolDefined (traversaal_search bug)', async () => {
|
||||||
|
// This test reproduces the bug where toolDefinitions is undefined
|
||||||
|
// and accessing toolDefinitions[plugin.pluginKey] causes a TypeError
|
||||||
|
const mockPlugin = {
|
||||||
|
name: 'Traversaal Search',
|
||||||
|
pluginKey: 'traversaal_search',
|
||||||
|
description: 'Search plugin',
|
||||||
|
};
|
||||||
|
|
||||||
|
// Add the plugin to availableTools
|
||||||
|
require('~/app/clients/tools').availableTools.push(mockPlugin);
|
||||||
|
|
||||||
|
mockCache.get.mockResolvedValue(null);
|
||||||
|
|
||||||
|
mockReq.config = {
|
||||||
|
mcpConfig: null,
|
||||||
|
paths: { structuredTools: '/mock/path' },
|
||||||
|
};
|
||||||
|
|
||||||
|
// CRITICAL: getCachedTools returns undefined
|
||||||
|
// This is what causes the bug when trying to access toolDefinitions[plugin.pluginKey]
|
||||||
|
getCachedTools.mockResolvedValueOnce(undefined);
|
||||||
|
|
||||||
|
// This should not throw an error with the optional chaining fix
|
||||||
|
await getAvailableTools(mockReq, mockRes);
|
||||||
|
|
||||||
|
// Should handle undefined toolDefinitions gracefully and return empty array
|
||||||
|
expect(mockRes.status).toHaveBeenCalledWith(200);
|
||||||
|
expect(mockRes.json).toHaveBeenCalledWith([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should re-initialize tools from appConfig when cache returns null', async () => {
|
||||||
|
// Setup: Initial state with tools in appConfig
|
||||||
|
const mockAppTools = {
|
||||||
|
tool1: {
|
||||||
|
type: 'function',
|
||||||
|
function: {
|
||||||
|
name: 'tool1',
|
||||||
|
description: 'Tool 1',
|
||||||
|
parameters: {},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tool2: {
|
||||||
|
type: 'function',
|
||||||
|
function: {
|
||||||
|
name: 'tool2',
|
||||||
|
description: 'Tool 2',
|
||||||
|
parameters: {},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
// Add matching plugins to availableTools
|
||||||
|
require('~/app/clients/tools').availableTools.push(
|
||||||
|
{ name: 'Tool 1', pluginKey: 'tool1', description: 'Tool 1' },
|
||||||
|
{ name: 'Tool 2', pluginKey: 'tool2', description: 'Tool 2' },
|
||||||
|
);
|
||||||
|
|
||||||
|
// Simulate cache cleared state (returns null)
|
||||||
|
mockCache.get.mockResolvedValue(null);
|
||||||
|
getCachedTools.mockResolvedValueOnce(null); // Global tools (cache cleared)
|
||||||
|
|
||||||
|
mockReq.config = {
|
||||||
|
filteredTools: [],
|
||||||
|
includedTools: [],
|
||||||
|
availableTools: mockAppTools,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Mock setCachedTools to verify it's called to re-initialize
|
||||||
|
const { setCachedTools } = require('~/server/services/Config');
|
||||||
|
|
||||||
|
await getAvailableTools(mockReq, mockRes);
|
||||||
|
|
||||||
|
// Should have re-initialized the cache with tools from appConfig
|
||||||
|
expect(setCachedTools).toHaveBeenCalledWith(mockAppTools);
|
||||||
|
|
||||||
|
// Should still return tools successfully
|
||||||
|
expect(mockRes.status).toHaveBeenCalledWith(200);
|
||||||
|
const responseData = mockRes.json.mock.calls[0][0];
|
||||||
|
expect(responseData).toHaveLength(2);
|
||||||
|
expect(responseData.find((t) => t.pluginKey === 'tool1')).toBeDefined();
|
||||||
|
expect(responseData.find((t) => t.pluginKey === 'tool2')).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle cache clear without appConfig.availableTools gracefully', async () => {
|
||||||
|
// Setup: appConfig without availableTools
|
||||||
|
getAppConfig.mockResolvedValue({
|
||||||
|
filteredTools: [],
|
||||||
|
includedTools: [],
|
||||||
|
// No availableTools property
|
||||||
|
});
|
||||||
|
|
||||||
|
// Clear availableTools array
|
||||||
|
require('~/app/clients/tools').availableTools.length = 0;
|
||||||
|
|
||||||
|
// Cache returns null (cleared state)
|
||||||
|
mockCache.get.mockResolvedValue(null);
|
||||||
|
getCachedTools.mockResolvedValueOnce(null); // Global tools (cache cleared)
|
||||||
|
|
||||||
|
mockReq.config = {
|
||||||
|
filteredTools: [],
|
||||||
|
includedTools: [],
|
||||||
|
// No availableTools
|
||||||
|
};
|
||||||
|
|
||||||
|
await getAvailableTools(mockReq, mockRes);
|
||||||
|
|
||||||
|
// Should handle gracefully without crashing
|
||||||
|
expect(mockRes.status).toHaveBeenCalledWith(200);
|
||||||
|
expect(mockRes.json).toHaveBeenCalledWith([]);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
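Note: the updated tests above exercise the real filterUniquePlugins and checkPluginAuth helpers instead of mocking them. A minimal sketch of the dedupe-by-pluginKey behavior the assertions assume (illustrative only, not the actual implementation):

// Illustrative sketch: keep the first plugin seen for each pluginKey.
function filterUniquePlugins(plugins = []) {
  const seen = new Set();
  return plugins.filter((plugin) => {
    if (seen.has(plugin.pluginKey)) {
      return false;
    }
    seen.add(plugin.pluginKey);
    return true;
  });
}

// With the fixture above, the second 'key1' entry is dropped:
// filterUniquePlugins(mockPlugins) -> entries for 'key1' and 'key2' only.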
@@ -47,7 +47,7 @@ const verify2FA = async (req, res) => {
  try {
    const userId = req.user.id;
    const { token, backupCode } = req.body;
-   const user = await getUserById(userId);
+   const user = await getUserById(userId, '_id totpSecret backupCodes');

    if (!user || !user.totpSecret) {
      return res.status(400).json({ message: '2FA not initiated' });
@@ -79,7 +79,7 @@ const confirm2FA = async (req, res) => {
  try {
    const userId = req.user.id;
    const { token } = req.body;
-   const user = await getUserById(userId);
+   const user = await getUserById(userId, '_id totpSecret');

    if (!user || !user.totpSecret) {
      return res.status(400).json({ message: '2FA not initiated' });
@@ -105,7 +105,7 @@ const disable2FA = async (req, res) => {
  try {
    const userId = req.user.id;
    const { token, backupCode } = req.body;
-   const user = await getUserById(userId);
+   const user = await getUserById(userId, '_id totpSecret backupCodes');

    if (!user || !user.totpSecret) {
      return res.status(400).json({ message: '2FA is not setup for this user' });
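The second argument added to getUserById above is a field selection, so each 2FA handler fetches only the fields it actually needs. A minimal sketch, assuming the argument is forwarded to Mongoose's select() (the real model helper may differ):

const { User } = require('~/db/models');

// Sketch: fetch only the listed fields, e.g. '_id totpSecret backupCodes',
// so sensitive fields stay out of memory unless explicitly requested.
async function getUserById(userId, fieldsToSelect = null) {
  const query = User.findById(userId);
  if (fieldsToSelect != null) {
    query.select(fieldsToSelect);
  }
  return await query.lean();
}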
@@ -1,31 +1,46 @@
-const { logger } = require('@librechat/data-schemas');
-const { webSearchKeys, extractWebSearchEnvVars, normalizeHttpError } = require('@librechat/api');
+const { logger, webSearchKeys } = require('@librechat/data-schemas');
+const { Tools, CacheKeys, Constants, FileSources } = require('librechat-data-provider');
+const {
+  MCPOAuthHandler,
+  MCPTokenStorage,
+  normalizeHttpError,
+  extractWebSearchEnvVars,
+} = require('@librechat/api');
 const {
   getFiles,
+  findToken,
   updateUser,
   deleteFiles,
   deleteConvos,
   deletePresets,
   deleteMessages,
   deleteUserById,
+  deleteAllSharedLinks,
   deleteAllUserSessions,
 } = require('~/models');
 const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/services/PluginService');
 const { updateUserPluginsService, deleteUserKey } = require('~/server/services/UserService');
 const { verifyEmail, resendVerificationEmail } = require('~/server/services/AuthService');
 const { needsRefresh, getNewS3URL } = require('~/server/services/Files/S3/crud');
-const { Tools, Constants, FileSources } = require('librechat-data-provider');
 const { processDeleteRequest } = require('~/server/services/Files/process');
-const { Transaction, Balance, User } = require('~/db/models');
+const { Transaction, Balance, User, Token } = require('~/db/models');
+const { getMCPManager, getFlowStateManager } = require('~/config');
+const { getAppConfig } = require('~/server/services/Config');
 const { deleteToolCalls } = require('~/models/ToolCall');
-const { deleteAllSharedLinks } = require('~/models');
-const { getMCPManager } = require('~/config');
+const { getLogStores } = require('~/cache');

 const getUserController = async (req, res) => {
-  /** @type {MongoUser} */
+  const appConfig = await getAppConfig({ role: req.user?.role });
+  /** @type {IUser} */
   const userData = req.user.toObject != null ? req.user.toObject() : { ...req.user };
+  /**
+   * These fields should not exist due to secure field selection, but deletion
+   * is done in case of alternate database incompatibility with Mongo API
+   * */
+  delete userData.password;
   delete userData.totpSecret;
-  if (req.app.locals.fileStrategy === FileSources.s3 && userData.avatar) {
+  delete userData.backupCodes;
+  if (appConfig.fileStrategy === FileSources.s3 && userData.avatar) {
     const avatarNeedsRefresh = needsRefresh(userData.avatar, 3600);
     if (!avatarNeedsRefresh) {
       return res.status(200).send(userData);
@@ -81,6 +96,7 @@ const deleteUserFiles = async (req) => {
 };

 const updateUserPluginsController = async (req, res) => {
+  const appConfig = await getAppConfig({ role: req.user?.role });
   const { user } = req;
   const { pluginKey, action, auth, isEntityTool } = req.body;
   try {
@@ -125,7 +141,7 @@ const updateUserPluginsController = async (req, res) => {

     if (pluginKey === Tools.web_search) {
       /** @type {TCustomConfig['webSearch']} */
-      const webSearchConfig = req.app.locals?.webSearch;
+      const webSearchConfig = appConfig?.webSearch;
       keys = extractWebSearchEnvVars({
         keys: action === 'install' ? keys : webSearchKeys,
         config: webSearchConfig,
@@ -153,6 +169,15 @@ const updateUserPluginsController = async (req, res) => {
       );
       ({ status, message } = normalizeHttpError(authService));
     }
+    try {
+      // if the MCP server uses OAuth, perform a full cleanup and token revocation
+      await maybeUninstallOAuthMCP(user.id, pluginKey, appConfig);
+    } catch (error) {
+      logger.error(
+        `[updateUserPluginsController] Error uninstalling OAuth MCP for ${pluginKey}:`,
+        error,
+      );
+    }
   } else {
     // This handles:
     // 1. Web_search uninstall (keys will be populated with all webSearchKeys if auth was {}).
@@ -178,7 +203,7 @@ const updateUserPluginsController = async (req, res) => {
     // Extract server name from pluginKey (format: "mcp_<serverName>")
     const serverName = pluginKey.replace(Constants.mcp_prefix, '');
     logger.info(
-      `[updateUserPluginsController] Disconnecting MCP server ${serverName} for user ${user.id} after plugin auth update for ${pluginKey}.`,
+      `[updateUserPluginsController] Attempting disconnect of MCP server "${serverName}" for user ${user.id} after plugin auth update.`,
     );
     await mcpManager.disconnectUserConnection(user.id, serverName);
   }
@@ -260,6 +285,107 @@ const resendVerificationController = async (req, res) => {
   }
 };

+/**
+ * OAuth MCP specific uninstall logic
+ */
+const maybeUninstallOAuthMCP = async (userId, pluginKey, appConfig) => {
+  if (!pluginKey.startsWith(Constants.mcp_prefix)) {
+    // this is not an MCP server, so nothing to do here
+    return;
+  }
+
+  const serverName = pluginKey.replace(Constants.mcp_prefix, '');
+  const mcpManager = getMCPManager(userId);
+  const serverConfig = mcpManager.getRawConfig(serverName) ?? appConfig?.mcpServers?.[serverName];
+
+  if (!mcpManager.getOAuthServers().has(serverName)) {
+    // this server does not use OAuth, so nothing to do here as well
+    return;
+  }
+
+  // 1. get client info used for revocation (client id, secret)
+  const clientTokenData = await MCPTokenStorage.getClientInfoAndMetadata({
+    userId,
+    serverName,
+    findToken,
+  });
+  if (clientTokenData == null) {
+    return;
+  }
+  const { clientInfo, clientMetadata } = clientTokenData;
+
+  // 2. get decrypted tokens before deletion
+  const tokens = await MCPTokenStorage.getTokens({
+    userId,
+    serverName,
+    findToken,
+  });
+
+  // 3. revoke OAuth tokens at the provider
+  const revocationEndpoint =
+    serverConfig.oauth?.revocation_endpoint ?? clientMetadata.revocation_endpoint;
+  const revocationEndpointAuthMethodsSupported =
+    serverConfig.oauth?.revocation_endpoint_auth_methods_supported ??
+    clientMetadata.revocation_endpoint_auth_methods_supported;
+  const oauthHeaders = serverConfig.oauth_headers ?? {};
+
+  if (tokens?.access_token) {
+    try {
+      await MCPOAuthHandler.revokeOAuthToken(
+        serverName,
+        tokens.access_token,
+        'access',
+        {
+          serverUrl: serverConfig.url,
+          clientId: clientInfo.client_id,
+          clientSecret: clientInfo.client_secret ?? '',
+          revocationEndpoint,
+          revocationEndpointAuthMethodsSupported,
+        },
+        oauthHeaders,
+      );
+    } catch (error) {
+      logger.error(`Error revoking OAuth access token for ${serverName}:`, error);
+    }
+  }
+
+  if (tokens?.refresh_token) {
+    try {
+      await MCPOAuthHandler.revokeOAuthToken(
+        serverName,
+        tokens.refresh_token,
+        'refresh',
+        {
+          serverUrl: serverConfig.url,
+          clientId: clientInfo.client_id,
+          clientSecret: clientInfo.client_secret ?? '',
+          revocationEndpoint,
+          revocationEndpointAuthMethodsSupported,
+        },
+        oauthHeaders,
+      );
+    } catch (error) {
+      logger.error(`Error revoking OAuth refresh token for ${serverName}:`, error);
+    }
+  }
+
+  // 4. delete tokens from the DB after revocation attempts
+  await MCPTokenStorage.deleteUserTokens({
+    userId,
+    serverName,
+    deleteToken: async (filter) => {
+      await Token.deleteOne(filter);
+    },
+  });
+
+  // 5. clear the flow state for the OAuth tokens
+  const flowsCache = getLogStores(CacheKeys.FLOWS);
+  const flowManager = getFlowStateManager(flowsCache);
+  const flowId = MCPOAuthHandler.generateFlowId(userId, serverName);
+  await flowManager.deleteFlow(flowId, 'mcp_get_tokens');
+  await flowManager.deleteFlow(flowId, 'mcp_oauth');
+};
+
 module.exports = {
   getUserController,
   getTermsStatusController,
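For context on step 3 of maybeUninstallOAuthMCP: MCPOAuthHandler.revokeOAuthToken is assumed to issue an RFC 7009 style revocation request to the provider. A hypothetical sketch of such a request, using client_secret_basic auth and Node 18+ global fetch (the real helper's signature and behavior may differ):

// Hypothetical sketch of an RFC 7009 token revocation POST.
async function revokeTokenSketch(
  { revocationEndpoint, token, tokenTypeHint, clientId, clientSecret },
  extraHeaders = {},
) {
  const body = new URLSearchParams({ token, token_type_hint: tokenTypeHint });
  // Basic auth is one option; the endpoint's
  // revocation_endpoint_auth_methods_supported may require another method.
  const basicAuth = Buffer.from(`${clientId}:${clientSecret}`).toString('base64');
  const response = await fetch(revocationEndpoint, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      Authorization: `Basic ${basicAuth}`,
      ...extraHeaders,
    },
    body,
  });
  if (!response.ok) {
    throw new Error(`Token revocation failed: HTTP ${response.status}`);
  }
}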
342	api/server/controllers/agents/__tests__/callbacks.spec.js	Normal file
@@ -0,0 +1,342 @@
const { Tools } = require('librechat-data-provider');

// Mock all dependencies before requiring the module
jest.mock('nanoid', () => ({
  nanoid: jest.fn(() => 'mock-id'),
}));

jest.mock('@librechat/api', () => ({
  sendEvent: jest.fn(),
}));

jest.mock('@librechat/data-schemas', () => ({
  logger: {
    error: jest.fn(),
  },
}));

jest.mock('@librechat/agents', () => ({
  EnvVar: { CODE_API_KEY: 'CODE_API_KEY' },
  Providers: { GOOGLE: 'google' },
  GraphEvents: {},
  getMessageId: jest.fn(),
  ToolEndHandler: jest.fn(),
  handleToolCalls: jest.fn(),
  ChatModelStreamHandler: jest.fn(),
}));

jest.mock('~/server/services/Files/Citations', () => ({
  processFileCitations: jest.fn(),
}));

jest.mock('~/server/services/Files/Code/process', () => ({
  processCodeOutput: jest.fn(),
}));

jest.mock('~/server/services/Tools/credentials', () => ({
  loadAuthValues: jest.fn(),
}));

jest.mock('~/server/services/Files/process', () => ({
  saveBase64Image: jest.fn(),
}));

describe('createToolEndCallback', () => {
  let req, res, artifactPromises, createToolEndCallback;
  let logger;

  beforeEach(() => {
    jest.clearAllMocks();

    // Get the mocked logger
    logger = require('@librechat/data-schemas').logger;

    // Now require the module after all mocks are set up
    const callbacks = require('../callbacks');
    createToolEndCallback = callbacks.createToolEndCallback;

    req = {
      user: { id: 'user123' },
    };
    res = {
      headersSent: false,
      write: jest.fn(),
    };
    artifactPromises = [];
  });

  describe('ui_resources artifact handling', () => {
    it('should process ui_resources artifact and return attachment when headers not sent', async () => {
      const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });

      const output = {
        tool_call_id: 'tool123',
        artifact: {
          [Tools.ui_resources]: {
            data: {
              0: { type: 'button', label: 'Click me' },
              1: { type: 'input', placeholder: 'Enter text' },
            },
          },
        },
      };

      const metadata = {
        run_id: 'run456',
        thread_id: 'thread789',
      };

      await toolEndCallback({ output }, metadata);

      // Wait for all promises to resolve
      const results = await Promise.all(artifactPromises);

      // When headers are not sent, it returns attachment without writing
      expect(res.write).not.toHaveBeenCalled();

      const attachment = results[0];
      expect(attachment).toEqual({
        type: Tools.ui_resources,
        messageId: 'run456',
        toolCallId: 'tool123',
        conversationId: 'thread789',
        [Tools.ui_resources]: {
          0: { type: 'button', label: 'Click me' },
          1: { type: 'input', placeholder: 'Enter text' },
        },
      });
    });

    it('should write to response when headers are already sent', async () => {
      res.headersSent = true;
      const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });

      const output = {
        tool_call_id: 'tool123',
        artifact: {
          [Tools.ui_resources]: {
            data: {
              0: { type: 'carousel', items: [] },
            },
          },
        },
      };

      const metadata = {
        run_id: 'run456',
        thread_id: 'thread789',
      };

      await toolEndCallback({ output }, metadata);
      const results = await Promise.all(artifactPromises);

      expect(res.write).toHaveBeenCalled();
      expect(results[0]).toEqual({
        type: Tools.ui_resources,
        messageId: 'run456',
        toolCallId: 'tool123',
        conversationId: 'thread789',
        [Tools.ui_resources]: {
          0: { type: 'carousel', items: [] },
        },
      });
    });

    it('should handle errors when processing ui_resources', async () => {
      const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });

      // Mock res.write to throw an error
      res.headersSent = true;
      res.write.mockImplementation(() => {
        throw new Error('Write failed');
      });

      const output = {
        tool_call_id: 'tool123',
        artifact: {
          [Tools.ui_resources]: {
            data: {
              0: { type: 'test' },
            },
          },
        },
      };

      const metadata = {
        run_id: 'run456',
        thread_id: 'thread789',
      };

      await toolEndCallback({ output }, metadata);
      const results = await Promise.all(artifactPromises);

      expect(logger.error).toHaveBeenCalledWith(
        'Error processing artifact content:',
        expect.any(Error),
      );
      expect(results[0]).toBeNull();
    });

    it('should handle multiple artifacts including ui_resources', async () => {
      const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });

      const output = {
        tool_call_id: 'tool123',
        artifact: {
          [Tools.ui_resources]: {
            data: {
              0: { type: 'chart', data: [] },
            },
          },
          [Tools.web_search]: {
            results: ['result1', 'result2'],
          },
        },
      };

      const metadata = {
        run_id: 'run456',
        thread_id: 'thread789',
      };

      await toolEndCallback({ output }, metadata);
      const results = await Promise.all(artifactPromises);

      // Both ui_resources and web_search should be processed
      expect(artifactPromises).toHaveLength(2);
      expect(results).toHaveLength(2);

      // Check ui_resources attachment
      const uiResourceAttachment = results.find((r) => r?.type === Tools.ui_resources);
      expect(uiResourceAttachment).toBeTruthy();
      expect(uiResourceAttachment[Tools.ui_resources]).toEqual({
        0: { type: 'chart', data: [] },
      });

      // Check web_search attachment
      const webSearchAttachment = results.find((r) => r?.type === Tools.web_search);
      expect(webSearchAttachment).toBeTruthy();
      expect(webSearchAttachment[Tools.web_search]).toEqual({
        results: ['result1', 'result2'],
      });
    });

    it('should not process artifacts when output has no artifacts', async () => {
      const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });

      const output = {
        tool_call_id: 'tool123',
        content: 'Some regular content',
        // No artifact property
      };

      const metadata = {
        run_id: 'run456',
        thread_id: 'thread789',
      };

      await toolEndCallback({ output }, metadata);

      expect(artifactPromises).toHaveLength(0);
      expect(res.write).not.toHaveBeenCalled();
    });
  });

  describe('edge cases', () => {
    it('should handle empty ui_resources data object', async () => {
      const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });

      const output = {
        tool_call_id: 'tool123',
        artifact: {
          [Tools.ui_resources]: {
            data: {},
          },
        },
      };

      const metadata = {
        run_id: 'run456',
        thread_id: 'thread789',
      };

      await toolEndCallback({ output }, metadata);
      const results = await Promise.all(artifactPromises);

      expect(results[0]).toEqual({
        type: Tools.ui_resources,
        messageId: 'run456',
        toolCallId: 'tool123',
        conversationId: 'thread789',
        [Tools.ui_resources]: {},
      });
    });

    it('should handle ui_resources with complex nested data', async () => {
      const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });

      const complexData = {
        0: {
          type: 'form',
          fields: [
            { name: 'field1', type: 'text', required: true },
            { name: 'field2', type: 'select', options: ['a', 'b', 'c'] },
          ],
          nested: {
            deep: {
              value: 123,
              array: [1, 2, 3],
            },
          },
        },
      };

      const output = {
        tool_call_id: 'tool123',
        artifact: {
          [Tools.ui_resources]: {
            data: complexData,
          },
        },
      };

      const metadata = {
        run_id: 'run456',
        thread_id: 'thread789',
      };

      await toolEndCallback({ output }, metadata);
      const results = await Promise.all(artifactPromises);

      expect(results[0][Tools.ui_resources]).toEqual(complexData);
    });

    it('should handle when output is undefined', async () => {
      const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });

      const metadata = {
        run_id: 'run456',
        thread_id: 'thread789',
      };

      await toolEndCallback({ output: undefined }, metadata);

      expect(artifactPromises).toHaveLength(0);
      expect(res.write).not.toHaveBeenCalled();
    });

    it('should handle when data parameter is undefined', async () => {
      const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });

      const metadata = {
        run_id: 'run456',
        thread_id: 'thread789',
      };

      await toolEndCallback(undefined, metadata);

      expect(artifactPromises).toHaveLength(0);
      expect(res.write).not.toHaveBeenCalled();
    });
  });
});
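A note on the pattern this new spec relies on: jest.mock calls are hoisted above imports, and the module under test is required inside beforeEach so it binds to the mocked dependencies after jest.clearAllMocks() runs. A minimal sketch of the same setup:

jest.mock('@librechat/data-schemas', () => ({
  logger: { error: jest.fn() },
}));

describe('example', () => {
  let createToolEndCallback;

  beforeEach(() => {
    jest.clearAllMocks();
    // Require after mocks are registered so the module sees the mocked deps.
    ({ createToolEndCallback } = require('../callbacks'));
  });
});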
@@ -158,7 +158,7 @@ describe('duplicateAgent', () => {
    });
  });

- it('should handle tool_resources.ocr correctly', async () => {
+ it('should convert `tool_resources.ocr` to `tool_resources.context`', async () => {
    const mockAgent = {
      id: 'agent_123',
      name: 'Test Agent',
@@ -178,7 +178,7 @@ describe('duplicateAgent', () => {
    expect(createAgent).toHaveBeenCalledWith(
      expect.objectContaining({
        tool_resources: {
-         ocr: { enabled: true, config: 'test' },
+         context: { enabled: true, config: 'test' },
        },
      }),
    );
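The renamed test asserts that duplicating an agent carries legacy `ocr` tool resources over under the newer `context` key. A sketch of that mapping (the actual duplicateAgent logic may differ):

// Sketch: move legacy `ocr` tool resources under the newer `context` key.
function migrateToolResources(toolResources = {}) {
  const { ocr, ...rest } = toolResources;
  return ocr == null ? toolResources : { ...rest, context: ocr };
}

// migrateToolResources({ ocr: { enabled: true, config: 'test' } })
// -> { context: { enabled: true, config: 'test' } }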
@@ -11,6 +11,7 @@ const {
  handleToolCalls,
  ChatModelStreamHandler,
} = require('@librechat/agents');
+const { processFileCitations } = require('~/server/services/Files/Citations');
 const { processCodeOutput } = require('~/server/services/Files/Code/process');
 const { loadAuthValues } = require('~/server/services/Tools/credentials');
 const { saveBase64Image } = require('~/server/services/Files/process');
@@ -238,6 +239,56 @@ function createToolEndCallback({ req, res, artifactPromises }) {
      return;
    }

+   if (output.artifact[Tools.file_search]) {
+     artifactPromises.push(
+       (async () => {
+         const user = req.user;
+         const attachment = await processFileCitations({
+           user,
+           metadata,
+           appConfig: req.config,
+           toolArtifact: output.artifact,
+           toolCallId: output.tool_call_id,
+         });
+         if (!attachment) {
+           return null;
+         }
+         if (!res.headersSent) {
+           return attachment;
+         }
+         res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
+         return attachment;
+       })().catch((error) => {
+         logger.error('Error processing file citations:', error);
+         return null;
+       }),
+     );
+   }
+
+   // TODO: a lot of duplicated code in createToolEndCallback
+   // we should refactor this to use a helper function in a follow-up PR
+   if (output.artifact[Tools.ui_resources]) {
+     artifactPromises.push(
+       (async () => {
+         const attachment = {
+           type: Tools.ui_resources,
+           messageId: metadata.run_id,
+           toolCallId: output.tool_call_id,
+           conversationId: metadata.thread_id,
+           [Tools.ui_resources]: output.artifact[Tools.ui_resources].data,
+         };
+         if (!res.headersSent) {
+           return attachment;
+         }
+         res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
+         return attachment;
+       })().catch((error) => {
+         logger.error('Error processing artifact content:', error);
+         return null;
+       }),
+     );
+   }
+
    if (output.artifact[Tools.web_search]) {
      artifactPromises.push(
        (async () => {
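The `event: attachment` writes above follow server-sent events framing (blank-line-separated frames of `event:` and `data:` fields). A hedged sketch of how a client that already reads the response stream could pick these frames out; the real client code is not shown here:

// Sketch: parse `event: attachment` frames from an SSE-style text chunk.
function handleSSEChunk(chunk, onAttachment) {
  for (const frame of chunk.split('\n\n')) {
    const eventMatch = frame.match(/^event: (.+)$/m);
    const dataMatch = frame.match(/^data: (.+)$/m);
    if (eventMatch?.[1] === 'attachment' && dataMatch) {
      onAttachment(JSON.parse(dataMatch[1]));
    }
  }
}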
@@ -7,8 +7,12 @@ const {
|
|||||||
createRun,
|
createRun,
|
||||||
Tokenizer,
|
Tokenizer,
|
||||||
checkAccess,
|
checkAccess,
|
||||||
|
logAxiosError,
|
||||||
|
resolveHeaders,
|
||||||
|
getBalanceConfig,
|
||||||
memoryInstructions,
|
memoryInstructions,
|
||||||
formatContentStrings,
|
formatContentStrings,
|
||||||
|
getTransactionsConfig,
|
||||||
createMemoryProcessor,
|
createMemoryProcessor,
|
||||||
} = require('@librechat/api');
|
} = require('@librechat/api');
|
||||||
const {
|
const {
|
||||||
@@ -33,18 +37,13 @@ const {
|
|||||||
bedrockInputSchema,
|
bedrockInputSchema,
|
||||||
removeNullishValues,
|
removeNullishValues,
|
||||||
} = require('librechat-data-provider');
|
} = require('librechat-data-provider');
|
||||||
const {
|
|
||||||
findPluginAuthsByKeys,
|
|
||||||
getFormattedMemories,
|
|
||||||
deleteMemory,
|
|
||||||
setMemory,
|
|
||||||
} = require('~/models');
|
|
||||||
const { getMCPAuthMap, checkCapability, hasCustomUserVars } = require('~/server/services/Config');
|
|
||||||
const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
|
const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
|
||||||
const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
|
const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
|
||||||
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
|
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
|
||||||
|
const { getFormattedMemories, deleteMemory, setMemory } = require('~/models');
|
||||||
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
|
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
|
||||||
const { getProviderConfig } = require('~/server/services/Endpoints');
|
const { getProviderConfig } = require('~/server/services/Endpoints');
|
||||||
|
const { checkCapability } = require('~/server/services/Config');
|
||||||
const BaseClient = require('~/app/clients/BaseClient');
|
const BaseClient = require('~/app/clients/BaseClient');
|
||||||
const { getRoleByName } = require('~/models/Role');
|
const { getRoleByName } = require('~/models/Role');
|
||||||
const { loadAgent } = require('~/models/Agent');
|
const { loadAgent } = require('~/models/Agent');
|
||||||
@@ -90,11 +89,10 @@ function createTokenCounter(encoding) {
 }
 
 function logToolError(graph, error, toolId) {
-  logger.error(
-    '[api/server/controllers/agents/client.js #chatCompletion] Tool Error',
+  logAxiosError({
     error,
-    toolId,
-  );
+    message: `[api/server/controllers/agents/client.js #chatCompletion] Tool Error "${toolId}"`,
+  });
 }
 
 class AgentClient extends BaseClient {
@@ -213,16 +211,13 @@ class AgentClient extends BaseClient {
    * @returns {Promise<Array<Partial<MongoFile>>>}
    */
   async addImageURLs(message, attachments) {
-    const { files, text, image_urls } = await encodeAndFormat(
+    const { files, image_urls } = await encodeAndFormat(
       this.options.req,
       attachments,
       this.options.agent.provider,
       VisionModes.agents,
     );
     message.image_urls = image_urls.length ? image_urls : undefined;
-    if (text && text.length) {
-      message.ocr = text;
-    }
     return files;
   }
 
@@ -250,19 +245,18 @@ class AgentClient extends BaseClient {
 
     if (this.options.attachments) {
       const attachments = await this.options.attachments;
+      const latestMessage = orderedMessages[orderedMessages.length - 1];
 
       if (this.message_file_map) {
-        this.message_file_map[orderedMessages[orderedMessages.length - 1].messageId] = attachments;
+        this.message_file_map[latestMessage.messageId] = attachments;
       } else {
         this.message_file_map = {
-          [orderedMessages[orderedMessages.length - 1].messageId]: attachments,
+          [latestMessage.messageId]: attachments,
         };
       }
 
-      const files = await this.addImageURLs(
-        orderedMessages[orderedMessages.length - 1],
-        attachments,
-      );
+      await this.addFileContextToMessage(latestMessage, attachments);
+      const files = await this.processAttachments(latestMessage, attachments);
 
       this.options.attachments = files;
     }
@@ -282,21 +276,21 @@ class AgentClient extends BaseClient {
         assistantName: this.options?.modelLabel,
       });
 
-      if (message.ocr && i !== orderedMessages.length - 1) {
+      if (message.fileContext && i !== orderedMessages.length - 1) {
         if (typeof formattedMessage.content === 'string') {
-          formattedMessage.content = message.ocr + '\n' + formattedMessage.content;
+          formattedMessage.content = message.fileContext + '\n' + formattedMessage.content;
         } else {
           const textPart = formattedMessage.content.find((part) => part.type === 'text');
           textPart
-            ? (textPart.text = message.ocr + '\n' + textPart.text)
-            : formattedMessage.content.unshift({ type: 'text', text: message.ocr });
+            ? (textPart.text = message.fileContext + '\n' + textPart.text)
+            : formattedMessage.content.unshift({ type: 'text', text: message.fileContext });
         }
-      } else if (message.ocr && i === orderedMessages.length - 1) {
-        systemContent = [systemContent, message.ocr].join('\n');
+      } else if (message.fileContext && i === orderedMessages.length - 1) {
+        systemContent = [systemContent, message.fileContext].join('\n');
       }
 
       const needsTokenCount =
-        (this.contextStrategy && !orderedMessages[i].tokenCount) || message.ocr;
+        (this.contextStrategy && !orderedMessages[i].tokenCount) || message.fileContext;
 
       /* If tokens were never counted, or, is a Vision request and the message has files, count again */
       if (needsTokenCount || (this.isVisionModel && (message.image_urls || message.files))) {
@@ -451,8 +445,8 @@ class AgentClient extends BaseClient {
       );
       return;
     }
-    /** @type {TCustomConfig['memory']} */
-    const memoryConfig = this.options.req?.app?.locals?.memory;
+    const appConfig = this.options.req.config;
+    const memoryConfig = appConfig.memory;
     if (!memoryConfig || memoryConfig.disabled === true) {
       return;
     }
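This hunk shows the pattern that recurs through the rest of the diff: per-request reads of `req.app.locals` are replaced by a single `AppConfig` object exposed as `req.config`. A minimal sketch of the access change (the middleware attaching `req.config` is an assumption about setup code outside this diff):

    // Before: config sections scattered across Express app locals
    // const memoryConfig = req.app.locals.memory;

    // After: one AppConfig attached to each request, e.g. via middleware
    app.use((req, _res, next) => {
      req.config = appConfig; // AppConfig built once at startup (assumed)
      next();
    });

    // Call sites then read namespaced sections off req.config:
    const memoryConfig = req.config.memory;
    const agentsConfig = req.config.endpoints?.[EModelEndpoint.agents];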
@@ -460,7 +454,7 @@ class AgentClient extends BaseClient {
     /** @type {Agent} */
     let prelimAgent;
     const allowedProviders = new Set(
-      this.options.req?.app?.locals?.[EModelEndpoint.agents]?.allowedProviders,
+      appConfig?.endpoints?.[EModelEndpoint.agents]?.allowedProviders,
     );
     try {
       if (memoryConfig.agent?.id != null && memoryConfig.agent.id !== this.options.agent.id) {
@@ -582,8 +576,8 @@ class AgentClient extends BaseClient {
     if (this.processMemory == null) {
       return;
     }
-    /** @type {TCustomConfig['memory']} */
-    const memoryConfig = this.options.req?.app?.locals?.memory;
+    const appConfig = this.options.req.config;
+    const memoryConfig = appConfig.memory;
     const messageWindowSize = memoryConfig?.messageWindowSize ?? 5;
 
     let messagesToProcess = [...messages];
@@ -615,6 +609,7 @@ class AgentClient extends BaseClient {
     await this.chatCompletion({
       payload,
       onProgress: opts.onProgress,
+      userMCPAuthMap: opts.userMCPAuthMap,
       abortController: opts.abortController,
     });
     return this.contentParts;
@@ -624,9 +619,17 @@ class AgentClient extends BaseClient {
    * @param {Object} params
    * @param {string} [params.model]
    * @param {string} [params.context='message']
+   * @param {AppConfig['balance']} [params.balance]
+   * @param {AppConfig['transactions']} [params.transactions]
    * @param {UsageMetadata[]} [params.collectedUsage=this.collectedUsage]
    */
-  async recordCollectedUsage({ model, context = 'message', collectedUsage = this.collectedUsage }) {
+  async recordCollectedUsage({
+    model,
+    balance,
+    transactions,
+    context = 'message',
+    collectedUsage = this.collectedUsage,
+  }) {
     if (!collectedUsage || !collectedUsage.length) {
       return;
     }
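`recordCollectedUsage` now receives the balance and transactions configs explicitly instead of re-deriving them from app state. The call shape, mirroring the call site later in this diff (`client` stands in for an AgentClient instance):

    const balanceConfig = getBalanceConfig(appConfig);
    const transactionsConfig = getTransactionsConfig(appConfig);
    await client.recordCollectedUsage({
      context: 'message',
      balance: balanceConfig, // forwarded into transaction metadata
      transactions: transactionsConfig, // controls whether transactions are written
    });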
@@ -648,6 +651,8 @@ class AgentClient extends BaseClient {
 
     const txMetadata = {
       context,
+      balance,
+      transactions,
       conversationId: this.conversationId,
       user: this.user ?? this.options.req.user?.id,
       endpointTokenConfig: this.options.endpointTokenConfig,
@@ -747,7 +752,13 @@ class AgentClient extends BaseClient {
     return currentMessageTokens > 0 ? currentMessageTokens : originalEstimate;
   }
 
-  async chatCompletion({ payload, abortController = null }) {
+  /**
+   * @param {object} params
+   * @param {string | ChatCompletionMessageParam[]} params.payload
+   * @param {Record<string, Record<string, string>>} [params.userMCPAuthMap]
+   * @param {AbortController} [params.abortController]
+   */
+  async chatCompletion({ payload, userMCPAuthMap, abortController = null }) {
     /** @type {Partial<GraphRunnableConfig>} */
     let config;
     /** @type {ReturnType<createRun>} */
@@ -759,8 +770,9 @@ class AgentClient extends BaseClient {
       abortController = new AbortController();
     }
 
-    /** @type {TCustomConfig['endpoints']['agents']} */
-    const agentsEConfig = this.options.req.app.locals[EModelEndpoint.agents];
+    const appConfig = this.options.req.config;
+    /** @type {AppConfig['endpoints']['agents']} */
+    const agentsEConfig = appConfig.endpoints?.[EModelEndpoint.agents];
 
     config = {
       configurable: {
@@ -768,6 +780,11 @@ class AgentClient extends BaseClient {
         last_agent_index: this.agentConfigs?.size ?? 0,
         user_id: this.user ?? this.options.req.user?.id,
         hide_sequential_outputs: this.options.agent.hide_sequential_outputs,
+        requestBody: {
+          messageId: this.responseMessageId,
+          conversationId: this.conversationId,
+          parentMessageId: this.parentMessageId,
+        },
         user: this.options.req.user,
       },
       recursionLimit: agentsEConfig?.recursionLimit ?? 25,
@@ -838,7 +855,7 @@ class AgentClient extends BaseClient {
 
       if (noSystemMessages === true && systemContent?.length) {
         const latestMessageContent = _messages.pop().content;
-        if (typeof latestMessage !== 'string') {
+        if (typeof latestMessageContent !== 'string') {
           latestMessageContent[0].text = [systemContent, latestMessageContent[0].text].join('\n');
           _messages.push(new HumanMessage({ content: latestMessageContent }));
         } else {
@@ -851,11 +868,10 @@ class AgentClient extends BaseClient {
       if (agent.useLegacyContent === true) {
         messages = formatContentStrings(messages);
       }
-      if (
-        agent.model_parameters?.clientOptions?.defaultHeaders?.['anthropic-beta']?.includes(
-          'prompt-caching',
-        )
-      ) {
+      const defaultHeaders =
+        agent.model_parameters?.clientOptions?.defaultHeaders ??
+        agent.model_parameters?.configuration?.defaultHeaders;
+      if (defaultHeaders?.['anthropic-beta']?.includes('prompt-caching')) {
         messages = addCacheControl(messages);
       }
 
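The rewritten condition first coalesces the two locations where default headers may live, then checks the merged result once. A small sketch of why the `??` ordering matters (values illustrative):

    // clientOptions headers win when present; configuration headers are the fallback.
    const model_parameters = {
      clientOptions: { defaultHeaders: { 'anthropic-beta': 'prompt-caching-2024-07-31' } },
      configuration: { defaultHeaders: {} },
    };
    const defaultHeaders =
      model_parameters.clientOptions?.defaultHeaders ??
      model_parameters.configuration?.defaultHeaders;
    console.log(defaultHeaders?.['anthropic-beta']?.includes('prompt-caching')); // true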
@@ -863,6 +879,16 @@ class AgentClient extends BaseClient {
         memoryPromise = this.runMemory(messages);
       }
 
+      /** Resolve request-based headers for Custom Endpoints. Note: if this is added to
+       * non-custom endpoints, needs consideration of varying provider header configs.
+       */
+      if (agent.model_parameters?.configuration?.defaultHeaders != null) {
+        agent.model_parameters.configuration.defaultHeaders = resolveHeaders({
+          headers: agent.model_parameters.configuration.defaultHeaders,
+          body: config.configurable.requestBody,
+        });
+      }
+
       run = await createRun({
         agent,
         req: this.options.req,
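`resolveHeaders` substitutes request-scoped values into the literal header strings from the endpoint config, using the `requestBody` assembled into `config.configurable` earlier in this diff. A sketch of the intended effect; the `{{LIBRECHAT_BODY_CONVERSATIONID}}` placeholder follows LibreChat's custom-endpoint header convention, and the exact substitution rules should be treated as an assumption here:

    // A custom endpoint config might declare:
    const headers = { 'X-Conversation-Id': '{{LIBRECHAT_BODY_CONVERSATIONID}}' };

    // After resolveHeaders({ headers, body: { conversationId: 'abc-123', ... } }),
    // the outbound request would carry the concrete value:
    // { 'X-Conversation-Id': 'abc-123' }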
@@ -898,21 +924,9 @@ class AgentClient extends BaseClient {
         run.Graph.contentData = contentData;
       }
 
-      try {
-        if (await hasCustomUserVars()) {
-          config.configurable.userMCPAuthMap = await getMCPAuthMap({
-            tools: agent.tools,
-            userId: this.options.req.user.id,
-            findPluginAuthsByKeys,
-          });
-        }
-      } catch (err) {
-        logger.error(
-          `[api/server/controllers/agents/client.js #chatCompletion] Error getting custom user vars for agent ${agent.id}`,
-          err,
-        );
+      if (userMCPAuthMap != null) {
+        config.configurable.userMCPAuthMap = userMCPAuthMap;
       }
 
       await run.processStream({ messages }, config, {
         keepContent: i !== 0,
         tokenCounter: createTokenCounter(this.getEncoding()),
@@ -1034,7 +1048,14 @@ class AgentClient extends BaseClient {
       if (attachments && attachments.length > 0) {
         this.artifactPromises.push(...attachments);
       }
-      await this.recordCollectedUsage({ context: 'message' });
+
+      const balanceConfig = getBalanceConfig(appConfig);
+      const transactionsConfig = getTransactionsConfig(appConfig);
+      await this.recordCollectedUsage({
+        context: 'message',
+        balance: balanceConfig,
+        transactions: transactionsConfig,
+      });
     } catch (err) {
       logger.error(
         '[api/server/controllers/agents/client.js #chatCompletion] Error recording collected usage',
@@ -1075,37 +1096,49 @@ class AgentClient extends BaseClient {
     }
     const { handleLLMEnd, collected: collectedMetadata } = createMetadataAggregator();
     const { req, res, agent } = this.options;
+    const appConfig = req.config;
     let endpoint = agent.endpoint;
 
     /** @type {import('@librechat/agents').ClientOptions} */
     let clientOptions = {
-      maxTokens: 75,
       model: agent.model || agent.model_parameters.model,
     };
 
-    let titleProviderConfig = await getProviderConfig(endpoint);
+    let titleProviderConfig = getProviderConfig({ provider: endpoint, appConfig });
 
     /** @type {TEndpoint | undefined} */
     const endpointConfig =
-      req.app.locals.all ?? req.app.locals[endpoint] ?? titleProviderConfig.customEndpointConfig;
+      appConfig.endpoints?.all ??
+      appConfig.endpoints?.[endpoint] ??
+      titleProviderConfig.customEndpointConfig;
     if (!endpointConfig) {
-      logger.warn(
-        '[api/server/controllers/agents/client.js #titleConvo] Error getting endpoint config',
+      logger.debug(
+        `[api/server/controllers/agents/client.js #titleConvo] No endpoint config for "${endpoint}"`,
       );
     }
 
+    if (endpointConfig?.titleConvo === false) {
+      logger.debug(
+        `[api/server/controllers/agents/client.js #titleConvo] Title generation disabled for endpoint "${endpoint}"`,
+      );
+      return;
+    }
+
     if (endpointConfig?.titleEndpoint && endpointConfig.titleEndpoint !== endpoint) {
       try {
-        titleProviderConfig = await getProviderConfig(endpointConfig.titleEndpoint);
+        titleProviderConfig = getProviderConfig({
+          provider: endpointConfig.titleEndpoint,
+          appConfig,
+        });
         endpoint = endpointConfig.titleEndpoint;
       } catch (error) {
         logger.warn(
-          `[api/server/controllers/agents/client.js #titleConvo] Error getting title endpoint config for ${endpointConfig.titleEndpoint}, falling back to default`,
+          `[api/server/controllers/agents/client.js #titleConvo] Error getting title endpoint config for "${endpointConfig.titleEndpoint}", falling back to default`,
          error,
        );
        // Fall back to original provider config
        endpoint = agent.endpoint;
-        titleProviderConfig = await getProviderConfig(endpoint);
+        titleProviderConfig = getProviderConfig({ provider: endpoint, appConfig });
       }
     }
 
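The new `titleConvo === false` early return lets administrators disable conversation titling per endpoint, or globally through the `all` block. A sketch of the config shape the check consumes, assigned to a mock `req` the same way the test fixtures later in this diff do:

    req.config = {
      endpoints: {
        all: { titleConvo: false }, // disables titling for every endpoint, or...
        openAI: { titleConvo: false }, // ...only for a specific one
      },
    };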
@@ -1146,15 +1179,13 @@ class AgentClient extends BaseClient {
       clientOptions.configuration = options.configOptions;
     }
 
-    const shouldRemoveMaxTokens = /\b(o\d|gpt-[5-9])\b/i.test(clientOptions.model);
-    if (shouldRemoveMaxTokens && clientOptions.maxTokens != null) {
+    if (clientOptions.maxTokens != null) {
       delete clientOptions.maxTokens;
-    } else if (!shouldRemoveMaxTokens && !clientOptions.maxTokens) {
-      clientOptions.maxTokens = 75;
     }
-    if (shouldRemoveMaxTokens && clientOptions?.modelKwargs?.max_completion_tokens != null) {
+    if (clientOptions?.modelKwargs?.max_completion_tokens != null) {
       delete clientOptions.modelKwargs.max_completion_tokens;
-    } else if (shouldRemoveMaxTokens && clientOptions?.modelKwargs?.max_output_tokens != null) {
+    }
+    if (clientOptions?.modelKwargs?.max_output_tokens != null) {
       delete clientOptions.modelKwargs.max_output_tokens;
     }
 
@@ -1172,6 +1203,20 @@ class AgentClient extends BaseClient {
       clientOptions.json = true;
     }
 
+    /** Resolve request-based headers for Custom Endpoints. Note: if this is added to
+     * non-custom endpoints, needs consideration of varying provider header configs.
+     */
+    if (clientOptions?.configuration?.defaultHeaders != null) {
+      clientOptions.configuration.defaultHeaders = resolveHeaders({
+        headers: clientOptions.configuration.defaultHeaders,
+        body: {
+          messageId: this.responseMessageId,
+          conversationId: this.conversationId,
+          parentMessageId: this.parentMessageId,
+        },
+      });
+    }
+
     try {
       const titleResult = await this.run.generateTitle({
         provider,
@@ -1210,10 +1255,14 @@ class AgentClient extends BaseClient {
         };
       });
 
+      const balanceConfig = getBalanceConfig(appConfig);
+      const transactionsConfig = getTransactionsConfig(appConfig);
       await this.recordCollectedUsage({
-        model: clientOptions.model,
-        context: 'title',
         collectedUsage,
+        context: 'title',
+        model: clientOptions.model,
+        balance: balanceConfig,
+        transactions: transactionsConfig,
       }).catch((err) => {
         logger.error(
           '[api/server/controllers/agents/client.js #titleConvo] Error recording collected usage',
@@ -1232,17 +1281,26 @@ class AgentClient extends BaseClient {
    * @param {object} params
    * @param {number} params.promptTokens
    * @param {number} params.completionTokens
-   * @param {OpenAIUsageMetadata} [params.usage]
    * @param {string} [params.model]
+   * @param {OpenAIUsageMetadata} [params.usage]
+   * @param {AppConfig['balance']} [params.balance]
    * @param {string} [params.context='message']
    * @returns {Promise<void>}
    */
-  async recordTokenUsage({ model, promptTokens, completionTokens, usage, context = 'message' }) {
+  async recordTokenUsage({
+    model,
+    usage,
+    balance,
+    promptTokens,
+    completionTokens,
+    context = 'message',
+  }) {
     try {
       await spendTokens(
         {
           model,
           context,
+          balance,
           conversationId: this.conversationId,
           user: this.user ?? this.options.req.user?.id,
           endpointTokenConfig: this.options.endpointTokenConfig,
@@ -1259,6 +1317,7 @@ class AgentClient extends BaseClient {
         await spendTokens(
           {
             model,
+            balance,
             context: 'reasoning',
             conversationId: this.conversationId,
             user: this.user ?? this.options.req.user?.id,
@@ -41,8 +41,16 @@ describe('AgentClient - titleConvo', () => {
 
     // Mock request and response
     mockReq = {
-      app: {
-        locals: {
+      user: {
+        id: 'user-123',
+      },
+      body: {
+        model: 'gpt-4',
+        endpoint: EModelEndpoint.openAI,
+        key: null,
+      },
+      config: {
+        endpoints: {
           [EModelEndpoint.openAI]: {
             // Match the agent endpoint
             titleModel: 'gpt-3.5-turbo',
@@ -52,14 +60,6 @@ describe('AgentClient - titleConvo', () => {
           },
         },
       },
-      user: {
-        id: 'user-123',
-      },
-      body: {
-        model: 'gpt-4',
-        endpoint: EModelEndpoint.openAI,
-        key: null,
-      },
     };
 
     mockRes = {};
@@ -143,7 +143,7 @@ describe('AgentClient - titleConvo', () => {
 
     it('should handle missing endpoint config gracefully', async () => {
       // Remove endpoint config
-      mockReq.app.locals[EModelEndpoint.openAI] = undefined;
+      mockReq.config = { endpoints: {} };
 
       const text = 'Test conversation text';
       const abortController = new AbortController();
@@ -161,7 +161,16 @@ describe('AgentClient - titleConvo', () => {
 
     it('should use agent model when titleModel is not provided', async () => {
       // Remove titleModel from config
-      delete mockReq.app.locals[EModelEndpoint.openAI].titleModel;
+      mockReq.config = {
+        endpoints: {
+          [EModelEndpoint.openAI]: {
+            titlePrompt: 'Custom title prompt',
+            titleMethod: 'structured',
+            titlePromptTemplate: 'Template: {{content}}',
+            // titleModel is omitted
+          },
+        },
+      };
 
       const text = 'Test conversation text';
       const abortController = new AbortController();
@@ -173,7 +182,16 @@ describe('AgentClient - titleConvo', () => {
     });
 
     it('should not use titleModel when it equals CURRENT_MODEL constant', async () => {
-      mockReq.app.locals[EModelEndpoint.openAI].titleModel = Constants.CURRENT_MODEL;
+      mockReq.config = {
+        endpoints: {
+          [EModelEndpoint.openAI]: {
+            titleModel: Constants.CURRENT_MODEL,
+            titlePrompt: 'Custom title prompt',
+            titleMethod: 'structured',
+            titlePromptTemplate: 'Template: {{content}}',
+          },
+        },
+      };
 
       const text = 'Test conversation text';
       const abortController = new AbortController();
@@ -216,6 +234,12 @@ describe('AgentClient - titleConvo', () => {
         model: 'gpt-3.5-turbo',
         context: 'title',
         collectedUsage: expect.any(Array),
+        balance: {
+          enabled: false,
+        },
+        transactions: {
+          enabled: true,
+        },
       });
     });
 
@@ -239,16 +263,142 @@ describe('AgentClient - titleConvo', () => {
       expect(result).toBeUndefined();
     });
 
+    it('should skip title generation when titleConvo is set to false', async () => {
+      // Set titleConvo to false in endpoint config
+      mockReq.config = {
+        endpoints: {
+          [EModelEndpoint.openAI]: {
+            titleConvo: false,
+            titleModel: 'gpt-3.5-turbo',
+            titlePrompt: 'Custom title prompt',
+            titleMethod: 'structured',
+            titlePromptTemplate: 'Template: {{content}}',
+          },
+        },
+      };
+
+      const text = 'Test conversation text';
+      const abortController = new AbortController();
+
+      const result = await client.titleConvo({ text, abortController });
+
+      // Should return undefined without generating title
+      expect(result).toBeUndefined();
+
+      // generateTitle should NOT have been called
+      expect(mockRun.generateTitle).not.toHaveBeenCalled();
+
+      // recordCollectedUsage should NOT have been called
+      expect(client.recordCollectedUsage).not.toHaveBeenCalled();
+    });
+
+    it('should skip title generation when titleConvo is false in all config', async () => {
+      // Set titleConvo to false in "all" config
+      mockReq.config = {
+        endpoints: {
+          all: {
+            titleConvo: false,
+            titleModel: 'gpt-4o-mini',
+            titlePrompt: 'All config title prompt',
+            titleMethod: 'completion',
+            titlePromptTemplate: 'All config template',
+          },
+        },
+      };
+
+      const text = 'Test conversation text';
+      const abortController = new AbortController();
+
+      const result = await client.titleConvo({ text, abortController });
+
+      // Should return undefined without generating title
+      expect(result).toBeUndefined();
+
+      // generateTitle should NOT have been called
+      expect(mockRun.generateTitle).not.toHaveBeenCalled();
+
+      // recordCollectedUsage should NOT have been called
+      expect(client.recordCollectedUsage).not.toHaveBeenCalled();
+    });
+
+    it('should skip title generation when titleConvo is false for custom endpoint scenario', async () => {
+      // This test validates the behavior when customEndpointConfig (retrieved via
+      // getProviderConfig for custom endpoints) has titleConvo: false.
+      //
+      // The code path is:
+      // 1. endpoints?.all is checked (undefined in this test)
+      // 2. endpoints?.[endpoint] is checked (our test config)
+      // 3. Would fall back to titleProviderConfig.customEndpointConfig (for real custom endpoints)
+      //
+      // We simulate a custom endpoint scenario using a dynamically named endpoint config
+
+      // Create a unique endpoint name that represents a custom endpoint
+      const customEndpointName = 'customEndpoint';
+
+      // Configure the endpoint to have titleConvo: false
+      // This simulates what would be in customEndpointConfig for a real custom endpoint
+      mockReq.config = {
+        endpoints: {
+          // No 'all' config - so it will check endpoints[endpoint]
+          // This config represents what customEndpointConfig would contain
+          [customEndpointName]: {
+            titleConvo: false,
+            titleModel: 'custom-model-v1',
+            titlePrompt: 'Custom endpoint title prompt',
+            titleMethod: 'completion',
+            titlePromptTemplate: 'Custom template: {{content}}',
+            baseURL: 'https://api.custom-llm.com/v1',
+            apiKey: 'test-custom-key',
+            // Additional custom endpoint properties
+            models: {
+              default: ['custom-model-v1', 'custom-model-v2'],
+            },
+          },
+        },
+      };
+
+      // Set up agent to use our custom endpoint
+      // Use openAI as base but override with custom endpoint name for this test
+      mockAgent.endpoint = EModelEndpoint.openAI;
+      mockAgent.provider = EModelEndpoint.openAI;
+
+      // Override the endpoint in the config to point to our custom config
+      mockReq.config.endpoints[EModelEndpoint.openAI] =
+        mockReq.config.endpoints[customEndpointName];
+      delete mockReq.config.endpoints[customEndpointName];
+
+      const text = 'Test custom endpoint conversation';
+      const abortController = new AbortController();
+
+      const result = await client.titleConvo({ text, abortController });
+
+      // Should return undefined without generating title because titleConvo is false
+      expect(result).toBeUndefined();
+
+      // generateTitle should NOT have been called
+      expect(mockRun.generateTitle).not.toHaveBeenCalled();
+
+      // recordCollectedUsage should NOT have been called
+      expect(client.recordCollectedUsage).not.toHaveBeenCalled();
+    });
+
     it('should pass titleEndpoint configuration to generateTitle', async () => {
       // Mock the API key just for this test
       const originalApiKey = process.env.ANTHROPIC_API_KEY;
       process.env.ANTHROPIC_API_KEY = 'test-api-key';
 
       // Add titleEndpoint to the config
-      mockReq.app.locals[EModelEndpoint.openAI].titleEndpoint = EModelEndpoint.anthropic;
-      mockReq.app.locals[EModelEndpoint.openAI].titleMethod = 'structured';
-      mockReq.app.locals[EModelEndpoint.openAI].titlePrompt = 'Custom title prompt';
-      mockReq.app.locals[EModelEndpoint.openAI].titlePromptTemplate = 'Custom template';
+      mockReq.config = {
+        endpoints: {
+          [EModelEndpoint.openAI]: {
+            titleModel: 'gpt-3.5-turbo',
+            titleEndpoint: EModelEndpoint.anthropic,
+            titleMethod: 'structured',
+            titlePrompt: 'Custom title prompt',
+            titlePromptTemplate: 'Custom template',
+          },
+        },
+      };
 
       const text = 'Test conversation text';
       const abortController = new AbortController();
@@ -274,18 +424,16 @@ describe('AgentClient - titleConvo', () => {
     });
 
     it('should use all config when endpoint config is missing', async () => {
-      // Remove endpoint-specific config
-      delete mockReq.app.locals[EModelEndpoint.openAI].titleModel;
-      delete mockReq.app.locals[EModelEndpoint.openAI].titlePrompt;
-      delete mockReq.app.locals[EModelEndpoint.openAI].titleMethod;
-      delete mockReq.app.locals[EModelEndpoint.openAI].titlePromptTemplate;
-
-      // Set 'all' config
-      mockReq.app.locals.all = {
-        titleModel: 'gpt-4o-mini',
-        titlePrompt: 'All config title prompt',
-        titleMethod: 'completion',
-        titlePromptTemplate: 'All config template: {{content}}',
+      // Set 'all' config without endpoint-specific config
+      mockReq.config = {
+        endpoints: {
+          all: {
+            titleModel: 'gpt-4o-mini',
+            titlePrompt: 'All config title prompt',
+            titleMethod: 'completion',
+            titlePromptTemplate: 'All config template: {{content}}',
+          },
+        },
       };
 
       const text = 'Test conversation text';
@@ -309,17 +457,21 @@ describe('AgentClient - titleConvo', () => {
 
     it('should prioritize all config over endpoint config for title settings', async () => {
       // Set both endpoint and 'all' config
-      mockReq.app.locals[EModelEndpoint.openAI].titleModel = 'gpt-3.5-turbo';
-      mockReq.app.locals[EModelEndpoint.openAI].titlePrompt = 'Endpoint title prompt';
-      mockReq.app.locals[EModelEndpoint.openAI].titleMethod = 'structured';
-      // Remove titlePromptTemplate from endpoint config to test fallback
-      delete mockReq.app.locals[EModelEndpoint.openAI].titlePromptTemplate;
-
-      mockReq.app.locals.all = {
-        titleModel: 'gpt-4o-mini',
-        titlePrompt: 'All config title prompt',
-        titleMethod: 'completion',
-        titlePromptTemplate: 'All config template',
+      mockReq.config = {
+        endpoints: {
+          [EModelEndpoint.openAI]: {
+            titleModel: 'gpt-3.5-turbo',
+            titlePrompt: 'Endpoint title prompt',
+            titleMethod: 'structured',
+            // titlePromptTemplate is omitted to test fallback
+          },
+          all: {
+            titleModel: 'gpt-4o-mini',
+            titlePrompt: 'All config title prompt',
+            titleMethod: 'completion',
+            titlePromptTemplate: 'All config template',
+          },
+        },
       };
 
       const text = 'Test conversation text';
@@ -346,17 +498,18 @@ describe('AgentClient - titleConvo', () => {
       const originalApiKey = process.env.ANTHROPIC_API_KEY;
       process.env.ANTHROPIC_API_KEY = 'test-anthropic-key';
 
-      // Remove endpoint-specific config to test 'all' config
-      delete mockReq.app.locals[EModelEndpoint.openAI];
-
       // Set comprehensive 'all' config with all new title options
-      mockReq.app.locals.all = {
-        titleConvo: true,
-        titleModel: 'claude-3-haiku-20240307',
-        titleMethod: 'completion', // Testing the new default method
-        titlePrompt: 'Generate a concise, descriptive title for this conversation',
-        titlePromptTemplate: 'Conversation summary: {{content}}',
-        titleEndpoint: EModelEndpoint.anthropic, // Should switch provider to Anthropic
+      mockReq.config = {
+        endpoints: {
+          all: {
+            titleConvo: true,
+            titleModel: 'claude-3-haiku-20240307',
+            titleMethod: 'completion', // Testing the new default method
+            titlePrompt: 'Generate a concise, descriptive title for this conversation',
+            titlePromptTemplate: 'Conversation summary: {{content}}',
+            titleEndpoint: EModelEndpoint.anthropic, // Should switch provider to Anthropic
+          },
+        },
       };
 
       const text = 'Test conversation about AI and machine learning';
@@ -402,15 +555,16 @@ describe('AgentClient - titleConvo', () => {
       // Clear previous calls
      mockRun.generateTitle.mockClear();
 
-      // Remove endpoint config
-      delete mockReq.app.locals[EModelEndpoint.openAI];
-
       // Set 'all' config with specific titleMethod
-      mockReq.app.locals.all = {
-        titleModel: 'gpt-4o-mini',
-        titleMethod: method,
-        titlePrompt: `Testing ${method} method`,
-        titlePromptTemplate: `Template for ${method}: {{content}}`,
+      mockReq.config = {
+        endpoints: {
+          all: {
+            titleModel: 'gpt-4o-mini',
+            titleMethod: method,
+            titlePrompt: `Testing ${method} method`,
+            titlePromptTemplate: `Template for ${method}: {{content}}`,
+          },
+        },
       };
 
       const text = `Test conversation for ${method} method`;
@@ -455,29 +609,33 @@ describe('AgentClient - titleConvo', () => {
       // Set up Azure endpoint with serverless config
       mockAgent.endpoint = EModelEndpoint.azureOpenAI;
       mockAgent.provider = EModelEndpoint.azureOpenAI;
-      mockReq.app.locals[EModelEndpoint.azureOpenAI] = {
-        titleConvo: true,
-        titleModel: 'grok-3',
-        titleMethod: 'completion',
-        titlePrompt: 'Azure serverless title prompt',
-        streamRate: 35,
-        modelGroupMap: {
-          'grok-3': {
-            group: 'Azure AI Foundry',
-            deploymentName: 'grok-3',
-          },
-        },
-        groupMap: {
-          'Azure AI Foundry': {
-            apiKey: '${AZURE_API_KEY}',
-            baseURL: 'https://test.services.ai.azure.com/models',
-            version: '2024-05-01-preview',
-            serverless: true,
-            models: {
-              'grok-3': {
-                deploymentName: 'grok-3',
+      mockReq.config = {
+        endpoints: {
+          [EModelEndpoint.azureOpenAI]: {
+            titleConvo: true,
+            titleModel: 'grok-3',
+            titleMethod: 'completion',
+            titlePrompt: 'Azure serverless title prompt',
+            streamRate: 35,
+            modelGroupMap: {
+              'grok-3': {
+                group: 'Azure AI Foundry',
+                deploymentName: 'grok-3',
+              },
+            },
+            groupMap: {
+              'Azure AI Foundry': {
+                apiKey: '${AZURE_API_KEY}',
+                baseURL: 'https://test.services.ai.azure.com/models',
+                version: '2024-05-01-preview',
+                serverless: true,
+                models: {
+                  'grok-3': {
+                    deploymentName: 'grok-3',
+                  },
+                },
               },
             },
           },
         },
       };
@@ -503,28 +661,32 @@ describe('AgentClient - titleConvo', () => {
       // Set up Azure endpoint
       mockAgent.endpoint = EModelEndpoint.azureOpenAI;
       mockAgent.provider = EModelEndpoint.azureOpenAI;
-      mockReq.app.locals[EModelEndpoint.azureOpenAI] = {
-        titleConvo: true,
-        titleModel: 'gpt-4o',
-        titleMethod: 'structured',
-        titlePrompt: 'Azure instance title prompt',
-        streamRate: 35,
-        modelGroupMap: {
-          'gpt-4o': {
-            group: 'eastus',
-            deploymentName: 'gpt-4o',
-          },
-        },
-        groupMap: {
-          eastus: {
-            apiKey: '${EASTUS_API_KEY}',
-            instanceName: 'region-instance',
-            version: '2024-02-15-preview',
-            models: {
-              'gpt-4o': {
-                deploymentName: 'gpt-4o',
+      mockReq.config = {
+        endpoints: {
+          [EModelEndpoint.azureOpenAI]: {
+            titleConvo: true,
+            titleModel: 'gpt-4o',
+            titleMethod: 'structured',
+            titlePrompt: 'Azure instance title prompt',
+            streamRate: 35,
+            modelGroupMap: {
+              'gpt-4o': {
+                group: 'eastus',
+                deploymentName: 'gpt-4o',
+              },
+            },
+            groupMap: {
+              eastus: {
+                apiKey: '${EASTUS_API_KEY}',
+                instanceName: 'region-instance',
+                version: '2024-02-15-preview',
+                models: {
+                  'gpt-4o': {
+                    deploymentName: 'gpt-4o',
+                  },
+                },
               },
             },
           },
         },
       };
@@ -551,29 +713,33 @@ describe('AgentClient - titleConvo', () => {
       mockAgent.endpoint = EModelEndpoint.azureOpenAI;
       mockAgent.provider = EModelEndpoint.azureOpenAI;
       mockAgent.model_parameters.model = 'gpt-4o-latest';
-      mockReq.app.locals[EModelEndpoint.azureOpenAI] = {
-        titleConvo: true,
-        titleModel: Constants.CURRENT_MODEL,
-        titleMethod: 'functions',
-        streamRate: 35,
-        modelGroupMap: {
-          'gpt-4o-latest': {
-            group: 'region-eastus',
-            deploymentName: 'gpt-4o-mini',
-            version: '2024-02-15-preview',
-          },
-        },
-        groupMap: {
-          'region-eastus': {
-            apiKey: '${EASTUS2_API_KEY}',
-            instanceName: 'test-instance',
-            version: '2024-12-01-preview',
-            models: {
-              'gpt-4o-latest': {
-                deploymentName: 'gpt-4o-mini',
-                version: '2024-02-15-preview',
+      mockReq.config = {
+        endpoints: {
+          [EModelEndpoint.azureOpenAI]: {
+            titleConvo: true,
+            titleModel: Constants.CURRENT_MODEL,
+            titleMethod: 'functions',
+            streamRate: 35,
+            modelGroupMap: {
+              'gpt-4o-latest': {
+                group: 'region-eastus',
+                deploymentName: 'gpt-4o-mini',
+                version: '2024-02-15-preview',
+              },
+            },
+            groupMap: {
+              'region-eastus': {
+                apiKey: '${EASTUS2_API_KEY}',
+                instanceName: 'test-instance',
+                version: '2024-12-01-preview',
+                models: {
+                  'gpt-4o-latest': {
+                    deploymentName: 'gpt-4o-mini',
+                    version: '2024-02-15-preview',
+                  },
+                },
               },
             },
           },
         },
       };
@@ -598,56 +764,60 @@ describe('AgentClient - titleConvo', () => {
       // Set up Azure endpoint
       mockAgent.endpoint = EModelEndpoint.azureOpenAI;
       mockAgent.provider = EModelEndpoint.azureOpenAI;
-      mockReq.app.locals[EModelEndpoint.azureOpenAI] = {
-        titleConvo: true,
-        titleModel: 'o1-mini',
-        titleMethod: 'completion',
-        streamRate: 35,
-        modelGroupMap: {
-          'gpt-4o': {
-            group: 'eastus',
-            deploymentName: 'gpt-4o',
-          },
-          'o1-mini': {
-            group: 'region-eastus',
-            deploymentName: 'o1-mini',
-          },
-          'codex-mini': {
-            group: 'codex-mini',
-            deploymentName: 'codex-mini',
-          },
-        },
-        groupMap: {
-          eastus: {
-            apiKey: '${EASTUS_API_KEY}',
-            instanceName: 'region-eastus',
-            version: '2024-02-15-preview',
-            models: {
-              'gpt-4o': {
-                deploymentName: 'gpt-4o',
-              },
-            },
-          },
-          'region-eastus': {
-            apiKey: '${EASTUS2_API_KEY}',
-            instanceName: 'region-eastus2',
-            version: '2024-12-01-preview',
-            models: {
-              'o1-mini': {
-                deploymentName: 'o1-mini',
-              },
-            },
-          },
-          'codex-mini': {
-            apiKey: '${AZURE_API_KEY}',
-            baseURL: 'https://example.cognitiveservices.azure.com/openai/',
-            version: '2025-04-01-preview',
-            serverless: true,
-            models: {
-              'codex-mini': {
-                deploymentName: 'codex-mini',
+      mockReq.config = {
+        endpoints: {
+          [EModelEndpoint.azureOpenAI]: {
+            titleConvo: true,
+            titleModel: 'o1-mini',
+            titleMethod: 'completion',
+            streamRate: 35,
+            modelGroupMap: {
+              'gpt-4o': {
+                group: 'eastus',
+                deploymentName: 'gpt-4o',
+              },
+              'o1-mini': {
+                group: 'region-eastus',
+                deploymentName: 'o1-mini',
+              },
+              'codex-mini': {
+                group: 'codex-mini',
+                deploymentName: 'codex-mini',
+              },
+            },
+            groupMap: {
+              eastus: {
+                apiKey: '${EASTUS_API_KEY}',
+                instanceName: 'region-eastus',
+                version: '2024-02-15-preview',
+                models: {
+                  'gpt-4o': {
+                    deploymentName: 'gpt-4o',
+                  },
+                },
+              },
+              'region-eastus': {
+                apiKey: '${EASTUS2_API_KEY}',
+                instanceName: 'region-eastus2',
+                version: '2024-12-01-preview',
+                models: {
+                  'o1-mini': {
+                    deploymentName: 'o1-mini',
+                  },
+                },
+              },
+              'codex-mini': {
+                apiKey: '${AZURE_API_KEY}',
+                baseURL: 'https://example.cognitiveservices.azure.com/openai/',
+                version: '2025-04-01-preview',
+                serverless: true,
+                models: {
+                  'codex-mini': {
+                    deploymentName: 'codex-mini',
+                  },
+                },
              },
            },
          },
        },
      };
@@ -679,33 +849,34 @@ describe('AgentClient - titleConvo', () => {
       mockReq.body.endpoint = EModelEndpoint.azureOpenAI;
       mockReq.body.model = 'gpt-4';
 
-      // Remove Azure-specific config
-      delete mockReq.app.locals[EModelEndpoint.azureOpenAI];
-
       // Set 'all' config as fallback with a serverless Azure config
-      mockReq.app.locals.all = {
-        titleConvo: true,
-        titleModel: 'gpt-4',
-        titleMethod: 'structured',
-        titlePrompt: 'Fallback title prompt from all config',
-        titlePromptTemplate: 'Template: {{content}}',
-        modelGroupMap: {
-          'gpt-4': {
-            group: 'default-group',
-            deploymentName: 'gpt-4',
-          },
-        },
-        groupMap: {
-          'default-group': {
-            apiKey: '${AZURE_API_KEY}',
-            baseURL: 'https://default.openai.azure.com/',
-            version: '2024-02-15-preview',
-            serverless: true,
-            models: {
-              'gpt-4': {
-                deploymentName: 'gpt-4',
+      mockReq.config = {
+        endpoints: {
+          all: {
+            titleConvo: true,
+            titleModel: 'gpt-4',
+            titleMethod: 'structured',
+            titlePrompt: 'Fallback title prompt from all config',
+            titlePromptTemplate: 'Template: {{content}}',
+            modelGroupMap: {
+              'gpt-4': {
+                group: 'default-group',
+                deploymentName: 'gpt-4',
+              },
+            },
+            groupMap: {
+              'default-group': {
+                apiKey: '${AZURE_API_KEY}',
+                baseURL: 'https://default.openai.azure.com/',
+                version: '2024-02-15-preview',
+                serverless: true,
+                models: {
+                  'gpt-4': {
+                    deploymentName: 'gpt-4',
+                  },
+                },
              },
            },
          },
        },
      };
@@ -982,13 +1153,6 @@ describe('AgentClient - titleConvo', () => {
     };
 
     mockReq = {
-      app: {
-        locals: {
-          memory: {
-            messageWindowSize: 3,
-          },
-        },
-      },
       user: {
         id: 'user-123',
         personalization: {
@@ -997,6 +1161,13 @@ describe('AgentClient - titleConvo', () => {
       },
     };
 
+    // Mock getAppConfig for memory tests
+    mockReq.config = {
+      memory: {
+        messageWindowSize: 3,
+      },
+    };
+
     mockRes = {};
 
     mockOptions = {
Some files were not shown because too many files have changed in this diff.