Compare commits: `v0.7.1...refactor/c` (192 commits)
Commits in this comparison (SHA1; author and date columns were empty in the table):

d69465ea3d, 5ef71a7a36, 326069d7a6, 785430daf5, 03fe361917, b34a4ddac1, 7d5b03dd98, f959ee302c,
cd00df69bb, a05e2c1dcc, 87bdbda10a, 605a8ae8c9, a724635998, 6c306a662c, 55f8d9910e, 7edb54889b,
71d9e841b1, e76777d298, 1edbfdbce2, 1aad315de6, 5d985746cb, 04654014b2, 456793772b, a87d4e0b75,
a2fd975cd5, 83619de158, b8f2bee3fc, 81292bb4dd, ed5ee1f86f, 791b0139bc, 156c52e293, eef894e608,
e2867eecc9, dd563e0796, c99cf1b4b1, b5081bfe86, aac01df80c, 24467dd626, b2b469bd3d, cec2e57ee9,
a8c874267f, a53312bbd4, ab74685476, 015215b790, 4e4de88faa, 3172381bad, 54b1095239, 0424f8fe55,
4319c62e66, d3a0b862db, 5d8793c5d1, 54db67449a, 0cd3c83328, 302b28fc9b, dad25bd297, a338decf90,
2cf5228021, 0294cfc881, 8d8b17e7ed, 04502e9525, bcaa7d5d29, c288b458b6, 447bbcb8ca, 68bf7ac7c0,
97d12d03d1, 4416f69a9b, 29e71e98ad, e9bbf39618, 08b8ae120e, 803fd63121, ef76cc195e, 2e559137ae,
92232afaca, 084cf266a2, baf0848021, 1da92111aa, 35f8053f45, ee673d682e, b7fef6958b, 5452d4c20c,
a7f5b57272, f69b317171, 4469ba72fc, 0e3e45e77d, 9f0c1914a5, 37ae484fbc, 8939d8af37, f9a0166352,
248dfb8b5b, b8e35002f4, 8318f26d66, 08d6bea359, a6058c5669, e0402b71f0, a618266905, d5a7806e32,
e2cb2905e7, 3f600f0d3f, c9e7d4ac18, 40685f6eb4, 0ee060d730, 5dc5d875ba, 9f2538fcd9, 2b7a973a33,
c704a23749, eb5733083e, b80f38e49e, 4369e75ca7, 35ba4ba1a4, dcd2e3e62d, 514a502b9c, 8e66683577,
dc1778b11f, 795bb9c568, a937650df6, 6cf1c85363, b3e03b75d0, 9d8fd92dd3, f00a8f87f7, 79840763e7,
1a452121fa, af8bcb08d6, f0e8cca5df, 38ad36c1c5, 53fe2f6453, 31479d6a48, 612a58737d, 8a7f36f581,
4a5d06a774, fc9368e0e7, 64bf0800a0, 94eeec354e, e42709bd1f, 638ac5bba6, 5920672a8c, a0d1e2a5f8,
4ffc1414a8, df6183db0f, 3816219936, 6fc664e4a3, 89899164ed, c83d9d61d4, bcdddaed72, a4de635719,
4a32d7466a, 2ec821ea4c, 978009787c, 27e7621b6a, 2b37a44b8d, 98c96cd020, 8f20fb28e5, d73ea8e1f2,
83bae9e9d9, 6ba7f60eec, 5293b73b6d, b6d1f5fa53, c94278be85, 3c5fa40435, b6d6343f54, 89b1e33be0,
436f7195b5, 2aec4a6250, b77bd19092, 446ffe0417, b9bcaee656, 110c0535fb, 25fceb78b7, c8baceac76,
a0288f1c5c, 5d3c90be26, ab6fbe48f1, 3b44741cf9, d21a05606e, 0e50c07e3f, a5cac03fa4, ba4fa6150e,
463ca5d613, 039c7ae880, 63ef15ab63, 8a78500fe2, 144fd5f6aa, 2720327aa1, 4d0806d3e8, 5b5f9b950b,
3ccff19821, 11d5e232b3, 099aa9dead, 4121818124, ca9a0fe629, bde6bb0152, 667f5f91fe, 75da75be08
**.env.example** (95 changes)

```diff
@@ -2,11 +2,9 @@
 # LibreChat Configuration #
 #=====================================================================#
 # Please refer to the reference documentation for assistance #
-# with configuring your LibreChat environment. The guide is #
-# available both online and within your local LibreChat #
-# directory: #
-# Online: https://docs.librechat.ai/install/configuration/dotenv.html #
-# Locally: ./docs/install/configuration/dotenv.md #
+# with configuring your LibreChat environment. #
+# #
+# https://www.librechat.ai/docs/configuration/dotenv #
 #=====================================================================#
 
 #==================================================#
@@ -62,15 +60,19 @@ PROXY=
 #===================================#
 # Known Endpoints - librechat.yaml #
 #===================================#
-# https://docs.librechat.ai/install/configuration/ai_endpoints.html
+# https://www.librechat.ai/docs/configuration/librechat_yaml/ai_endpoints
 
-# GROQ_API_KEY=
-# SHUTTLEAI_KEY=
-# OPENROUTER_KEY=
-# MISTRAL_API_KEY=
 # ANYSCALE_API_KEY=
+# APIPIE_API_KEY=
+# COHERE_API_KEY=
+# DATABRICKS_API_KEY=
 # FIREWORKS_API_KEY=
+# GROQ_API_KEY=
+# HUGGINGFACE_TOKEN=
+# MISTRAL_API_KEY=
+# OPENROUTER_KEY=
 # PERPLEXITY_API_KEY=
+# SHUTTLEAI_API_KEY=
 # TOGETHERAI_API_KEY=
 
 #============#
@@ -78,7 +80,7 @@ PROXY=
 #============#
 
 ANTHROPIC_API_KEY=user_provided
-# ANTHROPIC_MODELS=claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307,claude-2.1,claude-2,claude-1.2,claude-1,claude-1-100k,claude-instant-1,claude-instant-1-100k
+# ANTHROPIC_MODELS=claude-3-5-sonnet-20240620,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307,claude-2.1,claude-2,claude-1.2,claude-1,claude-1-100k,claude-instant-1,claude-instant-1-100k
 # ANTHROPIC_REVERSE_PROXY=
 
 #============#
@@ -113,15 +115,36 @@ BINGAI_TOKEN=user_provided
 #============#
 
 GOOGLE_KEY=user_provided
-# GOOGLE_MODELS=gemini-pro,gemini-pro-vision,chat-bison,chat-bison-32k,codechat-bison,codechat-bison-32k,text-bison,text-bison-32k,text-unicorn,code-gecko,code-bison,code-bison-32k
 # GOOGLE_REVERSE_PROXY=
 
+# Gemini API
+# GOOGLE_MODELS=gemini-1.5-flash-latest,gemini-1.0-pro,gemini-1.0-pro-001,gemini-1.0-pro-latest,gemini-1.0-pro-vision-latest,gemini-1.5-pro-latest,gemini-pro,gemini-pro-vision
+
+# Vertex AI
+# GOOGLE_MODELS=gemini-1.5-flash-preview-0514,gemini-1.5-pro-preview-0514,gemini-1.0-pro-vision-001,gemini-1.0-pro-002,gemini-1.0-pro-001,gemini-pro-vision,gemini-1.0-pro
+
+# GOOGLE_TITLE_MODEL=gemini-pro
+
+# Google Gemini Safety Settings
+# NOTE (Vertex AI): You do not have access to the BLOCK_NONE setting by default.
+# To use this restricted HarmBlockThreshold setting, you will need to either:
+#
+# (a) Get access through an allowlist via your Google account team
+# (b) Switch your account type to monthly invoiced billing following this instruction:
+# https://cloud.google.com/billing/docs/how-to/invoiced-billing
+#
+# GOOGLE_SAFETY_SEXUALLY_EXPLICIT=BLOCK_ONLY_HIGH
+# GOOGLE_SAFETY_HATE_SPEECH=BLOCK_ONLY_HIGH
+# GOOGLE_SAFETY_HARASSMENT=BLOCK_ONLY_HIGH
+# GOOGLE_SAFETY_DANGEROUS_CONTENT=BLOCK_ONLY_HIGH
+
 #============#
 # OpenAI #
 #============#
 
 OPENAI_API_KEY=user_provided
-# OPENAI_MODELS=gpt-3.5-turbo-0125,gpt-3.5-turbo-0301,gpt-3.5-turbo,gpt-4,gpt-4-0613,gpt-4-vision-preview,gpt-3.5-turbo-0613,gpt-3.5-turbo-16k-0613,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-instruct-0914,gpt-3.5-turbo-16k
+# OPENAI_MODELS=gpt-4o,gpt-3.5-turbo-0125,gpt-3.5-turbo-0301,gpt-3.5-turbo,gpt-4,gpt-4-0613,gpt-4-vision-preview,gpt-3.5-turbo-0613,gpt-3.5-turbo-16k-0613,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-instruct-0914,gpt-3.5-turbo-16k
 
 DEBUG_OPENAI=false
 
@@ -143,7 +166,17 @@ DEBUG_OPENAI=false
 
 ASSISTANTS_API_KEY=user_provided
 # ASSISTANTS_BASE_URL=
-# ASSISTANTS_MODELS=gpt-3.5-turbo-0125,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-16k,gpt-3.5-turbo,gpt-4,gpt-4-0314,gpt-4-32k-0314,gpt-4-0613,gpt-3.5-turbo-0613,gpt-3.5-turbo-1106,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview
+# ASSISTANTS_MODELS=gpt-4o,gpt-3.5-turbo-0125,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-16k,gpt-3.5-turbo,gpt-4,gpt-4-0314,gpt-4-32k-0314,gpt-4-0613,gpt-3.5-turbo-0613,gpt-3.5-turbo-1106,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview
 
+#==========================#
+# Azure Assistants API #
+#==========================#
+
+# Note: You should map your credentials with custom variables according to your Azure OpenAI Configuration
+# The models for Azure Assistants are also determined by your Azure OpenAI configuration.
+
+# More info, including how to enable use of Assistants with Azure here:
+# https://www.librechat.ai/docs/configuration/librechat_yaml/ai_endpoints/azure#using-assistants-with-azure
+
 #============#
 # OpenRouter #
@@ -155,7 +188,7 @@ ASSISTANTS_API_KEY=user_provided
 # Plugins #
 #============#
 
-# PLUGIN_MODELS=gpt-4,gpt-4-turbo-preview,gpt-4-0125-preview,gpt-4-1106-preview,gpt-4-0613,gpt-3.5-turbo,gpt-3.5-turbo-0125,gpt-3.5-turbo-1106,gpt-3.5-turbo-0613
+# PLUGIN_MODELS=gpt-4o,gpt-4,gpt-4-turbo-preview,gpt-4-0125-preview,gpt-4-1106-preview,gpt-4-0613,gpt-3.5-turbo,gpt-3.5-turbo-0125,gpt-3.5-turbo-1106,gpt-3.5-turbo-0613
 
 DEBUG_PLUGINS=true
 
@@ -228,6 +261,14 @@ MEILI_NO_ANALYTICS=true
 MEILI_HOST=http://0.0.0.0:7700
 MEILI_MASTER_KEY=DrhYf7zENyR6AlUCKmnz0eYASOQdl6zxH7s7MKFSfFCt
 
+
+#==================================================#
+# Speech to Text & Text to Speech #
+#==================================================#
+
+STT_API_KEY=
+TTS_API_KEY=
+
 #===================================================#
 # User System #
 #===================================================#
@@ -282,6 +323,9 @@ ALLOW_EMAIL_LOGIN=true
 ALLOW_REGISTRATION=true
 ALLOW_SOCIAL_LOGIN=false
 ALLOW_SOCIAL_REGISTRATION=false
+ALLOW_PASSWORD_RESET=false
+# ALLOW_ACCOUNT_DELETION=true # note: enabled by default if omitted/commented out
+ALLOW_UNVERIFIED_EMAIL_LOGIN=true
 
 SESSION_EXPIRY=1000 * 60 * 15
 REFRESH_TOKEN_EXPIRY=(1000 * 60 * 60 * 24) * 7
@@ -323,6 +367,17 @@ OPENID_REQUIRED_ROLE_PARAMETER_PATH=
 OPENID_BUTTON_LABEL=
 OPENID_IMAGE_URL=
 
+# LDAP
+LDAP_URL=
+LDAP_BIND_DN=
+LDAP_BIND_CREDENTIALS=
+LDAP_USER_SEARCH_BASE=
+LDAP_SEARCH_FILTER=mail={{username}}
+LDAP_CA_CERT_PATH=
+# LDAP_ID=
+# LDAP_USERNAME=
+# LDAP_FULL_NAME=
+
 #========================#
 # Email Password Reset #
 #========================#
@@ -349,6 +404,13 @@ FIREBASE_STORAGE_BUCKET=
 FIREBASE_MESSAGING_SENDER_ID=
 FIREBASE_APP_ID=
 
+#========================#
+# Shared Links #
+#========================#
+
+ALLOW_SHARED_LINKS=true
+ALLOW_SHARED_LINKS_PUBLIC=true
+
 #===================================================#
 # UI #
 #===================================================#
@@ -359,6 +421,9 @@ HELP_AND_FAQ_URL=https://librechat.ai
 
 # SHOW_BIRTHDAY_ICON=true
 
+# Google tag manager id
+#ANALYTICS_GTM_ID=user provided google tag manager id
+
 #==================================================#
 # Others #
 #==================================================#
```
**(unnamed file: ESLint overrides configuration)**

```diff
@@ -132,6 +132,13 @@ module.exports = {
 },
 ],
 },
+{
+files: './config/translations/**/*.ts',
+parser: '@typescript-eslint/parser',
+parserOptions: {
+project: './config/translations/tsconfig.json',
+},
+},
 {
 files: ['./packages/data-provider/specs/**/*.ts'],
 parserOptions: {
```
**.github/CONTRIBUTING.md** (12 changes)

```diff
@@ -126,6 +126,18 @@ Apply the following naming conventions to branches, labels, and other Git-relate
 
 - **Current Stance**: At present, this backend transition is of lower priority and might not be pursued.
 
+## 7. Module Import Conventions
+
+- `npm` packages first,
+  - from shortest line (top) to longest (bottom)
+
+- Followed by typescript types (pertains to data-provider and client workspaces)
+  - longest line (top) to shortest (bottom)
+  - types from package come first
+
+- Lastly, local imports
+  - longest line (top) to shortest (bottom)
+  - imports with alias `~` treated the same as relative import with respect to line length
+
 ---
 
```
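The import-ordering rules added to CONTRIBUTING.md above can be illustrated with a small, hypothetical CommonJS module in the `api` workspace (the specific modules below are chosen only for illustration; the TypeScript-type group is omitted because it applies to the data-provider and client workspaces):

```js
// npm packages first, ordered from the shortest line to the longest
const axios = require('axios');
const { HttpsProxyAgent } = require('https-proxy-agent');

// local imports last, ordered from the longest line to the shortest;
// the `~` alias is treated the same as a relative path when measuring length
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { formatMessage, truncateText } = require('./prompts');
const { getModelMaxTokens } = require('~/utils');
const { logger } = require('~/config');
```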
**.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml** (2 changes)

```diff
@@ -43,7 +43,7 @@ body:
 id: terms
 attributes:
 label: Code of Conduct
-description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/danny-avila/LibreChat/blob/main/CODE_OF_CONDUCT.md)
+description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/danny-avila/LibreChat/blob/main/.github/CODE_OF_CONDUCT.md)
 options:
 - label: I agree to follow this project's Code of Conduct
 required: true
```
**.github/ISSUE_TEMPLATE/QUESTION.yml** (2 changes)

```diff
@@ -44,7 +44,7 @@ body:
 id: terms
 attributes:
 label: Code of Conduct
-description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/danny-avila/LibreChat/blob/main/CODE_OF_CONDUCT.md)
+description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/danny-avila/LibreChat/blob/main/.github/CODE_OF_CONDUCT.md)
 options:
 - label: I agree to follow this project's Code of Conduct
 required: true
```
**.github/pull_request_template.md** (9 changes)

```diff
@@ -1,7 +1,10 @@
 # Pull Request Template
 
-### ⚠️ Before Submitting a PR, read the [Contributing Docs](https://github.com/danny-avila/LibreChat/blob/main/.github/CONTRIBUTING.md) in full!
+⚠️ Before Submitting a PR, Please Review:
+- Please ensure that you have thoroughly read and understood the [Contributing Docs](https://github.com/danny-avila/LibreChat/blob/main/.github/CONTRIBUTING.md) before submitting your Pull Request.
+
+⚠️ Documentation Updates Notice:
+- Kindly note that documentation updates are managed in this repository: [librechat.ai](https://github.com/LibreChat-AI/librechat.ai)
 
 ## Summary
 
@@ -16,8 +19,6 @@ Please delete any irrelevant options.
 - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
 - [ ] This change requires a documentation update
 - [ ] Translation update
-- [ ] Documentation update
-
 
 ## Testing
 
@@ -37,4 +38,4 @@ Please delete any irrelevant options.
 - [ ] I have written tests demonstrating that my changes are effective or that my feature works
 - [ ] Local unit tests pass with my changes
 - [ ] Any changes dependent on mine have been merged and published in downstream modules.
-- [ ] New documents have been locally validated with mkdocs
+- [ ] A pull request for updating the documentation has been submitted.
```
**.github/workflows/frontend-review.yml** (36 changes)

```diff
@@ -1,11 +1,6 @@
-#github action to run unit tests for frontend with jest
 name: Frontend Unit Tests
 
 on:
-# push:
-# branches:
-# - main
-# - dev
-# - release/*
 pull_request:
 branches:
 - main
@@ -14,9 +9,10 @@ on:
 paths:
 - 'client/**'
 - 'packages/**'
 
 jobs:
-tests_frontend:
-name: Run frontend unit tests
+tests_frontend_ubuntu:
+name: Run frontend unit tests on Ubuntu
 timeout-minutes: 60
 runs-on: ubuntu-latest
 steps:
@@ -35,4 +31,26 @@ jobs:
 
 - name: Run unit tests
 run: npm run test:ci --verbose
 working-directory: client
+
+tests_frontend_windows:
+name: Run frontend unit tests on Windows
+timeout-minutes: 60
+runs-on: windows-latest
+steps:
+- uses: actions/checkout@v4
+- name: Use Node.js 20.x
+uses: actions/setup-node@v4
+with:
+node-version: 20
+cache: 'npm'
+
+- name: Install dependencies
+run: npm ci
+
+- name: Build Client
+run: npm run frontend:ci
+
+- name: Run unit tests
+run: npm run test:ci --verbose
+working-directory: client
```
**.github/workflows/mkdocs.yaml** (27 changes; file deleted)

```diff
@@ -1,27 +0,0 @@
-name: mkdocs
-on:
-push:
-branches:
-- main
-permissions:
-contents: write
-jobs:
-deploy:
-runs-on: ubuntu-latest
-steps:
-- uses: actions/checkout@v4
-- uses: actions/setup-python@v4
-with:
-python-version: 3.x
-- run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
-- uses: actions/cache@v3
-with:
-key: mkdocs-material-${{ env.cache_id }}
-path: .cache
-restore-keys: |
-mkdocs-material-
-- run: pip install mkdocs-material
-- run: pip install mkdocs-nav-weight
-- run: pip install mkdocs-publisher
-- run: pip install mkdocs-exclude
-- run: mkdocs gh-deploy --force
```
**.gitignore** (9 changes)

```diff
@@ -11,6 +11,7 @@ logs
 pids
 *.pid
 *.seed
+.git
 
 # Directory for instrumented libs generated by jscoverage/JSCover
 lib-cov
@@ -21,6 +22,10 @@ coverage
 # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
 .grunt
 
+# translation services
+config/translations/stores/*
+client/src/localization/languages/*_missing_keys.json
+
 # Compiled Dirs (http://nodejs.org/api/addons.html)
 build/
 dist/
@@ -41,6 +46,7 @@ api/node_modules/
 client/node_modules/
 bower_components/
 *.d.ts
+!vite-env.d.ts
 
 # Floobits
 .floo
@@ -69,6 +75,8 @@ src/style - official.css
 /playwright/.cache/
 .DS_Store
 *.code-workspace
+.idx
+monospace.json
 .idea
 *.iml
 *.pem
@@ -76,6 +84,7 @@ config.local.ts
 **/storageState.json
 junit.xml
 **/.venv/
+**/venv/
 
 # docker override file
 docker-compose.override.yaml
```
**Dockerfile** (32 changes)

```diff
@@ -1,10 +1,8 @@
-# v0.7.1
+# v0.7.3
 
 # Base node image
-FROM node:18-alpine3.18 AS node
+FROM node:20-alpine AS node
 
-RUN apk add g++ make py3-pip
-RUN npm install -g node-gyp
 RUN apk --no-cache add curl
 
 RUN mkdir -p /app && chown node:node /app
@@ -14,20 +12,20 @@ USER node
 
 COPY --chown=node:node . .
 
-# Allow mounting of these files, which have no default
-# values.
-RUN touch .env
-RUN npm config set fetch-retry-maxtimeout 600000
-RUN npm config set fetch-retries 5
-RUN npm config set fetch-retry-mintimeout 15000
-RUN npm install --no-audit
-
-# React client build
-ENV NODE_OPTIONS="--max-old-space-size=2048"
-RUN npm run frontend
-
-# Create directories for the volumes to inherit
-# the correct permissions
+RUN \
+# Allow mounting of these files, which have no default
+touch .env ; \
+# Create directories for the volumes to inherit the correct permissions
+mkdir -p /app/client/public/images /app/api/logs ; \
+npm config set fetch-retry-maxtimeout 600000 ; \
+npm config set fetch-retries 5 ; \
+npm config set fetch-retry-mintimeout 15000 ; \
+npm install --no-audit; \
+# React client build
+NODE_OPTIONS="--max-old-space-size=2048" npm run frontend; \
+npm prune --production; \
+npm cache clean --force
+
 RUN mkdir -p /app/client/public/images /app/api/logs
 
 # Node API setup
```
**(unnamed file: multi-stage Dockerfile)**

```diff
@@ -1,4 +1,4 @@
-# v0.7.1
+# v0.7.3
 
 # Build API, Client and Data Provider
 FROM node:20-alpine AS base
@@ -7,32 +7,31 @@ FROM node:20-alpine AS base
 FROM base AS data-provider-build
 WORKDIR /app/packages/data-provider
 COPY ./packages/data-provider ./
-RUN npm install
+RUN npm install; npm cache clean --force
 RUN npm run build
+RUN npm prune --production
 
 # React client build
-FROM data-provider-build AS client-build
+FROM base AS client-build
 WORKDIR /app/client
 COPY ./client/package*.json ./
 # Copy data-provider to client's node_modules
-RUN mkdir -p /app/client/node_modules/librechat-data-provider/
-RUN cp -R /app/packages/data-provider/* /app/client/node_modules/librechat-data-provider/
-RUN npm install
+COPY --from=data-provider-build /app/packages/data-provider/ /app/client/node_modules/librechat-data-provider/
+RUN npm install; npm cache clean --force
 COPY ./client/ ./
 ENV NODE_OPTIONS="--max-old-space-size=2048"
 RUN npm run build
 
 # Node API setup
-FROM data-provider-build AS api-build
+FROM base AS api-build
 WORKDIR /app/api
 COPY api/package*.json ./
 COPY api/ ./
 # Copy helper scripts
 COPY config/ ./
 # Copy data-provider to API's node_modules
-RUN mkdir -p /app/api/node_modules/librechat-data-provider/
-RUN cp -R /app/packages/data-provider/* /app/api/node_modules/librechat-data-provider/
-RUN npm install
+COPY --from=data-provider-build /app/packages/data-provider/ /app/api/node_modules/librechat-data-provider/
+RUN npm install --include prod; npm cache clean --force
 COPY --from=client-build /app/client/dist /app/client/dist
 EXPOSE 3080
 ENV HOST=0.0.0.0
```
**README.md** (68 changes)

```diff
@@ -1,6 +1,6 @@
 <p align="center">
 <a href="https://librechat.ai">
-<img src="docs/assets/LibreChat.svg" height="256">
+<img src="client/public/assets/logo.svg" height="256">
 </a>
 <h1 align="center">
 <a href="https://librechat.ai">LibreChat</a>
@@ -27,7 +27,7 @@
 </p>
 
 <p align="center">
-<a href="https://railway.app/template/b5k2mn?referralCode=myKrVZ">
+<a href="https://railway.app/template/b5k2mn?referralCode=HI9hWz">
 <img src="https://railway.app/button.svg" alt="Deploy on Railway" height="30">
 </a>
 <a href="https://zeabur.com/templates/0X2ZY8">
@@ -41,8 +41,16 @@
 # 📃 Features
 
 - 🖥️ UI matching ChatGPT, including Dark mode, Streaming, and latest updates
+- 🤖 AI model selection:
+  - OpenAI, Azure OpenAI, BingAI, ChatGPT, Google Vertex AI, Anthropic (Claude), Plugins, Assistants API (including Azure Assistants)
+- ✅ Compatible across both **[Remote & Local AI services](https://www.librechat.ai/docs/configuration/librechat_yaml/ai_endpoints):**
+  - groq, Ollama, Cohere, Mistral AI, Apple MLX, koboldcpp, OpenRouter, together.ai, Perplexity, ShuttleAI, and more
+- 💾 Create, Save, & Share Custom Presets
+- 🔀 Switch between AI Endpoints and Presets, mid-chat
+- 🔄 Edit, Resubmit, and Continue Messages with Conversation branching
+- 🌿 Fork Messages & Conversations for Advanced Context control
 - 💬 Multimodal Chat:
-  - Upload and analyze images with Claude 3, GPT-4, and Gemini Vision 📸
+  - Upload and analyze images with Claude 3, GPT-4 (including `gpt-4o`), and Gemini Vision 📸
   - Chat with Files using Custom Endpoints, OpenAI, Azure, Anthropic, & Google. 🗃️
   - Advanced Agents with Files, Code Interpreter, Tools, and API Actions 🔦
   - Available through the [OpenAI Assistants API](https://platform.openai.com/docs/assistants/overview) 🌤️
@@ -50,18 +58,22 @@
 - 🌎 Multilingual UI:
   - English, 中文, Deutsch, Español, Français, Italiano, Polski, Português Brasileiro,
   - Русский, 日本語, Svenska, 한국어, Tiếng Việt, 繁體中文, العربية, Türkçe, Nederlands, עברית
-- 🤖 AI model selection: OpenAI, Azure OpenAI, BingAI, ChatGPT, Google Vertex AI, Anthropic (Claude), Plugins, Assistants API (including Azure Assistants)
-- 💾 Create, Save, & Share Custom Presets
-- 🔄 Edit, Resubmit, and Continue messages with conversation branching
-- 📤 Export conversations as screenshots, markdown, text, json.
+- 🎨 Customizable Dropdown & Interface: Adapts to both power users and newcomers
+- 📧 Verify your email to ensure secure access
+- 🗣️ Chat hands-free with Speech-to-Text and Text-to-Speech magic
+  - Automatically send and play Audio
+  - Supports OpenAI, Azure OpenAI, and Elevenlabs
+- 📥 Import Conversations from LibreChat, ChatGPT, Chatbot UI
+- 📤 Export conversations as screenshots, markdown, text, json
 - 🔍 Search all messages/conversations
 - 🔌 Plugins, including web access, image generation with DALL-E-3 and more
 - 👥 Multi-User, Secure Authentication with Moderation and Token spend tools
-- ⚙️ Configure Proxy, Reverse Proxy, Docker, & many Deployment options
+- ⚙️ Configure Proxy, Reverse Proxy, Docker, & many Deployment options:
+  - Use completely local or deploy on the cloud
 - 📖 Completely Open-Source & Built in Public
 - 🧑🤝🧑 Community-driven development, support, and feedback
 
-[For a thorough review of our features, see our docs here](https://docs.librechat.ai/features/plugins/introduction.html) 📚
+[For a thorough review of our features, see our docs here](https://docs.librechat.ai/) 📚
 
 ## 🪶 All-In-One AI Conversations with LibreChat
 
@@ -69,37 +81,49 @@ LibreChat brings together the future of assistant AIs with the revolutionary tec
 
 With LibreChat, you no longer need to opt for ChatGPT Plus and can instead use free or pay-per-call APIs. We welcome contributions, cloning, and forking to enhance the capabilities of this advanced chatbot platform.
 
-<!-- https://github.com/danny-avila/LibreChat/assets/110412045/c1eb0c0f-41f6-4335-b982-84b278b53d59 -->
-[](https://youtu.be/pNIOs1ovsXw)
+[](https://www.youtube.com/watch?v=bSVHEbVPNl4)
+
 Click on the thumbnail to open the video☝️
 
 ---
 
-## 📚 Documentation
+## 🌐 Resources
 
-For more information on how to use our advanced features, install and configure our software, and access our guidelines and tutorials, please check out our documentation at [docs.librechat.ai](https://docs.librechat.ai)
+**GitHub Repo:**
+- **RAG API:** [github.com/danny-avila/rag_api](https://github.com/danny-avila/rag_api)
+- **Website:** [github.com/LibreChat-AI/librechat.ai](https://github.com/LibreChat-AI/librechat.ai)
+
+**Other:**
+- **Website:** [librechat.ai](https://librechat.ai)
+- **Documentation:** [docs.librechat.ai](https://docs.librechat.ai)
+- **Blog:** [blog.librechat.ai](https://docs.librechat.ai)
 
 ---
 
 ## 📝 Changelog
 
-Keep up with the latest updates by visiting the releases page - [Releases](https://github.com/danny-avila/LibreChat/releases)
+Keep up with the latest updates by visiting the releases page and notes:
+- [Releases](https://github.com/danny-avila/LibreChat/releases)
+- [Changelog](https://www.librechat.ai/changelog)
 
-**⚠️ [Breaking Changes](docs/general_info/breaking_changes.md)**
-Please consult the breaking changes before updating.
+**⚠️ Please consult the [changelog](https://www.librechat.ai/changelog) for breaking changes before updating.**
 
 ---
 
 ## ⭐ Star History
 
 <p align="center">
-<a href="https://trendshift.io/repositories/4685" target="_blank"><img src="https://trendshift.io/api/badge/repositories/4685" alt="danny-avila%2FLibreChat | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
+<a href="https://star-history.com/#danny-avila/LibreChat&Date">
+<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=danny-avila/LibreChat&type=Date&theme=dark" onerror="this.src='https://api.star-history.com/svg?repos=danny-avila/LibreChat&type=Date'" />
+</a>
+</p>
+<p align="center">
+<a href="https://trendshift.io/repositories/4685" target="_blank" style="padding: 10px;">
+<img src="https://trendshift.io/api/badge/repositories/4685" alt="danny-avila%2FLibreChat | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/>
+</a>
+<a href="https://runacap.com/ross-index/q1-24/" target="_blank" rel="noopener" style="margin-left: 20px;">
+<img style="width: 260px; height: 56px" src="https://runacap.com/wp-content/uploads/2024/04/ROSS_badge_white_Q1_2024.svg" alt="ROSS Index - Fastest Growing Open-Source Startups in Q1 2024 | Runa Capital" width="260" height="56"/>
+</a>
 </p>
 
-<a href="https://star-history.com/#danny-avila/LibreChat&Date">
-<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=danny-avila/LibreChat&type=Date&theme=dark" onerror="this.src='https://api.star-history.com/svg?repos=danny-avila/LibreChat&type=Date'" />
-</a>
-
 ---
 
```
**(unnamed file: `AnthropicClient`)**

```diff
@@ -1,4 +1,5 @@
 const Anthropic = require('@anthropic-ai/sdk');
+const { HttpsProxyAgent } = require('https-proxy-agent');
 const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
 const {
 getResponseSender,
@@ -7,10 +8,10 @@ const {
 } = require('librechat-data-provider');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
 const {
-titleFunctionPrompt,
-parseTitleFromPrompt,
 truncateText,
 formatMessage,
+titleFunctionPrompt,
+parseParamFromPrompt,
 createContextHandlers,
 } = require('./prompts');
 const spendTokens = require('~/models/spendTokens');
@@ -75,7 +76,9 @@ class AnthropicClient extends BaseClient {
 this.options.attachments?.then((attachments) => this.checkVisionRequest(attachments));
 
 this.maxContextTokens =
-getModelMaxTokens(this.modelOptions.model, EModelEndpoint.anthropic) ?? 100000;
+this.options.maxContextTokens ??
+getModelMaxTokens(this.modelOptions.model, EModelEndpoint.anthropic) ??
+100000;
 this.maxResponseTokens = this.modelOptions.maxOutputTokens || 1500;
 this.maxPromptTokens =
 this.options.maxPromptTokens || this.maxContextTokens - this.maxResponseTokens;
@@ -121,9 +124,14 @@ class AnthropicClient extends BaseClient {
 getClient() {
 /** @type {Anthropic.default.RequestOptions} */
 const options = {
+fetch: this.fetch,
 apiKey: this.apiKey,
 };
 
+if (this.options.proxy) {
+options.httpAgent = new HttpsProxyAgent(this.options.proxy);
+}
+
 if (this.options.reverseProxyUrl) {
 options.baseURL = this.options.reverseProxyUrl;
 }
@@ -652,9 +660,13 @@ class AnthropicClient extends BaseClient {
 
 getSaveOptions() {
 return {
+maxContextTokens: this.options.maxContextTokens,
 promptPrefix: this.options.promptPrefix,
 modelLabel: this.options.modelLabel,
 resendFiles: this.options.resendFiles,
+iconURL: this.options.iconURL,
+greeting: this.options.greeting,
+spec: this.options.spec,
 ...this.modelOptions,
 };
 }
@@ -742,7 +754,7 @@ class AnthropicClient extends BaseClient {
 context: 'title',
 });
 const text = response.content[0].text;
-title = parseTitleFromPrompt(text);
+title = parseParamFromPrompt(text, 'title');
 } catch (e) {
 logger.error('[AnthropicClient] There was an issue generating the title', e);
 }
```
**(unnamed file: `BaseClient`)**

```diff
@@ -1,6 +1,7 @@
 const crypto = require('crypto');
+const fetch = require('node-fetch');
 const { supportsBalanceCheck, Constants } = require('librechat-data-provider');
-const { getConvo, getMessages, saveMessage, updateMessage, saveConvo } = require('~/models');
+const { getMessages, saveMessage, updateMessage, saveConvo } = require('~/models');
 const { addSpaceIfNeeded, isEnabled } = require('~/server/utils');
 const checkBalance = require('~/models/checkBalance');
 const { getFiles } = require('~/models/File');
@@ -17,6 +18,15 @@ class BaseClient {
 month: 'long',
 day: 'numeric',
 });
+this.fetch = this.fetch.bind(this);
+/** @type {boolean} */
+this.skipSaveConvo = false;
+/** @type {boolean} */
+this.skipSaveUserMessage = false;
+/** @type {ClientDatabaseSavePromise} */
+this.userMessagePromise;
+/** @type {ClientDatabaseSavePromise} */
+this.responsePromise;
 }
 
 setOptions() {
@@ -54,6 +64,25 @@ class BaseClient {
 });
 }
 
+/**
+* Makes an HTTP request and logs the process.
+*
+* @param {RequestInfo} url - The URL to make the request to. Can be a string or a Request object.
+* @param {RequestInit} [init] - Optional init options for the request.
+* @returns {Promise<Response>} - A promise that resolves to the response of the fetch request.
+*/
+async fetch(_url, init) {
+let url = _url;
+if (this.options.directEndpoint) {
+url = this.options.reverseProxyUrl;
+}
+logger.debug(`Making request to ${url}`);
+if (typeof Bun !== 'undefined') {
+return await fetch(url, init);
+}
+return await fetch(url, init);
+}
+
 getBuildMessagesOptions() {
 throw new Error('Subclasses must implement getBuildMessagesOptions');
 }
@@ -63,19 +92,45 @@ class BaseClient {
 await stream.processTextStream(onProgress);
 }
 
+/**
+* @returns {[string|undefined, string|undefined]}
+*/
+processOverideIds() {
+/** @type {Record<string, string | undefined>} */
+let { overrideConvoId, overrideUserMessageId } = this.options?.req?.body ?? {};
+if (overrideConvoId) {
+const [conversationId, index] = overrideConvoId.split(Constants.COMMON_DIVIDER);
+overrideConvoId = conversationId;
+if (index !== '0') {
+this.skipSaveConvo = true;
+}
+}
+if (overrideUserMessageId) {
+const [userMessageId, index] = overrideUserMessageId.split(Constants.COMMON_DIVIDER);
+overrideUserMessageId = userMessageId;
+if (index !== '0') {
+this.skipSaveUserMessage = true;
+}
+}
+
+return [overrideConvoId, overrideUserMessageId];
+}
+
 async setMessageOptions(opts = {}) {
 if (opts && opts.replaceOptions) {
 this.setOptions(opts);
 }
 
+const [overrideConvoId, overrideUserMessageId] = this.processOverideIds();
 const { isEdited, isContinued } = opts;
 const user = opts.user ?? null;
 this.user = user;
 const saveOptions = this.getSaveOptions();
 this.abortController = opts.abortController ?? new AbortController();
-const conversationId = opts.conversationId ?? crypto.randomUUID();
+const conversationId = overrideConvoId ?? opts.conversationId ?? crypto.randomUUID();
 const parentMessageId = opts.parentMessageId ?? Constants.NO_PARENT;
-const userMessageId = opts.overrideParentMessageId ?? crypto.randomUUID();
+const userMessageId =
+overrideUserMessageId ?? opts.overrideParentMessageId ?? crypto.randomUUID();
 let responseMessageId = opts.responseMessageId ?? crypto.randomUUID();
 let head = isEdited ? responseMessageId : parentMessageId;
 this.currentMessages = (await this.loadHistory(conversationId, head)) ?? [];
@@ -139,7 +194,7 @@ class BaseClient {
 }
 
 if (typeof opts?.onStart === 'function') {
-opts.onStart(userMessage);
+opts.onStart(userMessage, responseMessageId);
 }
 
 return {
@@ -373,6 +428,14 @@ class BaseClient {
 const { user, head, isEdited, conversationId, responseMessageId, saveOptions, userMessage } =
 await this.handleStartMethods(message, opts);
 
+if (opts.progressCallback) {
+opts.onProgress = opts.progressCallback.call(null, {
+...(opts.progressOptions ?? {}),
+parentMessageId: userMessage.messageId,
+messageId: responseMessageId,
+});
+}
+
 const { generation = '' } = opts;
 
 // It's not necessary to push to currentMessages
@@ -421,8 +484,13 @@ class BaseClient {
 this.handleTokenCountMap(tokenCountMap);
 }
 
-if (!isEdited) {
-await this.saveMessageToDatabase(userMessage, saveOptions, user);
+if (!isEdited && !this.skipSaveUserMessage) {
+this.userMessagePromise = this.saveMessageToDatabase(userMessage, saveOptions, user);
+if (typeof opts?.getReqData === 'function') {
+opts.getReqData({
+userMessagePromise: this.userMessagePromise,
+});
+}
 }
 
 if (
@@ -456,6 +524,8 @@ class BaseClient {
 sender: this.sender,
 text: addSpaceIfNeeded(generation) + completion,
 promptTokens,
+iconURL: this.options.iconURL,
+endpoint: this.options.endpoint,
 ...(this.metadata ?? {}),
 };
 
@@ -469,15 +539,11 @@ class BaseClient {
 const completionTokens = this.getTokenCount(completion);
 await this.recordTokenUsage({ promptTokens, completionTokens });
 }
-await this.saveMessageToDatabase(responseMessage, saveOptions, user);
+this.responsePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user);
 delete responseMessage.tokenCount;
 return responseMessage;
 }
 
-async getConversation(conversationId, user = null) {
-return await getConvo(user, conversationId);
-}
-
 async loadHistory(conversationId, parentMessageId = null) {
 logger.debug('[BaseClient] Loading history:', { conversationId, parentMessageId });
 
@@ -525,14 +591,31 @@ class BaseClient {
 return _messages;
 }
 
+/**
+* Save a message to the database.
+* @param {TMessage} message
+* @param {Partial<TConversation>} endpointOptions
+* @param {string | null} user
+*/
 async saveMessageToDatabase(message, endpointOptions, user = null) {
-await saveMessage({ ...message, endpoint: this.options.endpoint, user, unfinished: false });
-await saveConvo(user, {
+const savedMessage = await saveMessage({
+...message,
+endpoint: this.options.endpoint,
+unfinished: false,
+user,
+});
+
+if (this.skipSaveConvo) {
+return { message: savedMessage };
+}
+
+const conversation = await saveConvo(user, {
 conversationId: message.conversationId,
 endpoint: this.options.endpoint,
 endpointType: this.options.endpointType,
 ...endpointOptions,
 });
+
+return { message: savedMessage, conversation };
 }
 
 async updateMessageInDatabase(message) {
@@ -556,11 +639,11 @@ class BaseClient {
 * the message is considered a root message.
 *
 * @param {Object} options - The options for the function.
-* @param {Array} options.messages - An array of message objects. Each object should have either an 'id' or 'messageId' property, and may have a 'parentMessageId' property.
+* @param {TMessage[]} options.messages - An array of message objects. Each object should have either an 'id' or 'messageId' property, and may have a 'parentMessageId' property.
 * @param {string} options.parentMessageId - The ID of the parent message to start the traversal from.
 * @param {Function} [options.mapMethod] - An optional function to map over the ordered messages. If provided, it will be applied to each message in the resulting array.
 * @param {boolean} [options.summary=false] - If set to true, the traversal modifies messages with 'summary' and 'summaryTokenCount' properties and stops at the message with a 'summary' property.
-* @returns {Array} An array containing the messages in the order they should be displayed, starting with the most recent message with a 'summary' property if the 'summary' option is true, and ending with the message identified by 'parentMessageId'.
+* @returns {TMessage[]} An array containing the messages in the order they should be displayed, starting with the most recent message with a 'summary' property if the 'summary' option is true, and ending with the message identified by 'parentMessageId'.
 */
 static getMessagesForConversation({
 messages,
```
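One behavioral consequence of the `BaseClient` hunks above: `saveMessageToDatabase` now resolves to an object rather than nothing, and the in-flight saves are kept on `userMessagePromise` / `responsePromise`. A minimal sketch of a hypothetical caller consuming the new return shape (only the `{ message, conversation }` shape and the `skipSaveConvo` behavior come from the diff; the surrounding function is invented for illustration):

```js
// Hypothetical helper; `client` is an instance of a BaseClient subclass.
async function persistAndReport(client, responseMessage, saveOptions, user) {
  const result = await client.saveMessageToDatabase(responseMessage, saveOptions, user);
  // When skipSaveConvo is set (override IDs with a non-zero index), only the
  // saved message is returned; otherwise the updated conversation comes back too.
  const { message, conversation } = result;
  return { message, conversation: conversation ?? null };
}
```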
**(unnamed file: `ChatGPTClient`)**

```diff
@@ -438,9 +438,17 @@ class ChatGPTClient extends BaseClient {
 
 if (message.eventType === 'text-generation' && message.text) {
 onTokenProgress(message.text);
-} else if (message.eventType === 'stream-end' && message.response) {
+reply += message.text;
+}
+/*
+Cohere API Chinese Unicode character replacement hotfix.
+Should be un-commented when the following issue is resolved:
+https://github.com/cohere-ai/cohere-typescript/issues/151
+
+else if (message.eventType === 'stream-end' && message.response) {
 reply = message.response.text;
 }
+*/
 }
 
 return reply;
```
@@ -16,10 +16,15 @@ const {
 AuthKeys,
 } = require('librechat-data-provider');
 const { encodeAndFormat } = require('~/server/services/Files/images');
-const { formatMessage, createContextHandlers } = require('./prompts');
 const { getModelMaxTokens } = require('~/utils');
-const BaseClient = require('./BaseClient');
 const { logger } = require('~/config');
+const {
+formatMessage,
+createContextHandlers,
+titleInstruction,
+truncateText,
+} = require('./prompts');
+const BaseClient = require('./BaseClient');
 
 const loc = 'us-central1';
 const publisher = 'google';
@@ -138,7 +143,10 @@ class GoogleClient extends BaseClient {
 !isGenerativeModel && !isChatModel && /code|text/.test(this.modelOptions.model);
 const { isTextModel } = this;
 
-this.maxContextTokens = getModelMaxTokens(this.modelOptions.model, EModelEndpoint.google);
+this.maxContextTokens =
+this.options.maxContextTokens ??
+getModelMaxTokens(this.modelOptions.model, EModelEndpoint.google);
+
 // The max prompt tokens is determined by the max context tokens minus the max response tokens.
 // Earlier messages will be dropped until the prompt is within the limit.
 this.maxResponseTokens = this.modelOptions.maxOutputTokens || settings.maxOutputTokens.default;
@@ -588,12 +596,16 @@ class GoogleClient extends BaseClient {
 createLLM(clientOptions) {
 const model = clientOptions.modelName ?? clientOptions.model;
 if (this.project_id && this.isTextModel) {
+logger.debug('Creating Google VertexAI client');
 return new GoogleVertexAI(clientOptions);
 } else if (this.project_id && this.isChatModel) {
+logger.debug('Creating Chat Google VertexAI client');
 return new ChatGoogleVertexAI(clientOptions);
 } else if (this.project_id) {
+logger.debug('Creating VertexAI client');
 return new ChatVertexAI(clientOptions);
 } else if (model.includes('1.5')) {
+logger.debug('Creating GenAI client');
 return new GenAI(this.apiKey).getGenerativeModel(
 {
 ...clientOptions,
@@ -603,6 +615,7 @@ class GoogleClient extends BaseClient {
 );
 }
 
+logger.debug('Creating Chat Google Generative AI client');
 return new ChatGoogleGenerativeAI({ ...clientOptions, apiKey: this.apiKey });
 }
 
@@ -677,26 +690,36 @@ class GoogleClient extends BaseClient {
 };
 }
 
+const safetySettings = _payload.safetySettings;
+requestOptions.safetySettings = safetySettings;
+
+const delay = modelName.includes('flash') ? 8 : 14;
 const result = await client.generateContentStream(requestOptions);
 for await (const chunk of result.stream) {
 const chunkText = chunk.text();
-this.generateTextStream(chunkText, onProgress, {
-delay: 12,
+await this.generateTextStream(chunkText, onProgress, {
+delay,
 });
 reply += chunkText;
 }
 return reply;
 }
 
+const safetySettings = _payload.safetySettings;
 const stream = await model.stream(messages, {
 signal: abortController.signal,
 timeout: 7000,
+safetySettings: safetySettings,
 });
 
+let delay = this.isGenerativeModel ? 12 : 8;
+if (modelName.includes('flash')) {
+delay = 5;
+}
 for await (const chunk of stream) {
 const chunkText = chunk?.content ?? chunk;
-this.generateTextStream(chunkText, onProgress, {
-delay: this.isGenerativeModel ? 12 : 8,
+await this.generateTextStream(chunkText, onProgress, {
+delay,
 });
 reply += chunkText;
 }
@@ -704,10 +727,130 @@ class GoogleClient extends BaseClient {
 return reply;
 }
 
+/**
+* Stripped-down logic for generating a title. This uses the non-streaming APIs, since the user does not see titles streaming
+*/
+async titleChatCompletion(_payload, options = {}) {
+const { abortController } = options;
+const { parameters, instances } = _payload;
+const { messages: _messages, examples: _examples } = instances?.[0] ?? {};
+
+let clientOptions = { ...parameters, maxRetries: 2 };
+
+logger.debug('Initialized title client options');
+
+if (this.project_id) {
+clientOptions['authOptions'] = {
+credentials: {
+...this.serviceKey,
+},
+projectId: this.project_id,
+};
+}
+
+if (!parameters) {
+clientOptions = { ...clientOptions, ...this.modelOptions };
+}
+
+if (this.isGenerativeModel && !this.project_id) {
+clientOptions.modelName = clientOptions.model;
+delete clientOptions.model;
+}
+
+const model = this.createLLM(clientOptions);
+
+let reply = '';
+const messages = this.isTextModel ? _payload.trim() : _messages;
+
+const modelName = clientOptions.modelName ?? clientOptions.model ?? '';
+if (modelName?.includes('1.5') && !this.project_id) {
+logger.debug('Identified titling model as 1.5 version');
+/** @type {GenerativeModel} */
+const client = model;
+const requestOptions = {
+contents: _payload,
+};
+
+if (this.options?.promptPrefix?.length) {
+requestOptions.systemInstruction = {
+parts: [
+{
+text: this.options.promptPrefix,
+},
+],
+};
+}
+
+const safetySettings = _payload.safetySettings;
+requestOptions.safetySettings = safetySettings;
+
+const result = await client.generateContent(requestOptions);
+
+reply = result.response?.text();
+
+return reply;
+} else {
+logger.debug('Beginning titling');
+const safetySettings = _payload.safetySettings;
+
+const titleResponse = await model.invoke(messages, {
+signal: abortController.signal,
+timeout: 7000,
+safetySettings: safetySettings,
+});
+
+reply = titleResponse.content;
+
+return reply;
+}
+}
+
+async titleConvo({ text, responseText = '' }) {
+let title = 'New Chat';
+const convo = `||>User:
+"${truncateText(text)}"
+||>Response:
+"${JSON.stringify(truncateText(responseText))}"`;
+
+let { prompt: payload } = await this.buildMessages([
+{
+text: `Please generate ${titleInstruction}
+
+${convo}
+
+||>Title:`,
+isCreatedByUser: true,
+author: this.userLabel,
+},
+]);
+
+if (this.isVisionModel) {
+logger.warn(
+`Current vision model does not support titling without an attachment; falling back to default model ${settings.model.default}`,
+);
+
+payload.parameters = { ...payload.parameters, model: settings.model.default };
+}
+
+try {
+title = await this.titleChatCompletion(payload, {
+abortController: new AbortController(),
+onProgress: () => {},
+});
+} catch (e) {
+logger.error('[GoogleClient] There was an issue generating the title', e);
+}
+logger.debug(`Title response: ${title}`);
+return title;
+}
+
 getSaveOptions() {
 return {
 promptPrefix: this.options.promptPrefix,
 modelLabel: this.options.modelLabel,
+iconURL: this.options.iconURL,
+greeting: this.options.greeting,
+spec: this.options.spec,
 ...this.modelOptions,
 };
 }
@@ -717,6 +860,33 @@ class GoogleClient extends BaseClient {
 }
 
 async sendCompletion(payload, opts = {}) {
+const modelName = payload.parameters?.model;
+
+if (modelName && modelName.toLowerCase().includes('gemini')) {
+const safetySettings = [
+{
+category: 'HARM_CATEGORY_SEXUALLY_EXPLICIT',
+threshold:
+process.env.GOOGLE_SAFETY_SEXUALLY_EXPLICIT || 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',
+},
+{
+category: 'HARM_CATEGORY_HATE_SPEECH',
+threshold: process.env.GOOGLE_SAFETY_HATE_SPEECH || 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',
+},
+{
+category: 'HARM_CATEGORY_HARASSMENT',
+threshold: process.env.GOOGLE_SAFETY_HARASSMENT || 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',
+},
+{
+category: 'HARM_CATEGORY_DANGEROUS_CONTENT',
+threshold:
+process.env.GOOGLE_SAFETY_DANGEROUS_CONTENT || 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',
+},
+];
+
+payload.safetySettings = safetySettings;
+}
+
 let reply = '';
 reply = await this.getCompletion(payload, opts);
 return reply.trim();
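Rough usage sketch (not part of the commit): the four GOOGLE_SAFETY_* environment variables shown above override the per-category threshold before the payload reaches getCompletion. The 'BLOCK_ONLY_HIGH' value and the model name below are assumptions for illustration, not values defined in this diff.

// Hedged sketch: relax one category via environment, leave the rest unspecified.
process.env.GOOGLE_SAFETY_HARASSMENT = 'BLOCK_ONLY_HIGH'; // assumed Google HarmBlockThreshold name

const payload = { parameters: { model: 'gemini-1.5-pro' } }; // illustrative model name
// After the block added in sendCompletion() runs, the payload carries:
// payload.safetySettings = [
//   { category: 'HARM_CATEGORY_SEXUALLY_EXPLICIT', threshold: 'HARM_BLOCK_THRESHOLD_UNSPECIFIED' },
//   { category: 'HARM_CATEGORY_HATE_SPEECH', threshold: 'HARM_BLOCK_THRESHOLD_UNSPECIFIED' },
//   { category: 'HARM_CATEGORY_HARASSMENT', threshold: 'BLOCK_ONLY_HIGH' },
//   { category: 'HARM_CATEGORY_DANGEROUS_CONTENT', threshold: 'HARM_BLOCK_THRESHOLD_UNSPECIFIED' },
// ];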
api/app/clients/OllamaClient.js (new file, 154 lines)
@@ -0,0 +1,154 @@
+const { z } = require('zod');
+const axios = require('axios');
+const { Ollama } = require('ollama');
+const { deriveBaseURL } = require('~/utils');
+const { logger } = require('~/config');
+
+const ollamaPayloadSchema = z.object({
+mirostat: z.number().optional(),
+mirostat_eta: z.number().optional(),
+mirostat_tau: z.number().optional(),
+num_ctx: z.number().optional(),
+repeat_last_n: z.number().optional(),
+repeat_penalty: z.number().optional(),
+temperature: z.number().optional(),
+seed: z.number().nullable().optional(),
+stop: z.array(z.string()).optional(),
+tfs_z: z.number().optional(),
+num_predict: z.number().optional(),
+top_k: z.number().optional(),
+top_p: z.number().optional(),
+stream: z.optional(z.boolean()),
+model: z.string(),
+});
+
+/**
+* @param {string} imageUrl
+* @returns {string}
+* @throws {Error}
+*/
+const getValidBase64 = (imageUrl) => {
+const parts = imageUrl.split(';base64,');
+
+if (parts.length === 2) {
+return parts[1];
+} else {
+logger.error('Invalid or no Base64 string found in URL.');
+}
+};
+
+class OllamaClient {
+constructor(options = {}) {
+const host = deriveBaseURL(options.baseURL ?? 'http://localhost:11434');
+/** @type {Ollama} */
+this.client = new Ollama({ host });
+}
+
+/**
+* Fetches Ollama models from the specified base API path.
+* @param {string} baseURL
+* @returns {Promise<string[]>} The Ollama models.
+*/
+static async fetchModels(baseURL) {
+let models = [];
+if (!baseURL) {
+return models;
+}
+try {
+const ollamaEndpoint = deriveBaseURL(baseURL);
+/** @type {Promise<AxiosResponse<OllamaListResponse>>} */
+const response = await axios.get(`${ollamaEndpoint}/api/tags`);
+models = response.data.models.map((tag) => tag.name);
+return models;
+} catch (error) {
+const logMessage =
+'Failed to fetch models from Ollama API. If you are not using Ollama directly, and instead, through some aggregator or reverse proxy that handles fetching via OpenAI spec, ensure the name of the endpoint doesn\'t start with `ollama` (case-insensitive).';
+logger.error(logMessage, error);
+return [];
+}
+}
+
+/**
+* @param {ChatCompletionMessage[]} messages
+* @returns {OllamaMessage[]}
+*/
+static formatOpenAIMessages(messages) {
+const ollamaMessages = [];
+
+for (const message of messages) {
+if (typeof message.content === 'string') {
+ollamaMessages.push({
+role: message.role,
+content: message.content,
+});
+continue;
+}
+
+let aggregatedText = '';
+let imageUrls = [];
+
+for (const content of message.content) {
+if (content.type === 'text') {
+aggregatedText += content.text + ' ';
+} else if (content.type === 'image_url') {
+imageUrls.push(getValidBase64(content.image_url.url));
+}
+}
+
+const ollamaMessage = {
+role: message.role,
+content: aggregatedText.trim(),
+};
+
+if (imageUrls.length > 0) {
+ollamaMessage.images = imageUrls;
+}
+
+ollamaMessages.push(ollamaMessage);
+}
+
+return ollamaMessages;
+}
+
+/***
+* @param {Object} params
+* @param {ChatCompletionPayload} params.payload
+* @param {onTokenProgress} params.onProgress
+* @param {AbortController} params.abortController
+*/
+async chatCompletion({ payload, onProgress, abortController = null }) {
+let intermediateReply = '';
+
+const parameters = ollamaPayloadSchema.parse(payload);
+const messages = OllamaClient.formatOpenAIMessages(payload.messages);
+
+if (parameters.stream) {
+const stream = await this.client.chat({
+messages,
+...parameters,
+});
+
+for await (const chunk of stream) {
+const token = chunk.message.content;
+intermediateReply += token;
+onProgress(token);
+if (abortController.signal.aborted) {
+stream.controller.abort();
+break;
+}
+}
+}
+// TODO: regular completion
+else {
+// const generation = await this.client.generate(payload);
+}
+
+return intermediateReply;
+}
+catch(err) {
+logger.error('[OllamaClient.chatCompletion]', err);
+throw err;
+}
+}
+
+module.exports = { OllamaClient, ollamaPayloadSchema };
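A minimal usage sketch of the new OllamaClient (not part of the commit), based only on the constructor, fetchModels, and chatCompletion shown above; the base URL and the 'llama3' fallback model name are illustrative assumptions.

const { OllamaClient } = require('./OllamaClient');

async function demo() {
  // Returns [] when the endpoint is unreachable or baseURL is empty.
  const models = await OllamaClient.fetchModels('http://localhost:11434');

  const client = new OllamaClient({ baseURL: 'http://localhost:11434' });
  const reply = await client.chatCompletion({
    payload: {
      model: models[0] ?? 'llama3', // hypothetical fallback model name
      stream: true,
      messages: [{ role: 'user', content: 'Hello!' }],
    },
    onProgress: (token) => process.stdout.write(token),
    abortController: new AbortController(),
  });
  return reply;
}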
@@ -1,6 +1,8 @@
 const OpenAI = require('openai');
+const { OllamaClient } = require('./OllamaClient');
 const { HttpsProxyAgent } = require('https-proxy-agent');
 const {
+Constants,
 ImageDetail,
 EModelEndpoint,
 resolveHeaders,
@@ -20,16 +22,16 @@ const {
 const {
 truncateText,
 formatMessage,
-createContextHandlers,
 CUT_OFF_PROMPT,
 titleInstruction,
+createContextHandlers,
 } = require('./prompts');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
+const { isEnabled, sleep } = require('~/server/utils');
 const { handleOpenAIErrors } = require('./tools/util');
 const spendTokens = require('~/models/spendTokens');
 const { createLLM, RunManager } = require('./llm');
 const ChatGPTClient = require('./ChatGPTClient');
-const { isEnabled } = require('~/server/utils');
 const { summaryBuffer } = require('./memory');
 const { runTitleChain } = require('./chains');
 const { tokenSplit } = require('./document');
@@ -127,6 +129,10 @@ class OpenAIClient extends BaseClient {
 this.useOpenRouter = true;
 }
 
+if (this.options.endpoint?.toLowerCase() === 'ollama') {
+this.isOllama = true;
+}
+
 this.FORCE_PROMPT =
 isEnabled(OPENAI_FORCE_PROMPT) ||
 (reverseProxy && reverseProxy.includes('completions') && !reverseProxy.includes('chat'));
@@ -159,11 +165,13 @@ class OpenAIClient extends BaseClient {
 model.startsWith('text-chat') || model.startsWith('text-davinci-002-render');
 
 this.maxContextTokens =
+this.options.maxContextTokens ??
 getModelMaxTokens(
 model,
 this.options.endpointType ?? this.options.endpoint,
 this.options.endpointTokenConfig,
-) ?? 4095; // 1 less than maximum
+) ??
+4095; // 1 less than maximum
 
 if (this.shouldSummarize) {
 this.maxContextTokens = Math.floor(this.maxContextTokens / 2);
@@ -200,16 +208,6 @@ class OpenAIClient extends BaseClient {
 
 this.setupTokens();
 
-if (!this.modelOptions.stop && !this.isVisionModel) {
-const stopTokens = [this.startToken];
-if (this.endToken && this.endToken !== this.startToken) {
-stopTokens.push(this.endToken);
-}
-stopTokens.push(`\n${this.userLabel}:`);
-stopTokens.push('<|diff_marker|>');
-this.modelOptions.stop = stopTokens;
-}
-
 if (reverseProxy) {
 this.completionsUrl = reverseProxy;
 this.langchainProxy = extractBaseURL(reverseProxy);
@@ -243,23 +241,52 @@ class OpenAIClient extends BaseClient {
 * @param {MongoFile[]} attachments
 */
 checkVisionRequest(attachments) {
-const availableModels = this.options.modelsConfig?.[this.options.endpoint];
-this.isVisionModel = validateVisionModel({ model: this.modelOptions.model, availableModels });
-
-const visionModelAvailable = availableModels?.includes(this.defaultVisionModel);
-if (
-attachments &&
-attachments.some((file) => file?.type && file?.type?.includes('image')) &&
-visionModelAvailable &&
-!this.isVisionModel
-) {
-this.modelOptions.model = this.defaultVisionModel;
-this.isVisionModel = true;
+if (!attachments) {
+return;
 }
 
+const availableModels = this.options.modelsConfig?.[this.options.endpoint];
+if (!availableModels) {
+return;
+}
+
+let visionRequestDetected = false;
+for (const file of attachments) {
+if (file?.type?.includes('image')) {
+visionRequestDetected = true;
+break;
+}
+}
+if (!visionRequestDetected) {
+return;
+}
+
+this.isVisionModel = validateVisionModel({ model: this.modelOptions.model, availableModels });
 if (this.isVisionModel) {
 delete this.modelOptions.stop;
+return;
 }
+
+for (const model of availableModels) {
+if (!validateVisionModel({ model, availableModels })) {
+continue;
+}
+this.modelOptions.model = model;
+this.isVisionModel = true;
+delete this.modelOptions.stop;
+return;
+}
+
+if (!availableModels.includes(this.defaultVisionModel)) {
+return;
+}
+if (!validateVisionModel({ model: this.defaultVisionModel, availableModels })) {
+return;
+}
+
+this.modelOptions.model = this.defaultVisionModel;
+this.isVisionModel = true;
+delete this.modelOptions.stop;
 }
 
 setupTokens() {
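Illustrative sketch (not part of the commit) of the new fallback order in checkVisionRequest; the option values mirror the test added near the end of this diff, and the behavior summary is an interpretation of the code above: the current model is validated first, then each available model, then the configured default vision model.

// Mirrors the 'ollama' test case further down in this diff.
const client = new OpenAIClient('test-api-key', {
  endpoint: 'ollama',
  modelOptions: { model: 'initial-model' },
  modelsConfig: { ollama: ['initial-model', 'llava', 'other-model'] },
});
client.defaultVisionModel = 'non-valid-default-model';

client.checkVisionRequest([{ type: 'image/png' }]);
// 'initial-model' fails vision validation, so the first valid candidate is used:
// client.modelOptions.model === 'llava', client.isVisionModel === true,
// and modelOptions.stop has been deleted.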
@@ -281,7 +308,7 @@ class OpenAIClient extends BaseClient {
 let tokenizer;
 this.encoding = 'text-davinci-003';
 if (this.isChatCompletion) {
-this.encoding = 'cl100k_base';
+this.encoding = this.modelOptions.model.includes('gpt-4o') ? 'o200k_base' : 'cl100k_base';
 tokenizer = this.constructor.getTokenizer(this.encoding);
 } else if (this.isUnofficialChatGptModel) {
 const extendSpecialTokens = {
@@ -386,10 +413,14 @@ class OpenAIClient extends BaseClient {
 
 getSaveOptions() {
 return {
+maxContextTokens: this.options.maxContextTokens,
 chatGptLabel: this.options.chatGptLabel,
 promptPrefix: this.options.promptPrefix,
 resendFiles: this.options.resendFiles,
 imageDetail: this.options.imageDetail,
+iconURL: this.options.iconURL,
+greeting: this.options.greeting,
+spec: this.options.spec,
 ...this.modelOptions,
 };
 }
@@ -411,7 +442,11 @@ class OpenAIClient extends BaseClient {
 * @returns {Promise<MongoFile[]>}
 */
 async addImageURLs(message, attachments) {
-const { files, image_urls } = await encodeAndFormat(this.options.req, attachments);
+const { files, image_urls } = await encodeAndFormat(
+this.options.req,
+attachments,
+this.options.endpoint,
+);
 message.image_urls = image_urls.length ? image_urls : undefined;
 return files;
 }
@@ -553,7 +588,7 @@ class OpenAIClient extends BaseClient {
 let streamResult = null;
 this.modelOptions.user = this.user;
 const invalidBaseUrl = this.completionsUrl && extractBaseURL(this.completionsUrl) === null;
-const useOldMethod = !!(invalidBaseUrl || !this.isChatCompletion || typeof Bun !== 'undefined');
+const useOldMethod = !!(invalidBaseUrl || !this.isChatCompletion);
 if (typeof opts.onProgress === 'function' && useOldMethod) {
 const completionResult = await this.getCompletion(
 payload,
@@ -721,6 +756,12 @@ class OpenAIClient extends BaseClient {
 * In case of failure, it will return the default title, "New Chat".
 */
 async titleConvo({ text, conversationId, responseText = '' }) {
+this.conversationId = conversationId;
+
+if (this.options.attachments) {
+delete this.options.attachments;
+}
+
 let title = 'New Chat';
 const convo = `||>User:
 "${truncateText(text)}"
@@ -729,7 +770,10 @@ class OpenAIClient extends BaseClient {
 
 const { OPENAI_TITLE_MODEL } = process.env ?? {};
 
-const model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo';
+let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo';
+if (model === Constants.CURRENT_MODEL) {
+model = this.modelOptions.model;
+}
 
 const modelOptions = {
 // TODO: remove the gpt fallback and make it specific to endpoint
@@ -783,7 +827,7 @@ class OpenAIClient extends BaseClient {
 
 const instructionsPayload = [
 {
-role: 'system',
+role: this.options.titleMessageRole ?? 'system',
 content: `Please generate ${titleInstruction}
 
 ${convo}
@@ -796,13 +840,17 @@ ${convo}
 
 try {
 let useChatCompletion = true;
+
 if (this.options.reverseProxyUrl === CohereConstants.API_URL) {
 useChatCompletion = false;
 }
+
 title = (
 await this.sendPayload(instructionsPayload, { modelOptions, useChatCompletion })
 ).replaceAll('"', '');
+
 const completionTokens = this.getTokenCount(title);
+
 this.recordTokenUsage({ promptTokens, completionTokens, context: 'title' });
 } catch (e) {
 logger.error(
@@ -826,6 +874,7 @@ ${convo}
 context: 'title',
 tokenBuffer: 150,
 });
+
 title = await runTitleChain({ llm, text, convo, signal: this.abortController.signal });
 } catch (e) {
 if (e?.message?.toLowerCase()?.includes('abort')) {
@@ -851,7 +900,11 @@ ${convo}
 
 // TODO: remove the gpt fallback and make it specific to endpoint
 const { OPENAI_SUMMARY_MODEL = 'gpt-3.5-turbo' } = process.env ?? {};
-const model = this.options.summaryModel ?? OPENAI_SUMMARY_MODEL;
+let model = this.options.summaryModel ?? OPENAI_SUMMARY_MODEL;
+if (model === Constants.CURRENT_MODEL) {
+model = this.modelOptions.model;
+}
 
 const maxContextTokens =
 getModelMaxTokens(
 model,
@@ -959,9 +1012,9 @@ ${convo}
 await spendTokens(
 {
 context,
-user: this.user,
 model: this.modelOptions.model,
 conversationId: this.conversationId,
+user: this.user ?? this.options.req.user?.id,
 endpointTokenConfig: this.options.endpointTokenConfig,
 },
 { promptTokens, completionTokens },
@@ -1053,7 +1106,12 @@ ${convo}
 }
 
 if (this.azure || this.options.azure) {
-// Azure does not accept `model` in the body, so we need to remove it.
+/* Azure Bug, extremely short default `max_tokens` response */
+if (!modelOptions.max_tokens && modelOptions.model === 'gpt-4-vision-preview') {
+modelOptions.max_tokens = 4000;
+}
+
+/* Azure does not accept `model` in the body, so we need to remove it. */
 delete modelOptions.model;
 
 opts.baseURL = this.langchainProxy
@@ -1074,15 +1132,13 @@ ${convo}
 let chatCompletion;
 /** @type {OpenAI} */
 const openai = new OpenAI({
+fetch: this.fetch,
 apiKey: this.apiKey,
 ...opts,
 });
 
-/* hacky fixes for Mistral AI API:
-- Re-orders system message to the top of the messages payload, as not allowed anywhere else
-- If there is only one message and it's a system message, change the role to user
-*/
-if (opts.baseURL.includes('https://api.mistral.ai/v1') && modelOptions.messages) {
+/* Re-orders system message to the top of the messages payload, as not allowed anywhere else */
+if (modelOptions.messages && (opts.baseURL.includes('api.mistral.ai') || this.isOllama)) {
 const { messages } = modelOptions;
 
 const systemMessageIndex = messages.findIndex((msg) => msg.role === 'system');
@@ -1093,10 +1149,16 @@ ${convo}
 }
 
 modelOptions.messages = messages;
+}
 
-if (messages.length === 1 && messages[0].role === 'system') {
-modelOptions.messages[0].role = 'user';
-}
+/* If there is only one message and it's a system message, change the role to user */
+if (
+(opts.baseURL.includes('api.mistral.ai') || opts.baseURL.includes('api.perplexity.ai')) &&
+modelOptions.messages &&
+modelOptions.messages.length === 1 &&
+modelOptions.messages[0]?.role === 'system'
+) {
+modelOptions.messages[0].role = 'user';
 }
 
 if (this.options.addParams && typeof this.options.addParams === 'object') {
@@ -1120,6 +1182,15 @@ ${convo}
 });
 }
 
+if (this.message_file_map && this.isOllama) {
+const ollamaClient = new OllamaClient({ baseURL });
+return await ollamaClient.chatCompletion({
+payload: modelOptions,
+onProgress,
+abortController,
+});
+}
+
 let UnexpectedRoleError = false;
 if (modelOptions.stream) {
 const stream = await openai.beta.chat.completions
@@ -1150,6 +1221,8 @@ ${convo}
 }
 });
 
+const azureDelay = this.modelOptions.model?.includes('gpt-4') ? 30 : 17;
+
 for await (const chunk of stream) {
 const token = chunk.choices[0]?.delta?.content || '';
 intermediateReply += token;
@@ -1158,6 +1231,10 @@ ${convo}
 stream.controller.abort();
 break;
 }
+
+if (this.azure) {
+await sleep(azureDelay);
+}
 }
 
 if (!UnexpectedRoleError) {
@@ -42,8 +42,12 @@ class PluginsClient extends OpenAIClient {
 return {
 chatGptLabel: this.options.chatGptLabel,
 promptPrefix: this.options.promptPrefix,
+tools: this.options.tools,
 ...this.modelOptions,
 agentOptions: this.agentOptions,
+iconURL: this.options.iconURL,
+greeting: this.options.greeting,
+spec: this.options.spec,
 };
 }
 
@@ -144,9 +148,11 @@ class PluginsClient extends OpenAIClient {
 signal,
 pastMessages,
 tools: this.tools,
-currentDateString: this.currentDateString,
 verbose: this.options.debug,
 returnIntermediateSteps: true,
+customName: this.options.chatGptLabel,
+currentDateString: this.currentDateString,
+customInstructions: this.options.promptPrefix,
 callbackManager: CallbackManager.fromHandlers({
 async handleAgentAction(action, runId) {
 handleAction(action, runId, onAgentAction);
@@ -232,18 +238,30 @@ class PluginsClient extends OpenAIClient {
 await this.recordTokenUsage(responseMessage);
 }
 
-await this.saveMessageToDatabase(responseMessage, saveOptions, user);
+this.responsePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user);
 delete responseMessage.tokenCount;
 return { ...responseMessage, ...result };
 }
 
 async sendMessage(message, opts = {}) {
+/** @type {{ filteredTools: string[], includedTools: string[] }} */
+const { filteredTools = [], includedTools = [] } = this.options.req.app.locals;
+
+if (includedTools.length > 0) {
+const tools = this.options.tools.filter((plugin) => includedTools.includes(plugin));
+this.options.tools = tools;
+} else {
+const tools = this.options.tools.filter((plugin) => !filteredTools.includes(plugin));
+this.options.tools = tools;
+}
+
 // If a message is edited, no tools can be used.
 const completionMode = this.options.tools.length === 0 || opts.isEdited;
 if (completionMode) {
 this.setOptions(opts);
 return super.sendMessage(message, opts);
 }
 
 logger.debug('[PluginsClient] sendMessage', { userMessageText: message, opts });
 const {
 user,
@@ -258,6 +276,14 @@ class PluginsClient extends OpenAIClient {
 onToolEnd,
 } = await this.handleStartMethods(message, opts);
+
+if (opts.progressCallback) {
+opts.onProgress = opts.progressCallback.call(null, {
+...(opts.progressOptions ?? {}),
+parentMessageId: userMessage.messageId,
+messageId: responseMessageId,
+});
+}
 
 this.currentMessages.push(userMessage);
 
 let {
@@ -286,7 +312,15 @@ class PluginsClient extends OpenAIClient {
 if (payload) {
 this.currentMessages = payload;
 }
-await this.saveMessageToDatabase(userMessage, saveOptions, user);
+
+if (!this.skipSaveUserMessage) {
+this.userMessagePromise = this.saveMessageToDatabase(userMessage, saveOptions, user);
+if (typeof opts?.getReqData === 'function') {
+opts.getReqData({
+userMessagePromise: this.userMessagePromise,
+});
+}
+}
 
 if (isEnabled(process.env.CHECK_BALANCE)) {
 await checkBalance({
@@ -304,6 +338,8 @@ class PluginsClient extends OpenAIClient {
 }
 
 const responseMessage = {
+endpoint: EModelEndpoint.gptPlugins,
+iconURL: this.options.iconURL,
 messageId: responseMessageId,
 conversationId,
 parentMessageId: userMessage.messageId,
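Illustrative sketch (not part of the commit) of the new tool filtering in PluginsClient.sendMessage: includedTools acts as an allow-list and takes precedence; otherwise filteredTools is applied as a deny-list. The tool names and locals values below are hypothetical.

const locals = { filteredTools: ['dalle'], includedTools: [] }; // stands in for req.app.locals
let tools = ['dalle', 'calculator', 'serpapi']; // hypothetical plugin keys

const { filteredTools = [], includedTools = [] } = locals;
if (includedTools.length > 0) {
  tools = tools.filter((plugin) => includedTools.includes(plugin));
} else {
  tools = tools.filter((plugin) => !filteredTools.includes(plugin));
}
// tools === ['calculator', 'serpapi']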
@@ -13,10 +13,18 @@ const initializeCustomAgent = async ({
 tools,
 model,
 pastMessages,
+customName,
+customInstructions,
 currentDateString,
 ...rest
 }) => {
 let prompt = CustomAgent.createPrompt(tools, { currentDateString, model: model.modelName });
+if (customName) {
+prompt = `You are "${customName}".\n${prompt}`;
+}
+if (customInstructions) {
+prompt = `${prompt}\n${customInstructions}`;
+}
 
 const chatPrompt = ChatPromptTemplate.fromMessages([
 new SystemMessagePromptTemplate(prompt),
@@ -1,44 +1,3 @@
-/*
-module.exports = `You are ChatGPT, a Large Language model with useful tools.
-
-Talk to the human and provide meaningful answers when questions are asked.
-
-Use the tools when you need them, but use your own knowledge if you are confident of the answer. Keep answers short and concise.
-
-A tool is not usually needed for creative requests, so do your best to answer them without tools.
-
-Avoid repeating identical answers if it appears before. Only fulfill the human's requests, do not create extra steps beyond what the human has asked for.
-
-Your input for 'Action' should be the name of tool used only.
-
-Be honest. If you can't answer something, or a tool is not appropriate, say you don't know or answer to the best of your ability.
-
-Attempt to fulfill the human's requests in as few actions as possible`;
-*/
-
-// module.exports = `You are ChatGPT, a highly knowledgeable and versatile large language model.
-
-// Engage with the Human conversationally, providing concise and meaningful answers to questions. Utilize built-in tools when necessary, except for creative requests, where relying on your own knowledge is preferred. Aim for variety and avoid repetitive answers.
-
-// For your 'Action' input, state the name of the tool used only, and honor user requests without adding extra steps. Always be honest; if you cannot provide an appropriate answer or tool, admit that or do your best.
-
-// Strive to meet the user's needs efficiently with minimal actions.`;
-
-// import {
-// BasePromptTemplate,
-// BaseStringPromptTemplate,
-// SerializedBasePromptTemplate,
-// renderTemplate,
-// } from "langchain/prompts";
-
-// prefix: `You are ChatGPT, a highly knowledgeable and versatile large language model.
-// Your objective is to help users by understanding their intent and choosing the best action. Prioritize direct, specific responses. Use concise, varied answers and rely on your knowledge for creative tasks. Utilize tools when needed, and structure results for machine compatibility.
-// prefix: `Objective: to comprehend human intentions based on user input and available tools. Goal: identify the best action to directly address the human's query. In your subsequent steps, you will utilize the chosen action. You may select multiple actions and list them in a meaningful order. Prioritize actions that directly relate to the user's query over general ones. Ensure that the generated thought is highly specific and explicit to best match the user's expectations. Construct the result in a manner that an online open-API would most likely expect. Provide concise and meaningful answers to human queries. Utilize tools when necessary. Relying on your own knowledge is preferred for creative requests. Aim for variety and avoid repetitive answers.
-
-// # Available Actions & Tools:
-// N/A: no suitable action, use your own knowledge.`,
-// suffix: `Remember, all your responses MUST adhere to the described format and only respond if the format is followed. Output exactly with the requested format, avoiding any other text as this will be parsed by a machine. Following 'Action:', provide only one of the actions listed above. If a tool is not necessary, deduce this quickly and finish your response. Honor the human's requests without adding extra steps. Carry out tasks in the sequence written by the human. Always be honest; if you cannot provide an appropriate answer or tool, do your best with your own knowledge. Strive to meet the user's needs efficiently with minimal actions.`;
-
 module.exports = {
 'gpt3-v1': {
 prefix: `Objective: Understand human intentions using user input and available tools. Goal: Identify the most suitable actions to directly address user queries.
@@ -10,6 +10,8 @@ const initializeFunctionsAgent = async ({
 tools,
 model,
 pastMessages,
+customName,
+customInstructions,
 currentDateString,
 ...rest
 }) => {
@@ -24,7 +26,13 @@ const initializeFunctionsAgent = async ({
 returnMessages: true,
 });
 
-const prefix = addToolDescriptions(`Current Date: ${currentDateString}\n${PREFIX}`, tools);
+let prefix = addToolDescriptions(`Current Date: ${currentDateString}\n${PREFIX}`, tools);
+if (customName) {
+prefix = `You are "${customName}".\n${prefix}`;
+}
+if (customInstructions) {
+prefix = `${prefix}\n${customInstructions}`;
+}
 
 return await initializeAgentExecutorWithOptions(tools, model, {
 agentType: 'openai-functions',
@@ -8,8 +8,6 @@ In your response, remember to follow these guidelines:
 - If you don't know the answer, simply say that you don't know.
 - If you are unsure how to answer, ask for clarification.
 - Avoid mentioning that you obtained the information from the context.
-
-Answer appropriately in the user's language.
 `;
 
 function createContextHandlers(req, userMessageContent) {
@@ -94,37 +92,40 @@ function createContextHandlers(req, userMessageContent) {
 
 const resolvedQueries = await Promise.all(queryPromises);
 
-const context = resolvedQueries
-.map((queryResult, index) => {
-const file = processedFiles[index];
-let contextItems = queryResult.data;
+const context =
+resolvedQueries.length === 0
+? '\n\tThe semantic search did not return any results.'
+: resolvedQueries
+.map((queryResult, index) => {
+const file = processedFiles[index];
+let contextItems = queryResult.data;
 
 const generateContext = (currentContext) =>
 `
 <file>
 <filename>${file.filename}</filename>
 <context>${currentContext}
 </context>
 </file>`;
 
 if (useFullContext) {
 return generateContext(`\n${contextItems}`);
 }
 
 contextItems = queryResult.data
 .map((item) => {
 const pageContent = item[0].page_content;
 return `
 <contextItem>
 <![CDATA[${pageContent?.trim()}]]>
 </contextItem>`;
+})
+.join('');
+
+return generateContext(contextItems);
 })
 .join('');
-
-return generateContext(contextItems);
-})
-.join('');
 
 if (useFullContext) {
 const prompt = `${header}
 ${context}
@@ -28,7 +28,7 @@ ${convo}`,
 };
 
 const titleInstruction =
-'a concise, 5-word-or-less title for the conversation, using its same language, with no punctuation. Apply title case conventions appropriate for the language. For English, use AP Stylebook Title Case. Never directly mention the language name or the word "title"';
+'a concise, 5-word-or-less title for the conversation, using its same language, with no punctuation. Apply title case conventions appropriate for the language. Never directly mention the language name or the word "title"';
 const titleFunctionPrompt = `In this environment you have access to a set of tools you can use to generate the conversation title.
 
 You may call them like this:
@@ -59,25 +59,57 @@ Submit a brief title in the conversation's language, following the parameter des
 </tool_description>
 </tools>`;
 
+const genTranslationPrompt = (
+translationPrompt,
+) => `In this environment you have access to a set of tools you can use to translate text.
+
+You may call them like this:
+<function_calls>
+<invoke>
+<tool_name>$TOOL_NAME</tool_name>
+<parameters>
+<$PARAMETER_NAME>$PARAMETER_VALUE</$PARAMETER_NAME>
+...
+</parameters>
+</invoke>
+</function_calls>
+
+Here are the tools available:
+<tools>
+<tool_description>
+<tool_name>submit_translation</tool_name>
+<description>
+Submit a translation in the target language, following the parameter description and its language closely.
+</description>
+<parameters>
+<parameter>
+<name>translation</name>
+<type>string</type>
+<description>${translationPrompt}
+ONLY include the generated translation without quotations, nor its related key</description>
+</parameter>
+</parameters>
+</tool_description>
+</tools>`;
+
 /**
-* Parses titles from title functions based on the provided prompt.
-* @param {string} prompt - The prompt containing the title function.
-* @returns {string} The parsed title. "New Chat" if no title is found.
+* Parses specified parameter from the provided prompt.
+* @param {string} prompt - The prompt containing the desired parameter.
+* @param {string} paramName - The name of the parameter to extract.
+* @returns {string} The parsed parameter's value or a default value if not found.
 */
-function parseTitleFromPrompt(prompt) {
-const titleRegex = /<title>(.+?)<\/title>/;
-const titleMatch = prompt.match(titleRegex);
+function parseParamFromPrompt(prompt, paramName) {
+const paramRegex = new RegExp(`<${paramName}>([\\s\\S]+?)</${paramName}>`);
+const paramMatch = prompt.match(paramRegex);
 
-if (titleMatch && titleMatch[1]) {
-const title = titleMatch[1].trim();
-
-// // Capitalize the first letter of each word; Note: unnecessary due to title case prompting
-// const capitalizedTitle = title.replace(/\b\w/g, (char) => char.toUpperCase());
-
-return title;
+if (paramMatch && paramMatch[1]) {
+return paramMatch[1].trim();
 }
 
-return 'New Chat';
+if (prompt && prompt.length) {
+return `NO TOOL INVOCATION: ${prompt}`;
+}
+return `No ${paramName} provided`;
 }
@@ -85,5 +117,6 @@ module.exports = {
 titleInstruction,
 createTitlePrompt,
 titleFunctionPrompt,
-parseTitleFromPrompt,
+parseParamFromPrompt,
+genTranslationPrompt,
 };
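Illustrative sketch (not part of the commit) of the generalized parser above; the require path is an assumption about where the prompt helpers live, and the completion text is invented for the example.

const { parseParamFromPrompt } = require('./prompts/titlePrompts'); // assumed path

const completion = `<function_calls>
<invoke>
<tool_name>submit_title</tool_name>
<parameters>
<title>Weekly Budget Review</title>
</parameters>
</invoke>
</function_calls>`;

parseParamFromPrompt(completion, 'title'); // 'Weekly Budget Review'
parseParamFromPrompt('no tool call here', 'title'); // 'NO TOOL INVOCATION: no tool call here'
parseParamFromPrompt('', 'title'); // 'No title provided'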
@@ -576,7 +576,11 @@ describe('BaseClient', () => {
 const onStart = jest.fn();
 const opts = { onStart };
 await TestClient.sendMessage('Hello, world!', opts);
-expect(onStart).toHaveBeenCalledWith(expect.objectContaining({ text: 'Hello, world!' }));
+expect(onStart).toHaveBeenCalledWith(
+expect.objectContaining({ text: 'Hello, world!' }),
+expect.any(String),
+);
 });
 
 test('saveMessageToDatabase is called with the correct arguments', async () => {
@@ -40,7 +40,8 @@ class FakeClient extends BaseClient {
 };
 }
 
-this.maxContextTokens = getModelMaxTokens(this.modelOptions.model) ?? 4097;
+this.maxContextTokens =
+this.options.maxContextTokens ?? getModelMaxTokens(this.modelOptions.model) ?? 4097;
 }
 buildMessages() {}
 getTokenCount(str) {
@@ -144,6 +144,7 @@ describe('OpenAIClient', () => {
 
 const defaultOptions = {
 // debug: true,
+req: {},
 openaiApiKey: 'new-api-key',
 modelOptions: {
 model,
@@ -157,12 +158,19 @@ describe('OpenAIClient', () => {
 azureOpenAIApiVersion: '2020-07-01-preview',
 };
 
+let originalWarn;
+
 beforeAll(() => {
-jest.spyOn(console, 'warn').mockImplementation(() => {});
+originalWarn = console.warn;
+console.warn = jest.fn();
 });
 
 afterAll(() => {
-console.warn.mockRestore();
+console.warn = originalWarn;
+});
+
+beforeEach(() => {
+console.warn.mockClear();
 });
 
 beforeEach(() => {
@@ -662,4 +670,35 @@ describe('OpenAIClient', () => {
 expect(constructorArgs.baseURL).toBe(expectedURL);
 });
 });
+
+describe('checkVisionRequest functionality', () => {
+let client;
+const attachments = [{ type: 'image/png' }];
+
+beforeEach(() => {
+client = new OpenAIClient('test-api-key', {
+endpoint: 'ollama',
+modelOptions: {
+model: 'initial-model',
+},
+modelsConfig: {
+ollama: ['initial-model', 'llava', 'other-model'],
+},
+});
+
+client.defaultVisionModel = 'non-valid-default-model';
+});
+
+afterEach(() => {
+jest.restoreAllMocks();
+});
+
+it('should set "llava" as the model if it is the first valid model when default validation fails', () => {
+client.checkVisionRequest(attachments);
+
+expect(client.modelOptions.model).toBe('llava');
+expect(client.isVisionModel).toBeTruthy();
+expect(client.modelOptions.stop).toBeUndefined();
+});
+});
 });
@@ -194,6 +194,7 @@ describe('PluginsClient', () => {
       expect(client.getFunctionModelName('')).toBe('gpt-3.5-turbo');
     });
   });
+
   describe('Azure OpenAI tests specific to Plugins', () => {
     // TODO: add more tests for Azure OpenAI integration with Plugins
     // let client;
@@ -220,4 +221,94 @@ describe('PluginsClient', () => {
       spy.mockRestore();
     });
   });
+
+  describe('sendMessage with filtered tools', () => {
+    let TestAgent;
+    const apiKey = 'fake-api-key';
+    const mockTools = [{ name: 'tool1' }, { name: 'tool2' }, { name: 'tool3' }, { name: 'tool4' }];
+
+    beforeEach(() => {
+      TestAgent = new PluginsClient(apiKey, {
+        tools: mockTools,
+        modelOptions: {
+          model: 'gpt-3.5-turbo',
+          temperature: 0,
+          max_tokens: 2,
+        },
+        agentOptions: {
+          model: 'gpt-3.5-turbo',
+        },
+      });
+
+      TestAgent.options.req = {
+        app: {
+          locals: {},
+        },
+      };
+
+      TestAgent.sendMessage = jest.fn().mockImplementation(async () => {
+        const { filteredTools = [], includedTools = [] } = TestAgent.options.req.app.locals;
+
+        if (includedTools.length > 0) {
+          const tools = TestAgent.options.tools.filter((plugin) =>
+            includedTools.includes(plugin.name),
+          );
+          TestAgent.options.tools = tools;
+        } else {
+          const tools = TestAgent.options.tools.filter(
+            (plugin) => !filteredTools.includes(plugin.name),
+          );
+          TestAgent.options.tools = tools;
+        }
+
+        return {
+          text: 'Mocked response',
+          tools: TestAgent.options.tools,
+        };
+      });
+    });
+
+    test('should filter out tools when filteredTools is provided', async () => {
+      TestAgent.options.req.app.locals.filteredTools = ['tool1', 'tool3'];
+      const response = await TestAgent.sendMessage('Test message');
+      expect(response.tools).toHaveLength(2);
+      expect(response.tools).toEqual(
+        expect.arrayContaining([
+          expect.objectContaining({ name: 'tool2' }),
+          expect.objectContaining({ name: 'tool4' }),
+        ]),
+      );
+    });
+
+    test('should only include specified tools when includedTools is provided', async () => {
+      TestAgent.options.req.app.locals.includedTools = ['tool2', 'tool4'];
+      const response = await TestAgent.sendMessage('Test message');
+      expect(response.tools).toHaveLength(2);
+      expect(response.tools).toEqual(
+        expect.arrayContaining([
+          expect.objectContaining({ name: 'tool2' }),
+          expect.objectContaining({ name: 'tool4' }),
+        ]),
+      );
+    });
+
+    test('should prioritize includedTools over filteredTools', async () => {
+      TestAgent.options.req.app.locals.filteredTools = ['tool1', 'tool3'];
+      TestAgent.options.req.app.locals.includedTools = ['tool1', 'tool2'];
+      const response = await TestAgent.sendMessage('Test message');
+      expect(response.tools).toHaveLength(2);
+      expect(response.tools).toEqual(
+        expect.arrayContaining([
+          expect.objectContaining({ name: 'tool1' }),
+          expect.objectContaining({ name: 'tool2' }),
+        ]),
+      );
+    });
+
+    test('should not modify tools when no filters are provided', async () => {
+      const response = await TestAgent.sendMessage('Test message');
+      expect(response.tools).toHaveLength(4);
+      expect(response.tools).toEqual(expect.arrayContaining(mockTools));
+    });
+  });
 });
@@ -80,13 +80,18 @@ class StableDiffusionAPI extends StructuredTool {
     const payload = {
       prompt,
       negative_prompt,
-      sampler_index: 'DPM++ 2M Karras',
       cfg_scale: 4.5,
       steps: 22,
       width: 1024,
       height: 1024,
     };
-    const generationResponse = await axios.post(`${url}/sdapi/v1/txt2img`, payload);
+    let generationResponse;
+    try {
+      generationResponse = await axios.post(`${url}/sdapi/v1/txt2img`, payload);
+    } catch (error) {
+      logger.error('[StableDiffusion] Error while generating image:', error);
+      return 'Error making API request.';
+    }
     const image = generationResponse.data.images[0];

     /** @type {{ height: number, width: number, seed: number, infotexts: string[] }} */
api/cache/getLogStores.js (17 changes, vendored)
@@ -7,6 +7,7 @@ const keyvMongo = require('./keyvMongo');

 const { BAN_DURATION, USE_REDIS } = process.env ?? {};
 const THIRTY_MINUTES = 1800000;
+const TEN_MINUTES = 600000;

 const duration = math(BAN_DURATION, 7200000);

@@ -24,6 +25,14 @@ const config = isEnabled(USE_REDIS)
   ? new Keyv({ store: keyvRedis })
   : new Keyv({ namespace: CacheKeys.CONFIG_STORE });

+const roles = isEnabled(USE_REDIS)
+  ? new Keyv({ store: keyvRedis })
+  : new Keyv({ namespace: CacheKeys.ROLES });
+
+const audioRuns = isEnabled(USE_REDIS) // ttl: 30 minutes
+  ? new Keyv({ store: keyvRedis, ttl: TEN_MINUTES })
+  : new Keyv({ namespace: CacheKeys.AUDIO_RUNS, ttl: TEN_MINUTES });
+
 const tokenConfig = isEnabled(USE_REDIS) // ttl: 30 minutes
   ? new Keyv({ store: keyvRedis, ttl: THIRTY_MINUTES })
   : new Keyv({ namespace: CacheKeys.TOKEN_CONFIG, ttl: THIRTY_MINUTES });
@@ -41,6 +50,7 @@ const abortKeys = isEnabled(USE_REDIS)
   : new Keyv({ namespace: CacheKeys.ABORT_KEYS, ttl: 600000 });

 const namespaces = {
+  [CacheKeys.ROLES]: roles,
   [CacheKeys.CONFIG_STORE]: config,
   pending_req,
   [ViolationTypes.BAN]: new Keyv({ store: keyvMongo, namespace: CacheKeys.BANS, ttl: duration }),
@@ -55,7 +65,13 @@ const namespaces = {
   message_limit: createViolationInstance('message_limit'),
   token_balance: createViolationInstance(ViolationTypes.TOKEN_BALANCE),
   registrations: createViolationInstance('registrations'),
+  [ViolationTypes.TTS_LIMIT]: createViolationInstance(ViolationTypes.TTS_LIMIT),
+  [ViolationTypes.STT_LIMIT]: createViolationInstance(ViolationTypes.STT_LIMIT),
   [ViolationTypes.FILE_UPLOAD_LIMIT]: createViolationInstance(ViolationTypes.FILE_UPLOAD_LIMIT),
+  [ViolationTypes.VERIFY_EMAIL_LIMIT]: createViolationInstance(ViolationTypes.VERIFY_EMAIL_LIMIT),
+  [ViolationTypes.RESET_PASSWORD_LIMIT]: createViolationInstance(
+    ViolationTypes.RESET_PASSWORD_LIMIT,
+  ),
   [ViolationTypes.ILLEGAL_MODEL_REQUEST]: createViolationInstance(
     ViolationTypes.ILLEGAL_MODEL_REQUEST,
   ),
@@ -64,6 +80,7 @@ const namespaces = {
   [CacheKeys.TOKEN_CONFIG]: tokenConfig,
   [CacheKeys.GEN_TITLE]: genTitle,
   [CacheKeys.MODEL_QUERIES]: modelQueries,
+  [CacheKeys.AUDIO_RUNS]: audioRuns,
 };

 /**
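Note (illustrative sketch, not part of the diff): the new ROLES and AUDIO_RUNS namespaces are retrieved the same way as the existing stores; the role object below is a placeholder.

  // Sketch: fetch the new caches by their CacheKeys entries.
  const rolesCache = getLogStores(CacheKeys.ROLES);
  const audioRunsCache = getLogStores(CacheKeys.AUDIO_RUNS); // entries expire after TEN_MINUTES
  await rolesCache.set('ADMIN', someRoleObject); // someRoleObject is hypothetical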
api/cache/logViolation.js (2 changes, vendored)
@@ -1,6 +1,6 @@
+const { isEnabled } = require('~/server/utils');
 const getLogStores = require('./getLogStores');
 const banViolation = require('./banViolation');
-const { isEnabled } = require('../server/utils');

 /**
  * Logs the violation.
@@ -27,26 +27,25 @@ function getMatchingSensitivePatterns(valueStr) {
 }

 /**
- * Redacts sensitive information from a console message.
- *
+ * Redacts sensitive information from a console message and trims it to a specified length if provided.
  * @param {string} str - The console message to be redacted.
- * @returns {string} - The redacted console message.
+ * @param {number} [trimLength] - The optional length at which to trim the redacted message.
+ * @returns {string} - The redacted and optionally trimmed console message.
  */
-function redactMessage(str) {
+function redactMessage(str, trimLength) {
   if (!str) {
     return '';
   }

   const patterns = getMatchingSensitivePatterns(str);

-  if (patterns.length === 0) {
-    return str;
-  }
-
   patterns.forEach((pattern) => {
     str = str.replace(pattern, '$1[REDACTED]');
   });

+  if (trimLength !== undefined && str.length > trimLength) {
+    return `${str.substring(0, trimLength)}...`;
+  }
+
   return str;
 }

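Note (illustrative sketch, not part of the diff): with the new optional trimLength argument, the redacted string is additionally truncated; the message variable below is a made-up example.

  // Sketch: redact first, then cut to 40 characters plus an ellipsis.
  const full = redactMessage(longErrorMessage);
  const short = redactMessage(longErrorMessage, 40); // `${redacted.substring(0, 40)}...`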
@@ -1,11 +1,28 @@
 const { MeiliSearch } = require('meilisearch');
-const Message = require('~/models/schema/messageSchema');
 const Conversation = require('~/models/schema/convoSchema');
+const Message = require('~/models/schema/messageSchema');
 const { logger } = require('~/config');

 const searchEnabled = process.env?.SEARCH?.toLowerCase() === 'true';
 let currentTimeout = null;

+class MeiliSearchClient {
+  static instance = null;
+
+  static getInstance() {
+    if (!MeiliSearchClient.instance) {
+      if (!process.env.MEILI_HOST || !process.env.MEILI_MASTER_KEY) {
+        throw new Error('Meilisearch configuration is missing.');
+      }
+      MeiliSearchClient.instance = new MeiliSearch({
+        host: process.env.MEILI_HOST,
+        apiKey: process.env.MEILI_MASTER_KEY,
+      });
+    }
+    return MeiliSearchClient.instance;
+  }
+}
+
 // eslint-disable-next-line no-unused-vars
 async function indexSync(req, res, next) {
   if (!searchEnabled) {
@@ -13,20 +30,10 @@ async function indexSync(req, res, next) {
   }

   try {
-    if (!process.env.MEILI_HOST || !process.env.MEILI_MASTER_KEY || !searchEnabled) {
-      throw new Error('Meilisearch not configured, search will be disabled.');
-    }
-
-    const client = new MeiliSearch({
-      host: process.env.MEILI_HOST,
-      apiKey: process.env.MEILI_MASTER_KEY,
-    });
-
+    const client = MeiliSearchClient.getInstance();
+
     const { status } = await client.health();
-    // logger.debug(`[indexSync] Meilisearch: ${status}`);
-    const result = status === 'available' && !!process.env.SEARCH;
-
-    if (!result) {
+    if (status !== 'available' || !process.env.SEARCH) {
       throw new Error('Meilisearch not available');
     }

@@ -37,12 +44,8 @@ async function indexSync(req, res, next) {
     const messagesIndexed = messages.numberOfDocuments;
     const convosIndexed = convos.numberOfDocuments;

-    logger.debug(
-      `[indexSync] There are ${messageCount} messages in the database, ${messagesIndexed} indexed`,
-    );
-    logger.debug(
-      `[indexSync] There are ${convoCount} convos in the database, ${convosIndexed} indexed`,
-    );
+    logger.debug(`[indexSync] There are ${messageCount} messages and ${messagesIndexed} indexed`);
+    logger.debug(`[indexSync] There are ${convoCount} convos and ${convosIndexed} indexed`);

     if (messageCount !== messagesIndexed) {
       logger.debug('[indexSync] Messages out of sync, indexing');
@@ -54,7 +57,6 @@ async function indexSync(req, res, next) {
       Conversation.syncWithMeili();
     }
   } catch (err) {
-    // logger.debug('[indexSync] in index sync');
     if (err.message.includes('not found')) {
       logger.debug('[indexSync] Creating indices...');
       currentTimeout = setTimeout(async () => {
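Note (illustrative sketch, not part of the diff): indexSync now obtains its client from the lazily created singleton instead of constructing one per call, so the MEILI_HOST / MEILI_MASTER_KEY check happens in one place.

  // Sketch: all callers share a single client.
  const client = MeiliSearchClient.getInstance(); // throws if MEILI_HOST or MEILI_MASTER_KEY is missing
  const { status } = await client.health();       // 'available' when the server is reachable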
@@ -62,8 +62,24 @@ const deleteAction = async (searchParams, session = null) => {
   return await Action.findOneAndDelete(searchParams, options).lean();
 };

-module.exports = {
-  updateAction,
-  getActions,
-  deleteAction,
+/**
+ * Deletes actions by params, within a transaction session if provided.
+ *
+ * @param {Object} searchParams - The search parameters to find the actions to delete.
+ * @param {string} searchParams.action_id - The ID of the action(s) to delete.
+ * @param {string} searchParams.user - The user ID of the action's author.
+ * @param {mongoose.ClientSession} [session] - The transaction session to use (optional).
+ * @returns {Promise<Number>} A promise that resolves to the number of deleted action documents.
+ */
+const deleteActions = async (searchParams, session = null) => {
+  const options = session ? { session } : {};
+  const result = await Action.deleteMany(searchParams, options);
+  return result.deletedCount;
+};
+
+module.exports = {
+  getActions,
+  updateAction,
+  deleteAction,
+  deleteActions,
 };
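Note (illustrative sketch, not part of the diff): deleteActions can run inside a Mongoose transaction when a session is passed; the ids below are placeholders.

  // Sketch: delete a user's actions for one action_id inside a transaction.
  const session = await mongoose.startSession();
  await session.withTransaction(async () => {
    const count = await deleteActions({ action_id: 'example_action_id', user: userId }, session);
    console.log(`Removed ${count} action document(s)`);
  });
  session.endSession();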
@@ -14,7 +14,7 @@ const Assistant = mongoose.model('assistant', assistantSchema);
  * @param {mongoose.ClientSession} [session] - The transaction session to use (optional).
  * @returns {Promise<Object>} The updated or newly created assistant document as a plain object.
  */
-const updateAssistant = async (searchParams, updateData, session = null) => {
+const updateAssistantDoc = async (searchParams, updateData, session = null) => {
   const options = { new: true, upsert: true, session };
   return await Assistant.findOneAndUpdate(searchParams, updateData, options).lean();
 };
@@ -39,8 +39,21 @@ const getAssistants = async (searchParams) => {
   return await Assistant.find(searchParams).lean();
 };

+/**
+ * Deletes an assistant based on the provided ID.
+ *
+ * @param {Object} searchParams - The search parameters to find the assistant to delete.
+ * @param {string} searchParams.assistant_id - The ID of the assistant to delete.
+ * @param {string} searchParams.user - The user ID of the assistant's author.
+ * @returns {Promise<void>} Resolves when the assistant has been successfully deleted.
+ */
+const deleteAssistant = async (searchParams) => {
+  return await Assistant.findOneAndDelete(searchParams);
+};
+
 module.exports = {
-  updateAssistant,
+  updateAssistantDoc,
+  deleteAssistant,
   getAssistants,
   getAssistant,
 };
api/models/Categories.js (61 changes, new file)
@@ -0,0 +1,61 @@
+const { logger } = require('~/config');
+// const { Categories } = require('./schema/categories');
+const options = [
+  {
+    label: '',
+    value: '',
+  },
+  {
+    label: 'idea',
+    value: 'idea',
+  },
+  {
+    label: 'travel',
+    value: 'travel',
+  },
+  {
+    label: 'teach_or_explain',
+    value: 'teach_or_explain',
+  },
+  {
+    label: 'write',
+    value: 'write',
+  },
+  {
+    label: 'shop',
+    value: 'shop',
+  },
+  {
+    label: 'code',
+    value: 'code',
+  },
+  {
+    label: 'misc',
+    value: 'misc',
+  },
+  {
+    label: 'roleplay',
+    value: 'roleplay',
+  },
+  {
+    label: 'finance',
+    value: 'finance',
+  },
+];
+
+module.exports = {
+  /**
+   * Retrieves the categories asynchronously.
+   * @returns {Promise<TGetCategoriesResponse>} An array of category objects.
+   * @throws {Error} If there is an error retrieving the categories.
+   */
+  getCategories: async () => {
+    try {
+      // const categories = await Categories.find();
+      return options;
+    } catch (error) {
+      logger.error('Error getting categories', error);
+      return [];
+    }
+  },
+};
@@ -2,6 +2,12 @@ const Conversation = require('./schema/convoSchema');
 const { getMessages, deleteMessages } = require('./Message');
 const logger = require('~/config/winston');

+/**
+ * Retrieves a single conversation for a given user and conversation ID.
+ * @param {string} user - The user's ID.
+ * @param {string} conversationId - The conversation's ID.
+ * @returns {Promise<TConversation>} The conversation object.
+ */
 const getConvo = async (user, conversationId) => {
   try {
     return await Conversation.findOne({ user, conversationId }).lean();
@@ -15,26 +21,52 @@ module.exports = {
   Conversation,
   saveConvo: async (user, { conversationId, newConversationId, ...convo }) => {
     try {
-      const messages = await getMessages({ conversationId });
+      const messages = await getMessages({ conversationId }, '_id');
       const update = { ...convo, messages, user };
       if (newConversationId) {
         update.conversationId = newConversationId;
       }

-      return await Conversation.findOneAndUpdate({ conversationId: conversationId, user }, update, {
+      const conversation = await Conversation.findOneAndUpdate({ conversationId, user }, update, {
         new: true,
         upsert: true,
       });
+
+      return conversation.toObject();
     } catch (error) {
       logger.error('[saveConvo] Error saving conversation', error);
       return { message: 'Error saving conversation' };
     }
   },
-  getConvosByPage: async (user, pageNumber = 1, pageSize = 25) => {
+  bulkSaveConvos: async (conversations) => {
     try {
-      const totalConvos = (await Conversation.countDocuments({ user })) || 1;
+      const bulkOps = conversations.map((convo) => ({
+        updateOne: {
+          filter: { conversationId: convo.conversationId, user: convo.user },
+          update: convo,
+          upsert: true,
+          timestamps: false,
+        },
+      }));
+
+      const result = await Conversation.bulkWrite(bulkOps);
+      return result;
+    } catch (error) {
+      logger.error('[saveBulkConversations] Error saving conversations in bulk', error);
+      throw new Error('Failed to save conversations in bulk.');
+    }
+  },
+  getConvosByPage: async (user, pageNumber = 1, pageSize = 25, isArchived = false) => {
+    const query = { user };
+    if (isArchived) {
+      query.isArchived = true;
+    } else {
+      query.$or = [{ isArchived: false }, { isArchived: { $exists: false } }];
+    }
+    try {
+      const totalConvos = (await Conversation.countDocuments(query)) || 1;
       const totalPages = Math.ceil(totalConvos / pageSize);
-      const convos = await Conversation.find({ user })
+      const convos = await Conversation.find(query)
         .sort({ updatedAt: -1 })
         .skip((pageNumber - 1) * pageSize)
         .limit(pageSize)
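Note (illustrative sketch, not part of the diff): getConvosByPage now takes an isArchived flag, and bulkSaveConvos upserts many conversations in a single bulkWrite; the variables below are placeholders.

  // Sketch: list archived vs. active conversations for a user.
  const archived = await getConvosByPage(userId, 1, 25, true);
  const active = await getConvosByPage(userId, 1, 25); // excludes archived by default

  // Sketch: upsert imported conversations without touching timestamps.
  await bulkSaveConvos(importedConvos); // each item needs conversationId and user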
@@ -97,8 +97,12 @@ const deleteFileByFilter = async (filter) => {
  * @param {Array<string>} file_ids - The unique identifiers of the files to delete.
  * @returns {Promise<Object>} A promise that resolves to the result of the deletion operation.
  */
-const deleteFiles = async (file_ids) => {
-  return await File.deleteMany({ file_id: { $in: file_ids } });
+const deleteFiles = async (file_ids, user) => {
+  let deleteQuery = { file_id: { $in: file_ids } };
+  if (user) {
+    deleteQuery = { user: user };
+  }
+  return await File.deleteMany(deleteQuery);
 };

 module.exports = {
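Note (illustrative sketch, not part of the diff): when a user id is supplied, the delete query switches to the user filter and the file_ids list is not applied; values below are placeholders.

  // Sketch: delete specific files, or everything owned by a user.
  await deleteFiles(['file_abc', 'file_def']); // by file_id
  await deleteFiles([], req.user.id);          // by user; the id array is ignored in this branch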
@@ -10,6 +10,7 @@ module.exports = {
   async saveMessage({
     user,
     endpoint,
+    iconURL,
     messageId,
     newMessageId,
     conversationId,
@@ -35,6 +36,7 @@ module.exports = {

       const update = {
         user,
+        iconURL,
         endpoint,
         messageId: newMessageId || messageId,
         conversationId,
@@ -55,23 +57,37 @@ module.exports = {
       if (files) {
         update.files = files;
       }
-      // may also need to update the conversation here
-      await Message.findOneAndUpdate({ messageId }, update, { upsert: true, new: true });

-      return {
-        messageId,
-        conversationId,
-        parentMessageId,
-        sender,
-        text,
-        isCreatedByUser,
-        tokenCount,
-      };
+      const message = await Message.findOneAndUpdate({ messageId }, update, {
+        upsert: true,
+        new: true,
+      });
+
+      return message.toObject();
     } catch (err) {
       logger.error('Error saving message:', err);
       throw new Error('Failed to save message.');
     }
   },
+
+  async bulkSaveMessages(messages) {
+    try {
+      const bulkOps = messages.map((message) => ({
+        updateOne: {
+          filter: { messageId: message.messageId },
+          update: message,
+          upsert: true,
+        },
+      }));
+
+      const result = await Message.bulkWrite(bulkOps);
+      return result;
+    } catch (err) {
+      logger.error('Error saving messages in bulk:', err);
+      throw new Error('Failed to save messages in bulk.');
+    }
+  },
+
   /**
    * Records a message in the database.
    *
@@ -108,6 +124,14 @@ module.exports = {
       throw new Error('Failed to save message.');
     }
   },
+  async updateMessageText({ messageId, text }) {
+    try {
+      await Message.updateOne({ messageId }, { text });
+    } catch (err) {
+      logger.error('Error updating message text:', err);
+      throw new Error('Failed to update message text.');
+    }
+  },
   async updateMessage(message) {
     try {
       const { messageId, ...update } = message;
@@ -150,8 +174,18 @@ module.exports = {
     }
   },

-  async getMessages(filter) {
+  /**
+   * Retrieves messages from the database.
+   * @param {Record<string, unknown>} filter
+   * @param {string | undefined} [select]
+   * @returns
+   */
+  async getMessages(filter, select) {
     try {
+      if (select) {
+        return await Message.find(filter).select(select).sort({ createdAt: 1 }).lean();
+      }
+
       return await Message.find(filter).sort({ createdAt: 1 }).lean();
     } catch (err) {
       logger.error('Error getting messages:', err);
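Note (illustrative sketch, not part of the diff): the optional select argument lets callers such as saveConvo fetch only the fields they need, and bulkSaveMessages upserts a batch keyed by messageId; the variables below are placeholders.

  // Sketch: fetch just the _id of each message in a conversation.
  const ids = await getMessages({ conversationId }, '_id');

  // Sketch: upsert many messages in one bulkWrite.
  await bulkSaveMessages(editedMessages);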
@@ -39,6 +39,12 @@ module.exports = {
     try {
       const setter = { $set: {} };
       const update = { presetId, ...preset };
+      if (preset.tools && Array.isArray(preset.tools)) {
+        update.tools =
+          preset.tools
+            .map((tool) => tool?.pluginKey ?? tool)
+            .filter((toolName) => typeof toolName === 'string') ?? [];
+      }
       if (newPresetId) {
         update.presetId = newPresetId;
       }
api/models/Project.js (90 changes, new file)
@@ -0,0 +1,90 @@
+const { model } = require('mongoose');
+const projectSchema = require('~/models/schema/projectSchema');
+
+const Project = model('Project', projectSchema);
+
+/**
+ * Retrieve a project by ID and convert the found project document to a plain object.
+ *
+ * @param {string} projectId - The ID of the project to find and return as a plain object.
+ * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
+ * @returns {Promise<MongoProject>} A plain object representing the project document, or `null` if no project is found.
+ */
+const getProjectById = async function (projectId, fieldsToSelect = null) {
+  const query = Project.findById(projectId);
+
+  if (fieldsToSelect) {
+    query.select(fieldsToSelect);
+  }
+
+  return await query.lean();
+};
+
+/**
+ * Retrieve a project by name and convert the found project document to a plain object.
+ * If the project with the given name doesn't exist and the name is "instance", create it and return the lean version.
+ *
+ * @param {string} projectName - The name of the project to find or create.
+ * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
+ * @returns {Promise<MongoProject>} A plain object representing the project document.
+ */
+const getProjectByName = async function (projectName, fieldsToSelect = null) {
+  const query = { name: projectName };
+  const update = { $setOnInsert: { name: projectName } };
+  const options = {
+    new: true,
+    upsert: projectName === 'instance',
+    lean: true,
+    select: fieldsToSelect,
+  };
+
+  return await Project.findOneAndUpdate(query, update, options);
+};
+
+/**
+ * Add an array of prompt group IDs to a project's promptGroupIds array, ensuring uniqueness.
+ *
+ * @param {string} projectId - The ID of the project to update.
+ * @param {string[]} promptGroupIds - The array of prompt group IDs to add to the project.
+ * @returns {Promise<MongoProject>} The updated project document.
+ */
+const addGroupIdsToProject = async function (projectId, promptGroupIds) {
+  return await Project.findByIdAndUpdate(
+    projectId,
+    { $addToSet: { promptGroupIds: { $each: promptGroupIds } } },
+    { new: true },
+  );
+};
+
+/**
+ * Remove an array of prompt group IDs from a project's promptGroupIds array.
+ *
+ * @param {string} projectId - The ID of the project to update.
+ * @param {string[]} promptGroupIds - The array of prompt group IDs to remove from the project.
+ * @returns {Promise<MongoProject>} The updated project document.
+ */
+const removeGroupIdsFromProject = async function (projectId, promptGroupIds) {
+  return await Project.findByIdAndUpdate(
+    projectId,
+    { $pull: { promptGroupIds: { $in: promptGroupIds } } },
+    { new: true },
+  );
+};
+
+/**
+ * Remove a prompt group ID from all projects.
+ *
+ * @param {string} promptGroupId - The ID of the prompt group to remove from projects.
+ * @returns {Promise<void>}
+ */
+const removeGroupFromAllProjects = async (promptGroupId) => {
+  await Project.updateMany({}, { $pull: { promptGroupIds: promptGroupId } });
+};
+
+module.exports = {
+  getProjectById,
+  getProjectByName,
+  addGroupIdsToProject,
+  removeGroupIdsFromProject,
+  removeGroupFromAllProjects,
+};
@@ -1,52 +1,528 @@
-const mongoose = require('mongoose');
+const { ObjectId } = require('mongodb');
+const { SystemRoles, SystemCategories } = require('librechat-data-provider');
+const {
+  getProjectByName,
+  addGroupIdsToProject,
+  removeGroupIdsFromProject,
+  removeGroupFromAllProjects,
+} = require('./Project');
+const { Prompt, PromptGroup } = require('./schema/promptSchema');
 const { logger } = require('~/config');

-const promptSchema = mongoose.Schema(
-  {
-    title: {
-      type: String,
-      required: true,
-    },
-    prompt: {
-      type: String,
-      required: true,
-    },
-    category: {
-      type: String,
-    },
-  },
-  { timestamps: true },
-);
-
-const Prompt = mongoose.models.Prompt || mongoose.model('Prompt', promptSchema);
+/**
+ * Create a pipeline for the aggregation to get prompt groups
+ * @param {Object} query
+ * @param {number} skip
+ * @param {number} limit
+ * @returns {[Object]} - The pipeline for the aggregation
+ */
+const createGroupPipeline = (query, skip, limit) => {
+  return [
+    { $match: query },
+    { $sort: { createdAt: -1 } },
+    { $skip: skip },
+    { $limit: limit },
+    {
+      $lookup: {
+        from: 'prompts',
+        localField: 'productionId',
+        foreignField: '_id',
+        as: 'productionPrompt',
+      },
+    },
+    { $unwind: { path: '$productionPrompt', preserveNullAndEmptyArrays: true } },
+    {
+      $project: {
+        name: 1,
+        numberOfGenerations: 1,
+        oneliner: 1,
+        category: 1,
+        projectIds: 1,
+        productionId: 1,
+        author: 1,
+        authorName: 1,
+        createdAt: 1,
+        updatedAt: 1,
+        'productionPrompt.prompt': 1,
+        // 'productionPrompt._id': 1,
+        // 'productionPrompt.type': 1,
+      },
+    },
+  ];
+};
+
+/**
+ * Create a pipeline for the aggregation to get all prompt groups
+ * @param {Object} query
+ * @param {Partial<MongoPromptGroup>} $project
+ * @returns {[Object]} - The pipeline for the aggregation
+ */
+const createAllGroupsPipeline = (
+  query,
+  $project = {
+    name: 1,
+    oneliner: 1,
+    category: 1,
+    author: 1,
+    authorName: 1,
+    createdAt: 1,
+    updatedAt: 1,
+    command: 1,
+    'productionPrompt.prompt': 1,
+  },
+) => {
+  return [
+    { $match: query },
+    { $sort: { createdAt: -1 } },
+    {
+      $lookup: {
+        from: 'prompts',
+        localField: 'productionId',
+        foreignField: '_id',
+        as: 'productionPrompt',
+      },
+    },
+    { $unwind: { path: '$productionPrompt', preserveNullAndEmptyArrays: true } },
+    {
+      $project,
+    },
+  ];
+};
+
+/**
+ * Get all prompt groups with filters
+ * @param {Object} req
+ * @param {TPromptGroupsWithFilterRequest} filter
+ * @returns {Promise<PromptGroupListResponse>}
+ */
+const getAllPromptGroups = async (req, filter) => {
+  try {
+    const { name, ...query } = filter;
+
+    if (!query.author) {
+      throw new Error('Author is required');
+    }
+
+    let searchShared = true;
+    let searchSharedOnly = false;
+    if (name) {
+      query.name = new RegExp(name, 'i');
+    }
+    if (!query.category) {
+      delete query.category;
+    } else if (query.category === SystemCategories.MY_PROMPTS) {
+      searchShared = false;
+      delete query.category;
+    } else if (query.category === SystemCategories.NO_CATEGORY) {
+      query.category = '';
+    } else if (query.category === SystemCategories.SHARED_PROMPTS) {
+      searchSharedOnly = true;
+      delete query.category;
+    }
+
+    let combinedQuery = query;
+
+    if (searchShared) {
+      const project = await getProjectByName('instance', 'promptGroupIds');
+      if (project && project.promptGroupIds.length > 0) {
+        const projectQuery = { _id: { $in: project.promptGroupIds }, ...query };
+        delete projectQuery.author;
+        combinedQuery = searchSharedOnly ? projectQuery : { $or: [projectQuery, query] };
+      }
+    }
+
+    const promptGroupsPipeline = createAllGroupsPipeline(combinedQuery);
+    return await PromptGroup.aggregate(promptGroupsPipeline).exec();
+  } catch (error) {
+    console.error('Error getting all prompt groups', error);
+    return { message: 'Error getting all prompt groups' };
+  }
+};
+
+/**
+ * Get prompt groups with filters
+ * @param {Object} req
+ * @param {TPromptGroupsWithFilterRequest} filter
+ * @returns {Promise<PromptGroupListResponse>}
+ */
+const getPromptGroups = async (req, filter) => {
+  try {
+    const { pageNumber = 1, pageSize = 10, name, ...query } = filter;
+
+    const validatedPageNumber = Math.max(parseInt(pageNumber, 10), 1);
+    const validatedPageSize = Math.max(parseInt(pageSize, 10), 1);
+
+    if (!query.author) {
+      throw new Error('Author is required');
+    }
+
+    let searchShared = true;
+    let searchSharedOnly = false;
+    if (name) {
+      query.name = new RegExp(name, 'i');
+    }
+    if (!query.category) {
+      delete query.category;
+    } else if (query.category === SystemCategories.MY_PROMPTS) {
+      searchShared = false;
+      delete query.category;
+    } else if (query.category === SystemCategories.NO_CATEGORY) {
+      query.category = '';
+    } else if (query.category === SystemCategories.SHARED_PROMPTS) {
+      searchSharedOnly = true;
+      delete query.category;
+    }
+
+    let combinedQuery = query;
+
+    if (searchShared) {
+      // const projects = req.user.projects || []; // TODO: handle multiple projects
+      const project = await getProjectByName('instance', 'promptGroupIds');
+      if (project && project.promptGroupIds.length > 0) {
+        const projectQuery = { _id: { $in: project.promptGroupIds }, ...query };
+        delete projectQuery.author;
+        combinedQuery = searchSharedOnly ? projectQuery : { $or: [projectQuery, query] };
+      }
+    }
+
+    const skip = (validatedPageNumber - 1) * validatedPageSize;
+    const limit = validatedPageSize;
+
+    const promptGroupsPipeline = createGroupPipeline(combinedQuery, skip, limit);
+    const totalPromptGroupsPipeline = [{ $match: combinedQuery }, { $count: 'total' }];
+
+    const [promptGroupsResults, totalPromptGroupsResults] = await Promise.all([
+      PromptGroup.aggregate(promptGroupsPipeline).exec(),
+      PromptGroup.aggregate(totalPromptGroupsPipeline).exec(),
+    ]);
+
+    const promptGroups = promptGroupsResults;
+    const totalPromptGroups =
+      totalPromptGroupsResults.length > 0 ? totalPromptGroupsResults[0].total : 0;
+
+    return {
+      promptGroups,
+      pageNumber: validatedPageNumber.toString(),
+      pageSize: validatedPageSize.toString(),
+      pages: Math.ceil(totalPromptGroups / validatedPageSize).toString(),
+    };
+  } catch (error) {
+    console.error('Error getting prompt groups', error);
+    return { message: 'Error getting prompt groups' };
+  }
+};

 module.exports = {
-  savePrompt: async ({ title, prompt }) => {
+  getPromptGroups,
+  getAllPromptGroups,
+  /**
+   * Create a prompt and its respective group
+   * @param {TCreatePromptRecord} saveData
+   * @returns {Promise<TCreatePromptResponse>}
+   */
+  createPromptGroup: async (saveData) => {
     try {
-      await Prompt.create({
-        title,
-        prompt,
-      });
-      return { title, prompt };
+      const { prompt, group, author, authorName } = saveData;
+
+      let newPromptGroup = await PromptGroup.findOneAndUpdate(
+        { ...group, author, authorName, productionId: null },
+        { $setOnInsert: { ...group, author, authorName, productionId: null } },
+        { new: true, upsert: true },
+      )
+        .lean()
+        .select('-__v')
+        .exec();
+
+      const newPrompt = await Prompt.findOneAndUpdate(
+        { ...prompt, author, groupId: newPromptGroup._id },
+        { $setOnInsert: { ...prompt, author, groupId: newPromptGroup._id } },
+        { new: true, upsert: true },
+      )
+        .lean()
+        .select('-__v')
+        .exec();
+
+      newPromptGroup = await PromptGroup.findByIdAndUpdate(
+        newPromptGroup._id,
+        { productionId: newPrompt._id },
+        { new: true },
+      )
+        .lean()
+        .select('-__v')
+        .exec();
+
+      return {
+        prompt: newPrompt,
+        group: {
+          ...newPromptGroup,
+          productionPrompt: { prompt: newPrompt.prompt },
+        },
+      };
+    } catch (error) {
+      logger.error('Error saving prompt group', error);
+      throw new Error('Error saving prompt group');
+    }
+  },
+  /**
+   * Save a prompt
+   * @param {TCreatePromptRecord} saveData
+   * @returns {Promise<TCreatePromptResponse>}
+   */
+  savePrompt: async (saveData) => {
+    try {
+      const { prompt, author } = saveData;
+      const newPromptData = {
+        ...prompt,
+        author,
+      };
+
+      /** @type {TPrompt} */
+      let newPrompt;
+      try {
+        newPrompt = await Prompt.create(newPromptData);
+      } catch (error) {
+        if (error?.message?.includes('groupId_1_version_1')) {
+          await Prompt.db.collection('prompts').dropIndex('groupId_1_version_1');
+        } else {
+          throw error;
+        }
+        newPrompt = await Prompt.create(newPromptData);
+      }
+
+      return { prompt: newPrompt };
     } catch (error) {
       logger.error('Error saving prompt', error);
-      return { prompt: 'Error saving prompt' };
+      return { message: 'Error saving prompt' };
     }
   },
   getPrompts: async (filter) => {
     try {
-      return await Prompt.find(filter).lean();
+      return await Prompt.find(filter).sort({ createdAt: -1 }).lean();
     } catch (error) {
       logger.error('Error getting prompts', error);
-      return { prompt: 'Error getting prompts' };
+      return { message: 'Error getting prompts' };
     }
   },
-  deletePrompts: async (filter) => {
+  getPrompt: async (filter) => {
     try {
-      return await Prompt.deleteMany(filter);
+      if (filter.groupId) {
+        filter.groupId = new ObjectId(filter.groupId);
+      }
+      return await Prompt.findOne(filter).lean();
     } catch (error) {
-      logger.error('Error deleting prompts', error);
-      return { prompt: 'Error deleting prompts' };
+      logger.error('Error getting prompt', error);
+      return { message: 'Error getting prompt' };
+    }
+  },
+  /**
+   * Get prompt groups with filters
+   * @param {TGetRandomPromptsRequest} filter
+   * @returns {Promise<TGetRandomPromptsResponse>}
+   */
+  getRandomPromptGroups: async (filter) => {
+    try {
+      const result = await PromptGroup.aggregate([
+        {
+          $match: {
+            category: { $ne: '' },
+          },
+        },
+        {
+          $group: {
+            _id: '$category',
+            promptGroup: { $first: '$$ROOT' },
+          },
+        },
+        {
+          $replaceRoot: { newRoot: '$promptGroup' },
+        },
+        {
+          $sample: { size: +filter.limit + +filter.skip },
+        },
+        {
+          $skip: +filter.skip,
+        },
+        {
+          $limit: +filter.limit,
+        },
+      ]);
+      return { prompts: result };
+    } catch (error) {
+      logger.error('Error getting prompt groups', error);
+      return { message: 'Error getting prompt groups' };
+    }
+  },
+  getPromptGroupsWithPrompts: async (filter) => {
+    try {
+      return await PromptGroup.findOne(filter)
+        .populate({
+          path: 'prompts',
+          select: '-_id -__v -user',
+        })
+        .select('-_id -__v -user')
+        .lean();
+    } catch (error) {
+      logger.error('Error getting prompt groups', error);
+      return { message: 'Error getting prompt groups' };
+    }
+  },
+  getPromptGroup: async (filter) => {
+    try {
+      return await PromptGroup.findOne(filter).lean();
+    } catch (error) {
+      logger.error('Error getting prompt group', error);
+      return { message: 'Error getting prompt group' };
+    }
+  },
+  /**
+   * Deletes a prompt and its corresponding prompt group if it is the last prompt in the group.
+   *
+   * @param {Object} options - The options for deleting the prompt.
+   * @param {ObjectId|string} options.promptId - The ID of the prompt to delete.
+   * @param {ObjectId|string} options.groupId - The ID of the prompt's group.
+   * @param {ObjectId|string} options.author - The ID of the prompt's author.
+   * @param {string} options.role - The role of the prompt's author.
+   * @return {Promise<TDeletePromptResponse>} An object containing the result of the deletion.
+   * If the prompt was deleted successfully, the object will have a property 'prompt' with the value 'Prompt deleted successfully'.
+   * If the prompt group was deleted successfully, the object will have a property 'promptGroup' with the message 'Prompt group deleted successfully' and id of the deleted group.
+   * If there was an error deleting the prompt, the object will have a property 'message' with the value 'Error deleting prompt'.
+   */
+  deletePrompt: async ({ promptId, groupId, author, role }) => {
+    const query = { _id: promptId, groupId, author };
+    if (role === SystemRoles.ADMIN) {
+      delete query.author;
+    }
+    const { deletedCount } = await Prompt.deleteOne(query);
+    if (deletedCount === 0) {
+      throw new Error('Failed to delete the prompt');
+    }
+
+    const remainingPrompts = await Prompt.find({ groupId })
+      .select('_id')
+      .sort({ createdAt: 1 })
+      .lean();
+
+    if (remainingPrompts.length === 0) {
+      await PromptGroup.deleteOne({ _id: groupId });
+      await removeGroupFromAllProjects(groupId);
+
+      return {
+        prompt: 'Prompt deleted successfully',
+        promptGroup: {
+          message: 'Prompt group deleted successfully',
+          id: groupId,
+        },
+      };
+    } else {
+      const promptGroup = await PromptGroup.findById(groupId).lean();
+      if (promptGroup.productionId.toString() === promptId.toString()) {
+        await PromptGroup.updateOne(
+          { _id: groupId },
+          { productionId: remainingPrompts[remainingPrompts.length - 1]._id },
+        );
+      }
+
+      return { prompt: 'Prompt deleted successfully' };
+    }
+  },
+  /**
+   * Update prompt group
+   * @param {Partial<MongoPromptGroup>} filter - Filter to find prompt group
+   * @param {Partial<MongoPromptGroup>} data - Data to update
+   * @returns {Promise<TUpdatePromptGroupResponse>}
+   */
+  updatePromptGroup: async (filter, data) => {
+    try {
+      const updateOps = {};
+      if (data.removeProjectIds) {
+        for (const projectId of data.removeProjectIds) {
+          await removeGroupIdsFromProject(projectId, [filter._id]);
+        }
+
+        updateOps.$pull = { projectIds: { $in: data.removeProjectIds } };
+        delete data.removeProjectIds;
+      }
+
+      if (data.projectIds) {
+        for (const projectId of data.projectIds) {
+          await addGroupIdsToProject(projectId, [filter._id]);
+        }
+
+        updateOps.$addToSet = { projectIds: { $each: data.projectIds } };
+        delete data.projectIds;
+      }
+
+      const updateData = { ...data, ...updateOps };
+      const updatedDoc = await PromptGroup.findOneAndUpdate(filter, updateData, {
+        new: true,
+        upsert: false,
+      });
+
+      if (!updatedDoc) {
+        throw new Error('Prompt group not found');
+      }
+
+      return updatedDoc;
+    } catch (error) {
+      logger.error('Error updating prompt group', error);
+      return { message: 'Error updating prompt group' };
+    }
+  },
+  /**
+   * Function to make a prompt production based on its ID.
+   * @param {String} promptId - The ID of the prompt to make production.
+   * @returns {Object} The result of the production operation.
+   */
+  makePromptProduction: async (promptId) => {
+    try {
+      const prompt = await Prompt.findById(promptId).lean();
+
+      if (!prompt) {
+        throw new Error('Prompt not found');
+      }
+
+      await PromptGroup.findByIdAndUpdate(
+        prompt.groupId,
+        { productionId: prompt._id },
+        { new: true },
+      )
+        .lean()
+        .exec();
+
+      return {
+        message: 'Prompt production made successfully',
+      };
+    } catch (error) {
+      logger.error('Error making prompt production', error);
+      return { message: 'Error making prompt production' };
+    }
+  },
+  updatePromptLabels: async (_id, labels) => {
+    try {
+      const response = await Prompt.updateOne({ _id }, { $set: { labels } });
+      if (response.matchedCount === 0) {
+        return { message: 'Prompt not found' };
+      }
+      return { message: 'Prompt labels updated successfully' };
+    } catch (error) {
+      logger.error('Error updating prompt labels', error);
+      return { message: 'Error updating prompt labels' };
+    }
+  },
+  deletePromptGroup: async (_id) => {
+    try {
+      const response = await PromptGroup.deleteOne({ _id });
+
+      if (response.deletedCount === 0) {
+        return { promptGroup: 'Prompt group not found' };
+      }
+
+      await Prompt.deleteMany({ groupId: new ObjectId(_id) });
+      await removeGroupFromAllProjects(_id);
+      return { promptGroup: 'Prompt group deleted successfully' };
+    } catch (error) {
+      logger.error('Error deleting prompt group', error);
+      return { message: 'Error deleting prompt group' };
     }
   },
 };
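Note (illustrative sketch, not part of the diff): deletePrompt removes the whole group once its last prompt is deleted, otherwise it re-points the group's productionId to the newest remaining prompt; arguments below are placeholders.

  // Sketch: delete one prompt version and check whether its group was removed too.
  const result = await deletePrompt({ promptId, groupId, author: userId, role: SystemRoles.USER });
  if (result.promptGroup) {
    // the last version was deleted, so the group and its project references are gone
  }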
api/models/Role.js — new file (86 lines)
@@ -0,0 +1,86 @@
const { SystemRoles, CacheKeys, roleDefaults } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const Role = require('~/models/schema/roleSchema');

/**
 * Retrieve a role by name and convert the found role document to a plain object.
 * If the role with the given name doesn't exist and the name is a system defined role, create it and return the lean version.
 *
 * @param {string} roleName - The name of the role to find or create.
 * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
 * @returns {Promise<Object>} A plain object representing the role document.
 */
const getRoleByName = async function (roleName, fieldsToSelect = null) {
  try {
    const cache = getLogStores(CacheKeys.ROLES);
    const cachedRole = await cache.get(roleName);
    if (cachedRole) {
      return cachedRole;
    }
    let query = Role.findOne({ name: roleName });
    if (fieldsToSelect) {
      query = query.select(fieldsToSelect);
    }
    let role = await query.lean().exec();

    if (!role && SystemRoles[roleName]) {
      role = roleDefaults[roleName];
      role = await new Role(role).save();
      await cache.set(roleName, role);
      return role.toObject();
    }
    await cache.set(roleName, role);
    return role;
  } catch (error) {
    throw new Error(`Failed to retrieve or create role: ${error.message}`);
  }
};

/**
 * Update role values by name.
 *
 * @param {string} roleName - The name of the role to update.
 * @param {Partial<TRole>} updates - The fields to update.
 * @returns {Promise<TRole>} Updated role document.
 */
const updateRoleByName = async function (roleName, updates) {
  try {
    const cache = getLogStores(CacheKeys.ROLES);
    const role = await Role.findOneAndUpdate(
      { name: roleName },
      { $set: updates },
      { new: true, lean: true },
    )
      .select('-__v')
      .lean()
      .exec();
    await cache.set(roleName, role);
    return role;
  } catch (error) {
    throw new Error(`Failed to update role: ${error.message}`);
  }
};

/**
 * Initialize default roles in the system.
 * Creates the default roles (ADMIN, USER) if they don't exist in the database.
 *
 * @returns {Promise<void>}
 */
const initializeRoles = async function () {
  const defaultRoles = [SystemRoles.ADMIN, SystemRoles.USER];

  for (const roleName of defaultRoles) {
    let role = await Role.findOne({ name: roleName }).select('name').lean();
    if (!role) {
      role = new Role(roleDefaults[roleName]);
      await role.save();
    }
  }
};

module.exports = {
  getRoleByName,
  initializeRoles,
  updateRoleByName,
};
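A brief usage sketch of the cache-backed role helpers above. The require path and the permission constants come from this file's own imports; the call site itself is illustrative and not part of the changeset.

const { SystemRoles, PermissionTypes, Permissions } = require('librechat-data-provider');
const { getRoleByName, updateRoleByName } = require('~/models/Role');

async function demoRoles() {
  // Cache miss -> reads Mongo; a missing system role is created from roleDefaults and cached.
  const userRole = await getRoleByName(SystemRoles.USER);

  // Writes with $set, then refreshes the cache entry with the updated document.
  const updated = await updateRoleByName(SystemRoles.USER, {
    [PermissionTypes.PROMPTS]: { [Permissions.SHARED_GLOBAL]: true },
  });

  return { userRole, updated };
}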
api/models/Share.js — new file (117 lines)
@@ -0,0 +1,117 @@
const crypto = require('crypto');
const { getMessages } = require('./Message');
const SharedLink = require('./schema/shareSchema');
const logger = require('~/config/winston');

module.exports = {
  SharedLink,
  getSharedMessages: async (shareId) => {
    try {
      const share = await SharedLink.findOne({ shareId })
        .populate({
          path: 'messages',
          select: '-_id -__v -user',
        })
        .select('-_id -__v -user')
        .lean();

      if (!share || !share.conversationId || !share.isPublic) {
        return null;
      }

      return share;
    } catch (error) {
      logger.error('[getShare] Error getting share link', error);
      throw new Error('Error getting share link');
    }
  },

  getSharedLinks: async (user, pageNumber = 1, pageSize = 25, isPublic = true) => {
    const query = { user, isPublic };
    try {
      const totalConvos = (await SharedLink.countDocuments(query)) || 1;
      const totalPages = Math.ceil(totalConvos / pageSize);
      const shares = await SharedLink.find(query)
        .sort({ updatedAt: -1 })
        .skip((pageNumber - 1) * pageSize)
        .limit(pageSize)
        .select('-_id -__v -user')
        .lean();

      return { sharedLinks: shares, pages: totalPages, pageNumber, pageSize };
    } catch (error) {
      logger.error('[getShareByPage] Error getting shares', error);
      throw new Error('Error getting shares');
    }
  },

  createSharedLink: async (user, { conversationId, ...shareData }) => {
    try {
      const share = await SharedLink.findOne({ conversationId }).select('-_id -__v -user').lean();
      if (share) {
        return share;
      }

      const shareId = crypto.randomUUID();
      const messages = await getMessages({ conversationId });
      const update = { ...shareData, shareId, messages, user };
      return await SharedLink.findOneAndUpdate({ conversationId: conversationId, user }, update, {
        new: true,
        upsert: true,
      });
    } catch (error) {
      logger.error('[createSharedLink] Error creating shared link', error);
      throw new Error('Error creating shared link');
    }
  },

  updateSharedLink: async (user, { conversationId, ...shareData }) => {
    try {
      const share = await SharedLink.findOne({ conversationId }).select('-_id -__v -user').lean();
      if (!share) {
        return { message: 'Share not found' };
      }

      // update messages to the latest
      const messages = await getMessages({ conversationId });
      const update = { ...shareData, messages, user };
      return await SharedLink.findOneAndUpdate({ conversationId: conversationId, user }, update, {
        new: true,
        upsert: false,
      });
    } catch (error) {
      logger.error('[updateSharedLink] Error updating shared link', error);
      throw new Error('Error updating shared link');
    }
  },

  deleteSharedLink: async (user, { shareId }) => {
    try {
      const share = await SharedLink.findOne({ shareId, user });
      if (!share) {
        return { message: 'Share not found' };
      }
      return await SharedLink.findOneAndDelete({ shareId, user });
    } catch (error) {
      logger.error('[deleteSharedLink] Error deleting shared link', error);
      throw new Error('Error deleting shared link');
    }
  },
  /**
   * Deletes all shared links for a specific user.
   * @param {string} user - The user ID.
   * @returns {Promise<{ message: string, deletedCount?: number }>} A result object indicating success or error message.
   */
  deleteAllSharedLinks: async (user) => {
    try {
      const result = await SharedLink.deleteMany({ user });
      return {
        message: 'All shared links have been deleted successfully',
        deletedCount: result.deletedCount,
      };
    } catch (error) {
      logger.error('[deleteAllSharedLinks] Error deleting shared links', error);
      throw new Error('Error deleting shared links');
    }
  },
};
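A usage sketch of the sharing helpers. The conversation ID, title, and user ID are made up; the module path matches the new file above.

const { createSharedLink, getSharedMessages, deleteSharedLink } = require('~/models/Share');

async function demoSharing(userId, conversationId) {
  // Idempotent: returns the existing share if one already exists for this conversation.
  const share = await createSharedLink(userId, { conversationId, title: 'My chat', isPublic: true });

  // Public lookup by shareId; returns null unless the share exists and isPublic is true.
  const publicView = await getSharedMessages(share.shareId);

  await deleteSharedLink(userId, { shareId: share.shareId });
  return publicView;
}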
@@ -1,61 +1,5 @@
 const mongoose = require('mongoose');
-const bcrypt = require('bcryptjs');
-const signPayload = require('../server/services/signPayload');
-const userSchema = require('./schema/userSchema.js');
+const userSchema = require('~/models/schema/userSchema');
-const { SESSION_EXPIRY } = process.env ?? {};
-const expires = eval(SESSION_EXPIRY) ?? 1000 * 60 * 15;
-
-userSchema.methods.toJSON = function () {
-  return {
-    id: this._id,
-    provider: this.provider,
-    email: this.email,
-    name: this.name,
-    username: this.username,
-    avatar: this.avatar,
-    role: this.role,
-    emailVerified: this.emailVerified,
-    plugins: this.plugins,
-    createdAt: this.createdAt,
-    updatedAt: this.updatedAt,
-  };
-};
-
-userSchema.methods.generateToken = async function () {
-  return await signPayload({
-    payload: {
-      id: this._id,
-      username: this.username,
-      provider: this.provider,
-      email: this.email,
-    },
-    secret: process.env.JWT_SECRET,
-    expirationTime: expires / 1000,
-  });
-};
-
-userSchema.methods.comparePassword = function (candidatePassword, callback) {
-  bcrypt.compare(candidatePassword, this.password, (err, isMatch) => {
-    if (err) {
-      return callback(err);
-    }
-    callback(null, isMatch);
-  });
-};
-
-module.exports.hashPassword = async (password) => {
-  const hashedPassword = await new Promise((resolve, reject) => {
-    bcrypt.hash(password, 10, function (err, hash) {
-      if (err) {
-        reject(err);
-      } else {
-        resolve(hash);
-      }
-    });
-  });
-
-  return hashedPassword;
-};
-
 const User = mongoose.model('User', userSchema);
@@ -6,9 +6,18 @@ const {
   deleteMessagesSince,
   deleteMessages,
 } = require('./Message');
+const {
+  comparePassword,
+  deleteUserById,
+  generateToken,
+  getUserById,
+  updateUser,
+  createUser,
+  countUsers,
+  findUser,
+} = require('./userMethods');
 const { getConvoTitle, getConvo, saveConvo, deleteConvos } = require('./Conversation');
 const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset');
-const { hashPassword, getUser, updateUser } = require('./userMethods');
 const {
   findFileById,
   createFile,
@@ -29,9 +38,14 @@ module.exports = {
   Session,
   Balance,
 
-  hashPassword,
+  comparePassword,
+  deleteUserById,
+  generateToken,
+  getUserById,
+  countUsers,
+  createUser,
   updateUser,
-  getUser,
+  findUser,
 
   getMessages,
   saveMessage,
@@ -155,7 +155,7 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
         function (results, value, key) {
           return { ...results, [key]: 1 };
         },
-        { _id: 1 },
+        { _id: 1, __v: 1 },
       ),
     ).lean();

@@ -348,7 +348,7 @@ module.exports = function mongoMeili(schema, options) {
     try {
       meiliDoc = await client.index('convos').getDocument(doc.conversationId);
     } catch (error) {
-      logger.error(
+      logger.debug(
         '[MeiliMongooseModel.findOneAndUpdate] Convo not found in MeiliSearch and will index ' +
           doc.conversationId,
         error,
api/models/schema/categories.js — new file (19 lines)
@@ -0,0 +1,19 @@
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

const categoriesSchema = new Schema({
  label: {
    type: String,
    required: true,
    unique: true,
  },
  value: {
    type: String,
    required: true,
    unique: true,
  },
});

const categories = mongoose.model('categories', categoriesSchema);

module.exports = { Categories: categories };
@@ -88,6 +88,28 @@ const conversationPreset = {
   instructions: {
     type: String,
   },
+  stop: { type: [{ type: String }], default: undefined },
+  isArchived: {
+    type: Boolean,
+    default: false,
+  },
+  /* UI Components */
+  iconURL: {
+    type: String,
+  },
+  greeting: {
+    type: String,
+  },
+  spec: {
+    type: String,
+  },
+  tools: { type: [{ type: String }], default: undefined },
+  maxContextTokens: {
+    type: Number,
+  },
+  max_tokens: {
+    type: Number,
+  },
 };
 
 const agentOptions = {
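For orientation, a sketch of what a preset document can carry once these fields exist. All values below are invented for illustration; only the field names come from the schema change above.

const presetWithUiFields = {
  // ...existing preset fields omitted
  stop: ['\n\n', 'User:'],
  isArchived: false,
  iconURL: 'https://example.com/icon.png',
  greeting: 'How can I help today?',
  spec: 'default-gpt-spec',
  maxContextTokens: 8192,
  max_tokens: 1024,
};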
@@ -3,9 +3,9 @@ const mongoose = require('mongoose');
 
 /**
  * @typedef {Object} MongoFile
- * @property {mongoose.Schema.Types.ObjectId} [_id] - MongoDB Document ID
+ * @property {ObjectId} [_id] - MongoDB Document ID
  * @property {number} [__v] - MongoDB Version Key
- * @property {mongoose.Schema.Types.ObjectId} user - User ID
+ * @property {ObjectId} user - User ID
  * @property {string} [conversationId] - Optional conversation ID
  * @property {string} file_id - File identifier
  * @property {string} [temp_file_id] - Temporary File identifier
@@ -14,17 +14,19 @@ const mongoose = require('mongoose');
  * @property {string} filepath - Location of the file
  * @property {'file'} object - Type of object, always 'file'
  * @property {string} type - Type of file
- * @property {number} usage - Number of uses of the file
+ * @property {number} [usage=0] - Number of uses of the file
  * @property {string} [context] - Context of the file origin
- * @property {boolean} [embedded] - Whether or not the file is embedded in vector db
+ * @property {boolean} [embedded=false] - Whether or not the file is embedded in vector db
  * @property {string} [model] - The model to identify the group region of the file (for Azure OpenAI hosting)
- * @property {string} [source] - The source of the file
+ * @property {string} [source] - The source of the file (e.g., from FileSources)
  * @property {number} [width] - Optional width of the file
  * @property {number} [height] - Optional height of the file
- * @property {Date} [expiresAt] - Optional height of the file
+ * @property {Date} [expiresAt] - Optional expiration date of the file
  * @property {Date} [createdAt] - Date when the file was created
  * @property {Date} [updatedAt] - Date when the file was updated
  */
 
+/** @type {MongooseSchema<MongoFile>} */
 const fileSchema = mongoose.Schema(
   {
     user: {
@@ -91,7 +93,7 @@ const fileSchema = mongoose.Schema(
     height: Number,
     expiresAt: {
       type: Date,
-      expires: 3600,
+      expires: 3600, // 1 hour in seconds
     },
   },
   {
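The `expires: 3600` option tells Mongoose to build a TTL index on `expiresAt`, so MongoDB removes a document roughly 3600 seconds after the time stored in that field. A minimal sketch of tagging a record as temporary; the require path and export shape of the schema module are assumptions here, not confirmed by this diff.

const mongoose = require('mongoose');
const fileSchema = require('~/models/schema/fileSchema'); // assumed path/export
const File = mongoose.models.File || mongoose.model('File', fileSchema);

// After this update, the TTL monitor deletes the document about an hour later.
const markTemporary = (file_id) =>
  File.findOneAndUpdate({ file_id }, { expiresAt: new Date() }, { new: true }).lean();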
@@ -11,6 +11,7 @@ const messageSchema = mongoose.Schema(
     },
     conversationId: {
       type: String,
+      index: true,
       required: true,
       meiliIndex: true,
     },
@@ -110,6 +111,10 @@ const messageSchema = mongoose.Schema(
     thread_id: {
       type: String,
     },
+    /* frontend components */
+    iconURL: {
+      type: String,
+    },
   },
   { timestamps: true },
 );
api/models/schema/projectSchema.js — new file (30 lines)
@@ -0,0 +1,30 @@
const { Schema } = require('mongoose');

/**
 * @typedef {Object} MongoProject
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {string} name - The name of the project
 * @property {ObjectId[]} promptGroupIds - Array of PromptGroup IDs associated with the project
 * @property {Date} [createdAt] - Date when the project was created (added by timestamps)
 * @property {Date} [updatedAt] - Date when the project was last updated (added by timestamps)
 */

const projectSchema = new Schema(
  {
    name: {
      type: String,
      required: true,
      index: true,
    },
    promptGroupIds: {
      type: [Schema.Types.ObjectId],
      ref: 'PromptGroup',
      default: [],
    },
  },
  {
    timestamps: true,
  },
);

module.exports = projectSchema;
api/models/schema/promptSchema.js — new file (118 lines)
@@ -0,0 +1,118 @@
const mongoose = require('mongoose');
const { Constants } = require('librechat-data-provider');
const Schema = mongoose.Schema;

/**
 * @typedef {Object} MongoPromptGroup
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {string} name - The name of the prompt group
 * @property {ObjectId} author - The author of the prompt group
 * @property {ObjectId} [projectId=null] - The project ID of the prompt group
 * @property {ObjectId} [productionId=null] - The ID of the production prompt for the group
 * @property {string} authorName - The name of the author of the prompt group
 * @property {number} [numberOfGenerations=0] - Number of generations the prompt group has
 * @property {string} [oneliner=''] - Oneliner description of the prompt group
 * @property {string} [category=''] - Category of the prompt group
 * @property {string} [command] - Command for the prompt group
 * @property {Date} [createdAt] - Date when the prompt group was created (added by timestamps)
 * @property {Date} [updatedAt] - Date when the prompt group was last updated (added by timestamps)
 */

const promptGroupSchema = new Schema(
  {
    name: {
      type: String,
      required: true,
      index: true,
    },
    numberOfGenerations: {
      type: Number,
      default: 0,
    },
    oneliner: {
      type: String,
      default: '',
    },
    category: {
      type: String,
      default: '',
      index: true,
    },
    projectIds: {
      type: [Schema.Types.ObjectId],
      ref: 'Project',
      index: true,
    },
    productionId: {
      type: Schema.Types.ObjectId,
      ref: 'Prompt',
      required: true,
      index: true,
    },
    author: {
      type: Schema.Types.ObjectId,
      ref: 'User',
      required: true,
      index: true,
    },
    authorName: {
      type: String,
      required: true,
    },
    command: {
      type: String,
      index: true,
      validate: {
        validator: function (v) {
          return v === undefined || v === null || v === '' || /^[a-z0-9-]+$/.test(v);
        },
        message: (props) =>
          `${props.value} is not a valid command. Only lowercase alphanumeric characters and hyphens (-) are allowed.`,
      },
      maxlength: [
        Constants.COMMANDS_MAX_LENGTH,
        `Command cannot be longer than ${Constants.COMMANDS_MAX_LENGTH} characters`,
      ],
    },
  },
  {
    timestamps: true,
  },
);

const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);

const promptSchema = new Schema(
  {
    groupId: {
      type: Schema.Types.ObjectId,
      ref: 'PromptGroup',
      required: true,
      index: true,
    },
    author: {
      type: Schema.Types.ObjectId,
      ref: 'User',
      required: true,
    },
    prompt: {
      type: String,
      required: true,
    },
    type: {
      type: String,
      enum: ['text', 'chat'],
      required: true,
    },
  },
  {
    timestamps: true,
  },
);

const Prompt = mongoose.model('Prompt', promptSchema);

promptSchema.index({ createdAt: 1, updatedAt: 1 });
promptGroupSchema.index({ createdAt: 1, updatedAt: 1 });

module.exports = { Prompt, PromptGroup };
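A quick standalone check of the command validator's rule, using the same predicate as the schema (Constants.COMMANDS_MAX_LENGTH is not restated here; it comes from librechat-data-provider):

const isValidCommand = (v) => v === undefined || v === null || v === '' || /^[a-z0-9-]+$/.test(v);

console.log(isValidCommand('my-prompt-2')); // true
console.log(isValidCommand(''));            // true (empty is allowed)
console.log(isValidCommand('My Prompt'));   // false (uppercase and spaces are rejected)
console.log(isValidCommand('under_score')); // false (only lowercase letters, digits, hyphens)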
api/models/schema/roleSchema.js — new file (29 lines)
@@ -0,0 +1,29 @@
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const mongoose = require('mongoose');

const roleSchema = new mongoose.Schema({
  name: {
    type: String,
    required: true,
    unique: true,
    index: true,
  },
  [PermissionTypes.PROMPTS]: {
    [Permissions.SHARED_GLOBAL]: {
      type: Boolean,
      default: false,
    },
    [Permissions.USE]: {
      type: Boolean,
      default: true,
    },
    [Permissions.CREATE]: {
      type: Boolean,
      default: true,
    },
  },
});

const Role = mongoose.model('Role', roleSchema);

module.exports = Role;
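Because the bracketed keys are computed from librechat-data-provider constants, stored role documents are plain nested objects. A sketch of the resulting shape for a default user role; the literal key strings are assumptions, the authoritative values are PermissionTypes.PROMPTS, Permissions.SHARED_GLOBAL, and so on.

const exampleUserRole = {
  name: 'USER',
  PROMPTS: {               // PermissionTypes.PROMPTS
    SHARED_GLOBAL: false,  // Permissions.SHARED_GLOBAL
    USE: true,             // Permissions.USE
    CREATE: true,          // Permissions.CREATE
  },
};
console.log(exampleUserRole);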
api/models/schema/shareSchema.js — new file (38 lines)
@@ -0,0 +1,38 @@
const mongoose = require('mongoose');

const shareSchema = mongoose.Schema(
  {
    conversationId: {
      type: String,
      required: true,
    },
    title: {
      type: String,
      index: true,
    },
    user: {
      type: String,
      index: true,
    },
    messages: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Message' }],
    shareId: {
      type: String,
      index: true,
    },
    isPublic: {
      type: Boolean,
      default: false,
    },
    isVisible: {
      type: Boolean,
      default: false,
    },
    isAnonymous: {
      type: Boolean,
      default: true,
    },
  },
  { timestamps: true },
);

module.exports = mongoose.model('SharedLink', shareSchema);
@@ -7,6 +7,9 @@ const tokenSchema = new Schema({
     required: true,
     ref: 'user',
   },
+  email: {
+    type: String,
+  },
   token: {
     type: String,
     required: true,
@@ -1,5 +1,36 @@
 const mongoose = require('mongoose');
+const { SystemRoles } = require('librechat-data-provider');
+
+/**
+ * @typedef {Object} MongoSession
+ * @property {string} [refreshToken] - The refresh token
+ */
+
+/**
+ * @typedef {Object} MongoUser
+ * @property {ObjectId} [_id] - MongoDB Document ID
+ * @property {string} [name] - The user's name
+ * @property {string} [username] - The user's username, in lowercase
+ * @property {string} email - The user's email address
+ * @property {boolean} emailVerified - Whether the user's email is verified
+ * @property {string} [password] - The user's password, trimmed with 8-128 characters
+ * @property {string} [avatar] - The URL of the user's avatar
+ * @property {string} provider - The provider of the user's account (e.g., 'local', 'google')
+ * @property {string} [role='USER'] - The role of the user
+ * @property {string} [googleId] - Optional Google ID for the user
+ * @property {string} [facebookId] - Optional Facebook ID for the user
+ * @property {string} [openidId] - Optional OpenID ID for the user
+ * @property {string} [ldapId] - Optional LDAP ID for the user
+ * @property {string} [githubId] - Optional GitHub ID for the user
+ * @property {string} [discordId] - Optional Discord ID for the user
+ * @property {Array} [plugins=[]] - List of plugins used by the user
+ * @property {Array.<MongoSession>} [refreshToken] - List of sessions with refresh tokens
+ * @property {Date} [expiresAt] - Optional expiration date of the user document (TTL)
+ * @property {Date} [createdAt] - Date when the user was created (added by timestamps)
+ * @property {Date} [updatedAt] - Date when the user was last updated (added by timestamps)
+ */
+
+/** @type {MongooseSchema<MongoSession>} */
 const Session = mongoose.Schema({
   refreshToken: {
     type: String,
@@ -7,6 +38,7 @@ const Session = mongoose.Schema({
   },
 });
 
+/** @type {MongooseSchema<MongoUser>} */
 const userSchema = mongoose.Schema(
   {
     name: {
@@ -47,7 +79,7 @@ const userSchema = mongoose.Schema(
     },
     role: {
       type: String,
-      default: 'USER',
+      default: SystemRoles.USER,
     },
     googleId: {
       type: String,
@@ -64,6 +96,11 @@ const userSchema = mongoose.Schema(
       unique: true,
       sparse: true,
     },
+    ldapId: {
+      type: String,
+      unique: true,
+      sparse: true,
+    },
     githubId: {
       type: String,
       unique: true,
@@ -81,6 +118,10 @@ const userSchema = mongoose.Schema(
     refreshToken: {
       type: [Session],
     },
+    expiresAt: {
+      type: Date,
+      expires: 604800, // 7 days in seconds
+    },
   },
   { timestamps: true },
 );
@@ -40,7 +40,7 @@ const spendTokens = async (txData, tokenUsage) => {
     });
   }
 
-  if (!completionTokens) {
+  if (!completionTokens && isNaN(completionTokens)) {
     logger.debug('[spendTokens] !completionTokens', { prompt, completion });
     return;
   }
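The tightened guard changes how a zero-token completion is handled: only missing or non-numeric values short-circuit now. A plain Node check of the same expression, with no project imports:

const shouldSkip = (completionTokens) => !completionTokens && isNaN(completionTokens);

console.log(shouldSkip(undefined)); // true  -> skipped, as before
console.log(shouldSkip(NaN));       // true  -> skipped
console.log(shouldSkip(0));         // false -> a zero-token completion is now recorded
console.log(shouldSkip(42));        // false -> recorded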
@@ -12,10 +12,12 @@ const tokenValues = {
   '4k': { prompt: 1.5, completion: 2 },
   '16k': { prompt: 3, completion: 4 },
   'gpt-3.5-turbo-1106': { prompt: 1, completion: 2 },
+  'gpt-4o': { prompt: 5, completion: 15 },
   'gpt-4-1106': { prompt: 10, completion: 30 },
   'gpt-3.5-turbo-0125': { prompt: 0.5, completion: 1.5 },
   'claude-3-opus': { prompt: 15, completion: 75 },
   'claude-3-sonnet': { prompt: 3, completion: 15 },
+  'claude-3-5-sonnet': { prompt: 3, completion: 15 },
   'claude-3-haiku': { prompt: 0.25, completion: 1.25 },
   'claude-2.1': { prompt: 8, completion: 24 },
   'claude-2': { prompt: 8, completion: 24 },
@@ -52,6 +54,8 @@ const getValueKey = (model, endpoint) => {
     return 'gpt-3.5-turbo-1106';
   } else if (modelName.includes('gpt-3.5')) {
     return '4k';
+  } else if (modelName.includes('gpt-4o')) {
+    return 'gpt-4o';
   } else if (modelName.includes('gpt-4-vision')) {
     return 'gpt-4-1106';
   } else if (modelName.includes('gpt-4-1106')) {
|
|||||||
expect(getValueKey('gpt-4-turbo')).toBe('gpt-4-1106');
|
expect(getValueKey('gpt-4-turbo')).toBe('gpt-4-1106');
|
||||||
expect(getValueKey('gpt-4-0125')).toBe('gpt-4-1106');
|
expect(getValueKey('gpt-4-0125')).toBe('gpt-4-1106');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should return "gpt-4o" for model type of "gpt-4o"', () => {
|
||||||
|
expect(getValueKey('gpt-4o-2024-05-13')).toBe('gpt-4o');
|
||||||
|
expect(getValueKey('openai/gpt-4o')).toBe('gpt-4o');
|
||||||
|
expect(getValueKey('gpt-4o-turbo')).toBe('gpt-4o');
|
||||||
|
expect(getValueKey('gpt-4o-0125')).toBe('gpt-4o');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return "claude-3-5-sonnet" for model type of "claude-3-5-sonnet-"', () => {
|
||||||
|
expect(getValueKey('claude-3-5-sonnet-20240620')).toBe('claude-3-5-sonnet');
|
||||||
|
expect(getValueKey('anthropic/claude-3-5-sonnet')).toBe('claude-3-5-sonnet');
|
||||||
|
expect(getValueKey('claude-3-5-sonnet-turbo')).toBe('claude-3-5-sonnet');
|
||||||
|
expect(getValueKey('claude-3-5-sonnet-0125')).toBe('claude-3-5-sonnet');
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('getMultiplier', () => {
|
describe('getMultiplier', () => {
|
||||||
@@ -84,6 +98,17 @@ describe('getMultiplier', () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should return the correct multiplier for gpt-4o', () => {
|
||||||
|
const valueKey = getValueKey('gpt-4o-2024-05-13');
|
||||||
|
expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-4o'].prompt);
|
||||||
|
expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
|
||||||
|
tokenValues['gpt-4o'].completion,
|
||||||
|
);
|
||||||
|
expect(getMultiplier({ valueKey, tokenType: 'completion' })).not.toBe(
|
||||||
|
tokenValues['gpt-4-1106'].completion,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
it('should derive the valueKey from the model if not provided for new models', () => {
|
it('should derive the valueKey from the model if not provided for new models', () => {
|
||||||
expect(
|
expect(
|
||||||
getMultiplier({ tokenType: 'prompt', model: 'gpt-3.5-turbo-1106-some-other-info' }),
|
getMultiplier({ tokenType: 'prompt', model: 'gpt-3.5-turbo-1106-some-other-info' }),
|
||||||
|
|||||||
@@ -1,28 +1,37 @@
 const bcrypt = require('bcryptjs');
+const signPayload = require('~/server/services/signPayload');
 const User = require('./User');
 
-const hashPassword = async (password) => {
-  const hashedPassword = await new Promise((resolve, reject) => {
-    bcrypt.hash(password, 10, function (err, hash) {
-      if (err) {
-        reject(err);
-      } else {
-        resolve(hash);
-      }
-    });
-  });
-
-  return hashedPassword;
-};
-
 /**
  * Retrieve a user by ID and convert the found user document to a plain object.
  *
  * @param {string} userId - The ID of the user to find and return as a plain object.
- * @returns {Promise<Object>} A plain object representing the user document, or `null` if no user is found.
+ * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
+ * @returns {Promise<MongoUser>} A plain object representing the user document, or `null` if no user is found.
  */
-const getUser = async function (userId) {
-  return await User.findById(userId).lean();
+const getUserById = async function (userId, fieldsToSelect = null) {
+  const query = User.findById(userId);
+
+  if (fieldsToSelect) {
+    query.select(fieldsToSelect);
+  }
+
+  return await query.lean();
+};
+
+/**
+ * Search for a single user based on partial data and return matching user document as plain object.
+ * @param {Partial<MongoUser>} searchCriteria - The partial data to use for searching the user.
+ * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
+ * @returns {Promise<MongoUser>} A plain object representing the user document, or `null` if no user is found.
+ */
+const findUser = async function (searchCriteria, fieldsToSelect = null) {
+  const query = User.findOne(searchCriteria);
+  if (fieldsToSelect) {
+    query.select(fieldsToSelect);
+  }
+
+  return await query.lean();
 };
 
 /**
@@ -30,17 +39,127 @@ const getUser = async function (userId) {
  *
  * @param {string} userId - The ID of the user to update.
  * @param {Object} updateData - An object containing the properties to update.
- * @returns {Promise<Object>} The updated user document as a plain object, or `null` if no user is found.
+ * @returns {Promise<MongoUser>} The updated user document as a plain object, or `null` if no user is found.
  */
 const updateUser = async function (userId, updateData) {
-  return await User.findByIdAndUpdate(userId, updateData, {
+  const updateOperation = {
+    $set: updateData,
+    $unset: { expiresAt: '' }, // Remove the expiresAt field to prevent TTL
+  };
+  return await User.findByIdAndUpdate(userId, updateOperation, {
     new: true,
     runValidators: true,
   }).lean();
 };
 
-module.exports = {
-  hashPassword,
-  updateUser,
-  getUser,
+/**
+ * Creates a new user, optionally with a TTL of 1 week.
+ * @param {MongoUser} data - The user data to be created, must contain user_id.
+ * @param {boolean} [disableTTL=true] - Whether to disable the TTL. Defaults to `true`.
+ * @param {boolean} [returnUser=false] - Whether to return the created user document instead of its ID. Defaults to `false`.
+ * @returns {Promise<ObjectId>} A promise that resolves to the created user document ID.
+ * @throws {Error} If a user with the same user_id already exists.
+ */
+const createUser = async (data, disableTTL = true, returnUser = false) => {
+  const userData = {
+    ...data,
+    expiresAt: disableTTL ? null : new Date(Date.now() + 604800 * 1000), // 1 week in milliseconds
+  };
+
+  if (disableTTL) {
+    delete userData.expiresAt;
+  }
+
+  const user = await User.create(userData);
+  if (returnUser) {
+    return user.toObject();
+  }
+  return user._id;
+};
+
+/**
+ * Count the number of user documents in the collection based on the provided filter.
+ *
+ * @param {Object} [filter={}] - The filter to apply when counting the documents.
+ * @returns {Promise<number>} The count of documents that match the filter.
+ */
+const countUsers = async function (filter = {}) {
+  return await User.countDocuments(filter);
+};
+
+/**
+ * Delete a user by their unique ID.
+ *
+ * @param {string} userId - The ID of the user to delete.
+ * @returns {Promise<{ deletedCount: number }>} An object indicating the number of deleted documents.
+ */
+const deleteUserById = async function (userId) {
+  try {
+    const result = await User.deleteOne({ _id: userId });
+    if (result.deletedCount === 0) {
+      return { deletedCount: 0, message: 'No user found with that ID.' };
+    }
+    return { deletedCount: result.deletedCount, message: 'User was deleted successfully.' };
+  } catch (error) {
+    throw new Error('Error deleting user: ' + error.message);
+  }
+};
+
+const { SESSION_EXPIRY } = process.env ?? {};
+const expires = eval(SESSION_EXPIRY) ?? 1000 * 60 * 15;
+
+/**
+ * Generates a JWT token for a given user.
+ *
+ * @param {MongoUser} user - The user for whom the token is being generated.
+ * @returns {Promise<string>} A promise that resolves to a JWT token.
+ */
+const generateToken = async (user) => {
+  if (!user) {
+    throw new Error('No user provided');
+  }
+
+  return await signPayload({
+    payload: {
+      id: user._id,
+      username: user.username,
+      provider: user.provider,
+      email: user.email,
+    },
+    secret: process.env.JWT_SECRET,
+    expirationTime: expires / 1000,
+  });
+};
+
+/**
+ * Compares the provided password with the user's password.
+ *
+ * @param {MongoUser} user - the user to compare password for.
+ * @param {string} candidatePassword - The password to test against the user's password.
+ * @returns {Promise<boolean>} A promise that resolves to a boolean indicating if the password matches.
+ */
+const comparePassword = async (user, candidatePassword) => {
+  if (!user) {
+    throw new Error('No user provided');
+  }
+
+  return new Promise((resolve, reject) => {
+    bcrypt.compare(candidatePassword, user.password, (err, isMatch) => {
+      if (err) {
+        reject(err);
+      }
+      resolve(isMatch);
+    });
+  });
+};
+
+module.exports = {
+  comparePassword,
+  deleteUserById,
+  generateToken,
+  getUserById,
+  countUsers,
+  createUser,
+  updateUser,
+  findUser,
 };
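A small end-to-end sketch of the refactored user helpers. The email value is invented, and the require path assumes the script sits next to userMethods.js; JWT_SECRET and SESSION_EXPIRY are read from the environment as in the module above.

const {
  createUser,
  getUserById,
  generateToken,
  comparePassword,
  deleteUserById,
} = require('./userMethods'); // assumed relative path

async function demoUsers() {
  // TTL disabled (default): the document has no expiresAt and will not be auto-deleted.
  const userId = await createUser({ email: 'demo@example.com', provider: 'local' });

  const user = await getUserById(userId, '-password -__v');
  const jwt = await generateToken(user); // signs { id, username, provider, email }

  // Compares against the stored bcrypt hash; the demo user above has none,
  // so in practice this is called with a user that registered with a password.
  // const ok = await comparePassword(user, 'candidate-password');

  await deleteUserById(userId);
  return jwt;
}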
@@ -1,6 +1,6 @@
 {
   "name": "@librechat/backend",
-  "version": "0.7.1",
+  "version": "0.7.4-rc1",
   "description": "",
   "scripts": {
     "start": "echo 'please run this from the root directory'",
@@ -40,7 +40,7 @@
     "@keyv/redis": "^2.8.1",
     "@langchain/community": "^0.0.46",
     "@langchain/google-genai": "^0.0.11",
-    "@langchain/google-vertexai": "^0.0.5",
+    "@langchain/google-vertexai": "^0.0.17",
     "axios": "^1.3.4",
     "bcryptjs": "^2.4.3",
     "cheerio": "^1.0.0-rc.12",
@@ -74,7 +74,8 @@
     "multer": "^1.4.5-lts.1",
     "nodejs-gpt": "^1.37.4",
     "nodemailer": "^6.9.4",
-    "openai": "4.36.0",
+    "ollama": "^0.5.0",
+    "openai": "^4.47.1",
     "openai-chat-tokens": "^0.2.8",
     "openid-client": "^5.4.2",
     "passport": "^0.6.0",
@@ -84,14 +85,16 @@
     "passport-github2": "^0.1.12",
     "passport-google-oauth20": "^2.0.0",
     "passport-jwt": "^4.0.1",
+    "passport-ldapauth": "^3.0.1",
     "passport-local": "^1.0.0",
     "pino": "^8.12.1",
     "sharp": "^0.32.6",
-    "tiktoken": "^1.0.10",
+    "tiktoken": "^1.0.15",
     "traverse": "^0.6.7",
     "ua-parser-js": "^1.0.36",
     "winston": "^3.11.0",
     "winston-daily-rotate-file": "^4.7.1",
+    "ws": "^8.17.0",
     "zod": "^3.22.4"
   },
   "devDependencies": {
@@ -2,7 +2,7 @@ const throttle = require('lodash/throttle');
 const { getResponseSender, Constants, EModelEndpoint } = require('librechat-data-provider');
 const { createAbortController, handleAbortError } = require('~/server/middleware');
 const { sendMessage, createOnProgress } = require('~/server/utils');
-const { saveMessage, getConvo } = require('~/models');
+const { saveMessage } = require('~/models');
 const { logger } = require('~/config');
 
 const AskController = async (req, res, next, initializeClient, addTitle) => {
@@ -18,6 +18,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
   logger.debug('[AskController]', { text, conversationId, ...endpointOption });
 
   let userMessage;
+  let userMessagePromise;
   let promptTokens;
   let userMessageId;
   let responseMessageId;
@@ -34,6 +35,8 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
     if (key === 'userMessage') {
       userMessage = data[key];
       userMessageId = data[key].messageId;
+    } else if (key === 'userMessagePromise') {
+      userMessagePromise = data[key];
     } else if (key === 'responseMessageId') {
       responseMessageId = data[key];
     } else if (key === 'promptTokens') {
@@ -74,6 +77,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
   const getAbortData = () => ({
     sender,
     conversationId,
+    userMessagePromise,
     messageId: responseMessageId,
     parentMessageId: overrideParentMessageId ?? userMessageId,
     text: getPartialText(),
@@ -81,7 +85,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
     promptTokens,
   });
 
-  const { abortController, onStart } = createAbortController(req, res, getAbortData);
+  const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData);
 
   res.on('close', () => {
     logger.debug('[AskController] Request closed');
@@ -105,11 +109,11 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
     getReqData,
     onStart,
     abortController,
-    onProgress: progressCallback.call(null, {
+    progressCallback,
+    progressOptions: {
       res,
-      text,
-      parentMessageId: overrideParentMessageId || userMessageId,
-    }),
+      // parentMessageId: overrideParentMessageId || userMessageId,
+    },
   };
 
   let response = await client.sendMessage(text, messageOptions);
@@ -120,7 +124,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
 
   response.endpoint = endpointOption.endpoint;
 
-  const conversation = await getConvo(user, conversationId);
+  const { conversation = {} } = await client.responsePromise;
   conversation.title =
     conversation && !conversation.title ? null : conversation?.title || 'New Chat';
 
@@ -143,7 +147,9 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
     await saveMessage({ ...response, user });
   }
 
-  await saveMessage(userMessage);
+  if (!client.skipSaveUserMessage) {
+    await saveMessage(userMessage);
+  }
 
   if (addTitle && parentMessageId === Constants.NO_PARENT && newConvo) {
     addTitle(req, {
@@ -1,45 +1,29 @@
 const crypto = require('crypto');
 const cookies = require('cookie');
 const jwt = require('jsonwebtoken');
-const { Session, User } = require('~/models');
 const {
   registerUser,
   resetPassword,
   setAuthTokens,
   requestPasswordReset,
 } = require('~/server/services/AuthService');
+const { Session, getUserById } = require('~/models');
 const { logger } = require('~/config');
 
 const registrationController = async (req, res) => {
   try {
     const response = await registerUser(req.body);
-    if (response.status === 200) {
-      const { status, user } = response;
-      let newUser = await User.findOne({ _id: user._id });
-      if (!newUser) {
-        newUser = new User(user);
-        await newUser.save();
-      }
-      const token = await setAuthTokens(user._id, res);
-      res.setHeader('Authorization', `Bearer ${token}`);
-      res.status(status).send({ user });
-    } else {
-      const { status, message } = response;
-      res.status(status).send({ message });
-    }
+    const { status, message } = response;
+    res.status(status).send({ message });
   } catch (err) {
     logger.error('[registrationController]', err);
     return res.status(500).json({ message: err.message });
   }
 };
 
-const getUserController = async (req, res) => {
-  return res.status(200).send(req.user);
-};
-
 const resetPasswordRequestController = async (req, res) => {
   try {
-    const resetService = await requestPasswordReset(req.body.email);
+    const resetService = await requestPasswordReset(req);
     if (resetService instanceof Error) {
       return res.status(400).json(resetService);
     } else {
@@ -77,7 +61,7 @@ const refreshController = async (req, res) => {
 
   try {
     const payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
-    const user = await User.findOne({ _id: payload.id });
+    const user = await getUserById(payload.id, '-password -__v');
     if (!user) {
       return res.status(401).redirect('/login');
     }
@@ -86,8 +70,7 @@ const refreshController = async (req, res) => {
 
     if (process.env.NODE_ENV === 'CI') {
       const token = await setAuthTokens(userId, res);
-      const userObj = user.toJSON();
-      return res.status(200).send({ token, user: userObj });
+      return res.status(200).send({ token, user });
     }
 
     // Hash the refresh token
@@ -98,8 +81,7 @@ const refreshController = async (req, res) => {
     const session = await Session.findOne({ user: userId, refreshTokenHash: hashedToken });
     if (session && session.expiration > new Date()) {
       const token = await setAuthTokens(userId, res, session._id);
-      const userObj = user.toJSON();
-      res.status(200).send({ token, user: userObj });
+      res.status(200).send({ token, user });
     } else if (req?.query?.retry) {
       // Retrying from a refresh token request that failed (401)
       res.status(403).send('No session found');
@@ -115,7 +97,6 @@ const refreshController = async (req, res) => {
 };
 
 module.exports = {
-  getUserController,
   refreshController,
   registrationController,
   resetPasswordController,
const { getResponseSender, EModelEndpoint } = require('librechat-data-provider');
|
const { getResponseSender, EModelEndpoint } = require('librechat-data-provider');
|
||||||
const { createAbortController, handleAbortError } = require('~/server/middleware');
|
const { createAbortController, handleAbortError } = require('~/server/middleware');
|
||||||
const { sendMessage, createOnProgress } = require('~/server/utils');
|
const { sendMessage, createOnProgress } = require('~/server/utils');
|
||||||
const { saveMessage, getConvo } = require('~/models');
|
const { saveMessage } = require('~/models');
|
||||||
const { logger } = require('~/config');
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const EditController = async (req, res, next, initializeClient) => {
|
const EditController = async (req, res, next, initializeClient) => {
|
||||||
@@ -27,6 +27,7 @@ const EditController = async (req, res, next, initializeClient) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
let userMessage;
|
let userMessage;
|
||||||
|
let userMessagePromise;
|
||||||
let promptTokens;
|
let promptTokens;
|
||||||
const sender = getResponseSender({
|
const sender = getResponseSender({
|
||||||
...endpointOption,
|
...endpointOption,
|
||||||
@@ -40,6 +41,8 @@ const EditController = async (req, res, next, initializeClient) => {
|
|||||||
for (let key in data) {
|
for (let key in data) {
|
||||||
if (key === 'userMessage') {
|
if (key === 'userMessage') {
|
||||||
userMessage = data[key];
|
userMessage = data[key];
|
||||||
|
} else if (key === 'userMessagePromise') {
|
||||||
|
userMessagePromise = data[key];
|
||||||
} else if (key === 'responseMessageId') {
|
} else if (key === 'responseMessageId') {
|
||||||
responseMessageId = data[key];
|
responseMessageId = data[key];
|
||||||
} else if (key === 'promptTokens') {
|
} else if (key === 'promptTokens') {
|
||||||
@@ -73,6 +76,7 @@ const EditController = async (req, res, next, initializeClient) => {
|
|||||||
|
|
||||||
const getAbortData = () => ({
|
const getAbortData = () => ({
|
||||||
conversationId,
|
conversationId,
|
||||||
|
userMessagePromise,
|
||||||
messageId: responseMessageId,
|
messageId: responseMessageId,
|
||||||
sender,
|
sender,
|
||||||
parentMessageId: overrideParentMessageId ?? userMessageId,
|
parentMessageId: overrideParentMessageId ?? userMessageId,
|
||||||
@@ -81,7 +85,7 @@ const EditController = async (req, res, next, initializeClient) => {
|
|||||||
promptTokens,
|
promptTokens,
|
||||||
});
|
});
|
||||||
|
|
||||||
const { abortController, onStart } = createAbortController(req, res, getAbortData);
|
const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData);
|
||||||
|
|
||||||
res.on('close', () => {
|
res.on('close', () => {
|
||||||
logger.debug('[EditController] Request closed');
|
logger.debug('[EditController] Request closed');
|
||||||
@@ -112,14 +116,14 @@ const EditController = async (req, res, next, initializeClient) => {
|
|||||||
getReqData,
|
getReqData,
|
||||||
onStart,
|
onStart,
|
||||||
abortController,
|
abortController,
|
||||||
onProgress: progressCallback.call(null, {
|
progressCallback,
|
||||||
|
progressOptions: {
|
||||||
res,
|
res,
|
||||||
text,
|
// parentMessageId: overrideParentMessageId || userMessageId,
|
||||||
parentMessageId: overrideParentMessageId || userMessageId,
|
},
|
||||||
}),
|
|
||||||
});
|
});
|
||||||
|
|
||||||
const conversation = await getConvo(user, conversationId);
|
const { conversation = {} } = await client.responsePromise;
|
||||||
conversation.title =
|
conversation.title =
|
||||||
conversation && !conversation.title ? null : conversation?.title || 'New Chat';
|
conversation && !conversation.title ? null : conversation?.title || 'New Chat';
|
||||||
|
|
||||||
|
|||||||
@@ -16,10 +16,28 @@ async function endpointController(req, res) {
|
|||||||
/** @type {TEndpointsConfig} */
|
/** @type {TEndpointsConfig} */
|
||||||
const mergedConfig = { ...defaultEndpointsConfig, ...customConfigEndpoints };
|
const mergedConfig = { ...defaultEndpointsConfig, ...customConfigEndpoints };
|
||||||
if (mergedConfig[EModelEndpoint.assistants] && req.app.locals?.[EModelEndpoint.assistants]) {
|
if (mergedConfig[EModelEndpoint.assistants] && req.app.locals?.[EModelEndpoint.assistants]) {
|
||||||
const { disableBuilder, retrievalModels, capabilities, ..._rest } =
|
const { disableBuilder, retrievalModels, capabilities, version, ..._rest } =
|
||||||
req.app.locals[EModelEndpoint.assistants];
|
req.app.locals[EModelEndpoint.assistants];
|
||||||
|
|
||||||
mergedConfig[EModelEndpoint.assistants] = {
|
mergedConfig[EModelEndpoint.assistants] = {
|
||||||
...mergedConfig[EModelEndpoint.assistants],
|
...mergedConfig[EModelEndpoint.assistants],
|
||||||
|
version,
|
||||||
|
retrievalModels,
|
||||||
|
disableBuilder,
|
||||||
|
capabilities,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
mergedConfig[EModelEndpoint.azureAssistants] &&
|
||||||
|
req.app.locals?.[EModelEndpoint.azureAssistants]
|
||||||
|
) {
|
||||||
|
const { disableBuilder, retrievalModels, capabilities, version, ..._rest } =
|
||||||
|
req.app.locals[EModelEndpoint.azureAssistants];
|
||||||
|
|
||||||
|
mergedConfig[EModelEndpoint.azureAssistants] = {
|
||||||
|
...mergedConfig[EModelEndpoint.azureAssistants],
|
||||||
|
version,
|
||||||
retrievalModels,
|
retrievalModels,
|
||||||
disableBuilder,
|
disableBuilder,
|
||||||
capabilities,
|
capabilities,
|
||||||
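The hunk above gives the new azureAssistants endpoint the same merged shape the assistants endpoint already had. Roughly, the resulting entry looks like this sketch, where the field values are invented for illustration and the real ones come from req.app.locals:

// Illustrative shape only; the values shown are assumptions, not actual config.
mergedConfig[EModelEndpoint.azureAssistants] = {
  ...mergedConfig[EModelEndpoint.azureAssistants],
  version: 1,                                      // assistants API version
  retrievalModels: ['gpt-4-turbo'],                // models allowed for retrieval
  disableBuilder: false,                           // whether the builder UI is hidden
  capabilities: ['code_interpreter', 'retrieval'], // enabled assistant capabilities
};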
|
|||||||
@@ -55,19 +55,27 @@ const getAvailablePluginsController = async (req, res) => {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** @type {{ filteredTools: string[], includedTools: string[] }} */
|
||||||
|
const { filteredTools = [], includedTools = [] } = req.app.locals;
|
||||||
const pluginManifest = await fs.readFile(req.app.locals.paths.pluginManifest, 'utf8');
|
const pluginManifest = await fs.readFile(req.app.locals.paths.pluginManifest, 'utf8');
|
||||||
|
|
||||||
const jsonData = JSON.parse(pluginManifest);
|
const jsonData = JSON.parse(pluginManifest);
|
||||||
/** @type {TPlugin[]} */
|
|
||||||
const uniquePlugins = filterUniquePlugins(jsonData);
|
const uniquePlugins = filterUniquePlugins(jsonData);
|
||||||
const authenticatedPlugins = uniquePlugins.map((plugin) => {
|
let authenticatedPlugins = [];
|
||||||
if (isPluginAuthenticated(plugin)) {
|
for (const plugin of uniquePlugins) {
|
||||||
return { ...plugin, authenticated: true };
|
authenticatedPlugins.push(
|
||||||
} else {
|
isPluginAuthenticated(plugin) ? { ...plugin, authenticated: true } : plugin,
|
||||||
return plugin;
|
);
|
||||||
}
|
}
|
||||||
});
|
|
||||||
const plugins = await addOpenAPISpecs(authenticatedPlugins);
|
let plugins = await addOpenAPISpecs(authenticatedPlugins);
|
||||||
|
|
||||||
|
if (includedTools.length > 0) {
|
||||||
|
plugins = plugins.filter((plugin) => includedTools.includes(plugin.pluginKey));
|
||||||
|
} else {
|
||||||
|
plugins = plugins.filter((plugin) => !filteredTools.includes(plugin.pluginKey));
|
||||||
|
}
|
||||||
|
|
||||||
await cache.set(CacheKeys.PLUGINS, plugins);
|
await cache.set(CacheKeys.PLUGINS, plugins);
|
||||||
res.status(200).json(plugins);
|
res.status(200).json(plugins);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
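The change above replaces the per-plugin map with a loop and then applies two new lists read from req.app.locals: includedTools acts as an allow-list and wins outright, while filteredTools is a deny-list applied only when no allow-list is set. A minimal sketch of that precedence, assuming each plugin object carries a pluginKey as in the diff:

// Hedged sketch of the include/exclude precedence; pluginKey identifies a tool.
function selectPlugins(plugins, { includedTools = [], filteredTools = [] } = {}) {
  if (includedTools.length > 0) {
    // An explicit allow-list takes precedence over the deny-list.
    return plugins.filter((plugin) => includedTools.includes(plugin.pluginKey));
  }
  return plugins.filter((plugin) => !filteredTools.includes(plugin.pluginKey));
}

// Example with invented keys:
// selectPlugins([{ pluginKey: 'dalle' }, { pluginKey: 'wolfram' }], { filteredTools: ['wolfram'] })
// keeps only the 'dalle' plugin.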
|
|||||||
@@ -1,11 +1,37 @@
|
|||||||
const { updateUserPluginsService } = require('~/server/services/UserService');
|
const {
|
||||||
|
Session,
|
||||||
|
Balance,
|
||||||
|
getFiles,
|
||||||
|
deleteFiles,
|
||||||
|
deleteConvos,
|
||||||
|
deletePresets,
|
||||||
|
deleteMessages,
|
||||||
|
deleteUserById,
|
||||||
|
} = require('~/models');
|
||||||
const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/services/PluginService');
|
const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/services/PluginService');
|
||||||
|
const { updateUserPluginsService, deleteUserKey } = require('~/server/services/UserService');
|
||||||
|
const { verifyEmail, resendVerificationEmail } = require('~/server/services/AuthService');
|
||||||
|
const { processDeleteRequest } = require('~/server/services/Files/process');
|
||||||
|
const { deleteAllSharedLinks } = require('~/models/Share');
|
||||||
|
const { Transaction } = require('~/models/Transaction');
|
||||||
const { logger } = require('~/config');
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const getUserController = async (req, res) => {
|
const getUserController = async (req, res) => {
|
||||||
res.status(200).send(req.user);
|
res.status(200).send(req.user);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const deleteUserFiles = async (req) => {
|
||||||
|
try {
|
||||||
|
const userFiles = await getFiles({ user: req.user.id });
|
||||||
|
await processDeleteRequest({
|
||||||
|
req,
|
||||||
|
files: userFiles,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[deleteUserFiles]', error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
const updateUserPluginsController = async (req, res) => {
|
const updateUserPluginsController = async (req, res) => {
|
||||||
const { user } = req;
|
const { user } = req;
|
||||||
const { pluginKey, action, auth, isAssistantTool } = req.body;
|
const { pluginKey, action, auth, isAssistantTool } = req.body;
|
||||||
@@ -49,11 +75,68 @@ const updateUserPluginsController = async (req, res) => {
|
|||||||
res.status(200).send();
|
res.status(200).send();
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
logger.error('[updateUserPluginsController]', err);
|
logger.error('[updateUserPluginsController]', err);
|
||||||
res.status(500).json({ message: err.message });
|
return res.status(500).json({ message: 'Something went wrong.' });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const deleteUserController = async (req, res) => {
|
||||||
|
const { user } = req;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await deleteMessages({ user: user.id }); // delete user messages
|
||||||
|
await Session.deleteMany({ user: user.id }); // delete user sessions
|
||||||
|
await Transaction.deleteMany({ user: user.id }); // delete user transactions
|
||||||
|
await deleteUserKey({ userId: user.id, all: true }); // delete user keys
|
||||||
|
await Balance.deleteMany({ user: user._id }); // delete user balances
|
||||||
|
await deletePresets(user.id); // delete user presets
|
||||||
|
/* TODO: Delete Assistant Threads */
|
||||||
|
await deleteConvos(user.id); // delete user convos
|
||||||
|
await deleteUserPluginAuth(user.id, null, true); // delete user plugin auth
|
||||||
|
await deleteUserById(user.id); // delete user
|
||||||
|
await deleteAllSharedLinks(user.id); // delete user shared links
|
||||||
|
await deleteUserFiles(req); // delete user files
|
||||||
|
await deleteFiles(null, user.id); // delete database files in case of orphaned files from previous steps
|
||||||
|
/* TODO: queue job for cleaning actions and assistants of non-existent users */
|
||||||
|
logger.info(`User deleted account. Email: ${user.email} ID: ${user.id}`);
|
||||||
|
res.status(200).send({ message: 'User deleted' });
|
||||||
|
} catch (err) {
|
||||||
|
logger.error('[deleteUserController]', err);
|
||||||
|
return res.status(500).json({ message: 'Something went wrong.' });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const verifyEmailController = async (req, res) => {
|
||||||
|
try {
|
||||||
|
const verifyEmailService = await verifyEmail(req);
|
||||||
|
if (verifyEmailService instanceof Error) {
|
||||||
|
return res.status(400).json(verifyEmailService);
|
||||||
|
} else {
|
||||||
|
return res.status(200).json(verifyEmailService);
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
logger.error('[verifyEmailController]', e);
|
||||||
|
return res.status(500).json({ message: 'Something went wrong.' });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const resendVerificationController = async (req, res) => {
|
||||||
|
try {
|
||||||
|
const result = await resendVerificationEmail(req);
|
||||||
|
if (result instanceof Error) {
|
||||||
|
return res.status(400).json(result);
|
||||||
|
} else {
|
||||||
|
return res.status(200).json(result);
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
logger.error('[resendVerificationController]', e);
|
||||||
|
return res.status(500).json({ message: 'Something went wrong.' });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
getUserController,
|
getUserController,
|
||||||
|
deleteUserController,
|
||||||
|
verifyEmailController,
|
||||||
updateUserPluginsController,
|
updateUserPluginsController,
|
||||||
|
resendVerificationController,
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,14 +1,13 @@
|
|||||||
const { v4 } = require('uuid');
|
const { v4 } = require('uuid');
|
||||||
const express = require('express');
|
|
||||||
const {
|
const {
|
||||||
Constants,
|
Constants,
|
||||||
RunStatus,
|
RunStatus,
|
||||||
CacheKeys,
|
CacheKeys,
|
||||||
FileSources,
|
|
||||||
ContentTypes,
|
ContentTypes,
|
||||||
EModelEndpoint,
|
EModelEndpoint,
|
||||||
ViolationTypes,
|
ViolationTypes,
|
||||||
ImageVisionTool,
|
ImageVisionTool,
|
||||||
|
checkOpenAIStorage,
|
||||||
AssistantStreamEvents,
|
AssistantStreamEvents,
|
||||||
} = require('librechat-data-provider');
|
} = require('librechat-data-provider');
|
||||||
const {
|
const {
|
||||||
@@ -21,44 +20,36 @@ const {
|
|||||||
} = require('~/server/services/Threads');
|
} = require('~/server/services/Threads');
|
||||||
const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
|
const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
|
||||||
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
|
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
|
||||||
const { addTitle, initializeClient } = require('~/server/services/Endpoints/assistants');
|
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
|
||||||
const { formatMessage, createVisionPrompt } = require('~/app/clients/prompts');
|
const { formatMessage, createVisionPrompt } = require('~/app/clients/prompts');
|
||||||
const { createRun, StreamRunManager } = require('~/server/services/Runs');
|
const { createRun, StreamRunManager } = require('~/server/services/Runs');
|
||||||
|
const { addTitle } = require('~/server/services/Endpoints/assistants');
|
||||||
const { getTransactions } = require('~/models/Transaction');
|
const { getTransactions } = require('~/models/Transaction');
|
||||||
const checkBalance = require('~/models/checkBalance');
|
const checkBalance = require('~/models/checkBalance');
|
||||||
const { getConvo } = require('~/models/Conversation');
|
const { getConvo } = require('~/models/Conversation');
|
||||||
const getLogStores = require('~/cache/getLogStores');
|
const getLogStores = require('~/cache/getLogStores');
|
||||||
const { getModelMaxTokens } = require('~/utils');
|
const { getModelMaxTokens } = require('~/utils');
|
||||||
|
const { getOpenAIClient } = require('./helpers');
|
||||||
const { logger } = require('~/config');
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const router = express.Router();
|
|
||||||
const {
|
|
||||||
setHeaders,
|
|
||||||
handleAbort,
|
|
||||||
validateModel,
|
|
||||||
handleAbortError,
|
|
||||||
// validateEndpoint,
|
|
||||||
buildEndpointOption,
|
|
||||||
} = require('~/server/middleware');
|
|
||||||
|
|
||||||
router.post('/abort', handleAbort());
|
|
||||||
|
|
||||||
const ten_minutes = 1000 * 60 * 10;
|
const ten_minutes = 1000 * 60 * 10;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @route POST /
|
* @route POST /
|
||||||
* @desc Chat with an assistant
|
* @desc Chat with an assistant
|
||||||
* @access Public
|
* @access Public
|
||||||
* @param {express.Request} req - The request object, containing the request data.
|
* @param {object} req - The request object, containing the request data.
|
||||||
* @param {express.Response} res - The response object, used to send back a response.
|
* @param {object} req.body - The request payload.
|
||||||
|
* @param {Express.Response} res - The response object, used to send back a response.
|
||||||
* @returns {void}
|
* @returns {void}
|
||||||
*/
|
*/
|
||||||
router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res) => {
|
const chatV1 = async (req, res) => {
|
||||||
logger.debug('[/assistants/chat/] req.body', req.body);
|
logger.debug('[/assistants/chat/] req.body', req.body);
|
||||||
|
|
||||||
const {
|
const {
|
||||||
text,
|
text,
|
||||||
model,
|
model,
|
||||||
|
endpoint,
|
||||||
files = [],
|
files = [],
|
||||||
promptPrefix,
|
promptPrefix,
|
||||||
assistant_id,
|
assistant_id,
|
||||||
@@ -69,30 +60,6 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
parentMessageId: _parentId = Constants.NO_PARENT,
|
parentMessageId: _parentId = Constants.NO_PARENT,
|
||||||
} = req.body;
|
} = req.body;
|
||||||
|
|
||||||
/** @type {Partial<TAssistantEndpoint>} */
|
|
||||||
const assistantsConfig = req.app.locals?.[EModelEndpoint.assistants];
|
|
||||||
|
|
||||||
if (assistantsConfig) {
|
|
||||||
const { supportedIds, excludedIds } = assistantsConfig;
|
|
||||||
const error = { message: 'Assistant not supported' };
|
|
||||||
if (supportedIds?.length && !supportedIds.includes(assistant_id)) {
|
|
||||||
return await handleAbortError(res, req, error, {
|
|
||||||
sender: 'System',
|
|
||||||
conversationId: convoId,
|
|
||||||
messageId: v4(),
|
|
||||||
parentMessageId: _messageId,
|
|
||||||
error,
|
|
||||||
});
|
|
||||||
} else if (excludedIds?.length && excludedIds.includes(assistant_id)) {
|
|
||||||
return await handleAbortError(res, req, error, {
|
|
||||||
sender: 'System',
|
|
||||||
conversationId: convoId,
|
|
||||||
messageId: v4(),
|
|
||||||
parentMessageId: _messageId,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/** @type {OpenAIClient} */
|
/** @type {OpenAIClient} */
|
||||||
let openai;
|
let openai;
|
||||||
/** @type {string|undefined} - the current thread id */
|
/** @type {string|undefined} - the current thread id */
|
||||||
@@ -138,7 +105,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
user: req.user.id,
|
user: req.user.id,
|
||||||
shouldSaveMessage: false,
|
shouldSaveMessage: false,
|
||||||
messageId: responseMessageId,
|
messageId: responseMessageId,
|
||||||
endpoint: EModelEndpoint.assistants,
|
endpoint,
|
||||||
};
|
};
|
||||||
|
|
||||||
if (error.message === 'Run cancelled') {
|
if (error.message === 'Run cancelled') {
|
||||||
@@ -149,7 +116,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
logger.debug('[/assistants/chat/] Request aborted on close');
|
logger.debug('[/assistants/chat/] Request aborted on close');
|
||||||
} else if (/Files.*are invalid/.test(error.message)) {
|
} else if (/Files.*are invalid/.test(error.message)) {
|
||||||
const errorMessage = `Files are invalid, or may not have uploaded yet.${
|
const errorMessage = `Files are invalid, or may not have uploaded yet.${
|
||||||
req.app.locals?.[EModelEndpoint.azureOpenAI].assistants
|
endpoint === EModelEndpoint.azureAssistants
|
||||||
? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
|
? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
|
||||||
: ''
|
: ''
|
||||||
}`;
|
}`;
|
||||||
@@ -205,6 +172,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
const runMessages = await checkMessageGaps({
|
const runMessages = await checkMessageGaps({
|
||||||
openai,
|
openai,
|
||||||
run_id,
|
run_id,
|
||||||
|
endpoint,
|
||||||
thread_id,
|
thread_id,
|
||||||
conversationId,
|
conversationId,
|
||||||
latestMessageId: responseMessageId,
|
latestMessageId: responseMessageId,
|
||||||
@@ -311,8 +279,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
/** @type {{ openai: OpenAIClient }} */
|
const { openai: _openai, client } = await getOpenAIClient({
|
||||||
const { openai: _openai, client } = await initializeClient({
|
|
||||||
req,
|
req,
|
||||||
res,
|
res,
|
||||||
endpointOption: req.body.endpointOption,
|
endpointOption: req.body.endpointOption,
|
||||||
@@ -320,6 +287,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
});
|
});
|
||||||
|
|
||||||
openai = _openai;
|
openai = _openai;
|
||||||
|
await validateAuthor({ req, openai });
|
||||||
|
|
||||||
if (previousMessages.length) {
|
if (previousMessages.length) {
|
||||||
parentMessageId = previousMessages[previousMessages.length - 1].messageId;
|
parentMessageId = previousMessages[previousMessages.length - 1].messageId;
|
||||||
@@ -370,10 +338,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
|
|
||||||
/** @type {MongoFile[]} */
|
/** @type {MongoFile[]} */
|
||||||
const attachments = await req.body.endpointOption.attachments;
|
const attachments = await req.body.endpointOption.attachments;
|
||||||
if (
|
if (attachments && attachments.every((attachment) => checkOpenAIStorage(attachment.source))) {
|
||||||
attachments &&
|
|
||||||
attachments.every((attachment) => attachment.source === FileSources.openai)
|
|
||||||
) {
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -431,7 +396,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
|
|
||||||
if (processedFiles) {
|
if (processedFiles) {
|
||||||
for (const file of processedFiles) {
|
for (const file of processedFiles) {
|
||||||
if (file.source !== FileSources.openai) {
|
if (!checkOpenAIStorage(file.source)) {
|
||||||
attachedFileIds.delete(file.file_id);
|
attachedFileIds.delete(file.file_id);
|
||||||
const index = file_ids.indexOf(file.file_id);
|
const index = file_ids.indexOf(file.file_id);
|
||||||
if (index > -1) {
|
if (index > -1) {
|
||||||
@@ -467,6 +432,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
assistant_id,
|
assistant_id,
|
||||||
thread_id,
|
thread_id,
|
||||||
model: assistant_id,
|
model: assistant_id,
|
||||||
|
endpoint,
|
||||||
};
|
};
|
||||||
|
|
||||||
previousMessages.push(requestMessage);
|
previousMessages.push(requestMessage);
|
||||||
@@ -476,7 +442,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
|
|
||||||
conversation = {
|
conversation = {
|
||||||
conversationId,
|
conversationId,
|
||||||
endpoint: EModelEndpoint.assistants,
|
endpoint,
|
||||||
promptPrefix: promptPrefix,
|
promptPrefix: promptPrefix,
|
||||||
instructions: instructions,
|
instructions: instructions,
|
||||||
assistant_id,
|
assistant_id,
|
||||||
@@ -513,7 +479,8 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
let response;
|
let response;
|
||||||
|
|
||||||
const processRun = async (retry = false) => {
|
const processRun = async (retry = false) => {
|
||||||
if (req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) {
|
if (endpoint === EModelEndpoint.azureAssistants) {
|
||||||
|
body.model = openai._options.model;
|
||||||
openai.attachedFileIds = attachedFileIds;
|
openai.attachedFileIds = attachedFileIds;
|
||||||
openai.visionPromise = visionPromise;
|
openai.visionPromise = visionPromise;
|
||||||
if (retry) {
|
if (retry) {
|
||||||
@@ -602,6 +569,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
assistant_id,
|
assistant_id,
|
||||||
thread_id,
|
thread_id,
|
||||||
model: assistant_id,
|
model: assistant_id,
|
||||||
|
endpoint,
|
||||||
};
|
};
|
||||||
|
|
||||||
sendMessage(res, {
|
sendMessage(res, {
|
||||||
@@ -654,6 +622,6 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
|||||||
} catch (error) {
|
} catch (error) {
|
||||||
await handleError(error);
|
await handleError(error);
|
||||||
}
|
}
|
||||||
});
|
};
|
||||||
|
|
||||||
module.exports = router;
|
module.exports = chatV1;
|
||||||
api/server/controllers/assistants/chatV2.js (new file, 597 lines)
@@ -0,0 +1,597 @@
|
|||||||
|
const { v4 } = require('uuid');
|
||||||
|
const {
|
||||||
|
Constants,
|
||||||
|
RunStatus,
|
||||||
|
CacheKeys,
|
||||||
|
ContentTypes,
|
||||||
|
ToolCallTypes,
|
||||||
|
EModelEndpoint,
|
||||||
|
ViolationTypes,
|
||||||
|
retrievalMimeTypes,
|
||||||
|
AssistantStreamEvents,
|
||||||
|
} = require('librechat-data-provider');
|
||||||
|
const {
|
||||||
|
initThread,
|
||||||
|
recordUsage,
|
||||||
|
saveUserMessage,
|
||||||
|
checkMessageGaps,
|
||||||
|
addThreadMetadata,
|
||||||
|
saveAssistantMessage,
|
||||||
|
} = require('~/server/services/Threads');
|
||||||
|
const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
|
||||||
|
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
|
||||||
|
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
|
||||||
|
const { createRun, StreamRunManager } = require('~/server/services/Runs');
|
||||||
|
const { addTitle } = require('~/server/services/Endpoints/assistants');
|
||||||
|
const { getTransactions } = require('~/models/Transaction');
|
||||||
|
const checkBalance = require('~/models/checkBalance');
|
||||||
|
const { getConvo } = require('~/models/Conversation');
|
||||||
|
const getLogStores = require('~/cache/getLogStores');
|
||||||
|
const { getModelMaxTokens } = require('~/utils');
|
||||||
|
const { getOpenAIClient } = require('./helpers');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
|
const ten_minutes = 1000 * 60 * 10;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @route POST /
|
||||||
|
* @desc Chat with an assistant
|
||||||
|
* @access Public
|
||||||
|
* @param {Express.Request} req - The request object, containing the request data.
|
||||||
|
* @param {Express.Response} res - The response object, used to send back a response.
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
const chatV2 = async (req, res) => {
|
||||||
|
logger.debug('[/assistants/chat/] req.body', req.body);
|
||||||
|
|
||||||
|
/** @type {{ files: MongoFile[]}} */
|
||||||
|
const {
|
||||||
|
text,
|
||||||
|
model,
|
||||||
|
endpoint,
|
||||||
|
files = [],
|
||||||
|
promptPrefix,
|
||||||
|
assistant_id,
|
||||||
|
instructions,
|
||||||
|
thread_id: _thread_id,
|
||||||
|
messageId: _messageId,
|
||||||
|
conversationId: convoId,
|
||||||
|
parentMessageId: _parentId = Constants.NO_PARENT,
|
||||||
|
} = req.body;
|
||||||
|
|
||||||
|
/** @type {OpenAIClient} */
|
||||||
|
let openai;
|
||||||
|
/** @type {string|undefined} - the current thread id */
|
||||||
|
let thread_id = _thread_id;
|
||||||
|
/** @type {string|undefined} - the current run id */
|
||||||
|
let run_id;
|
||||||
|
/** @type {string|undefined} - the parent messageId */
|
||||||
|
let parentMessageId = _parentId;
|
||||||
|
/** @type {TMessage[]} */
|
||||||
|
let previousMessages = [];
|
||||||
|
/** @type {import('librechat-data-provider').TConversation | null} */
|
||||||
|
let conversation = null;
|
||||||
|
/** @type {string[]} */
|
||||||
|
let file_ids = [];
|
||||||
|
/** @type {Set<string>} */
|
||||||
|
let attachedFileIds = new Set();
|
||||||
|
/** @type {TMessage | null} */
|
||||||
|
let requestMessage = null;
|
||||||
|
|
||||||
|
const userMessageId = v4();
|
||||||
|
const responseMessageId = v4();
|
||||||
|
|
||||||
|
/** @type {string} - The conversation UUID - created if undefined */
|
||||||
|
const conversationId = convoId ?? v4();
|
||||||
|
|
||||||
|
const cache = getLogStores(CacheKeys.ABORT_KEYS);
|
||||||
|
const cacheKey = `${req.user.id}:${conversationId}`;
|
||||||
|
|
||||||
|
/** @type {Run | undefined} - The completed run, undefined if incomplete */
|
||||||
|
let completedRun;
|
||||||
|
|
||||||
|
const handleError = async (error) => {
|
||||||
|
const defaultErrorMessage =
|
||||||
|
'The Assistant run failed to initialize. Try sending a message in a new conversation.';
|
||||||
|
const messageData = {
|
||||||
|
thread_id,
|
||||||
|
assistant_id,
|
||||||
|
conversationId,
|
||||||
|
parentMessageId,
|
||||||
|
sender: 'System',
|
||||||
|
user: req.user.id,
|
||||||
|
shouldSaveMessage: false,
|
||||||
|
messageId: responseMessageId,
|
||||||
|
endpoint,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (error.message === 'Run cancelled') {
|
||||||
|
return res.end();
|
||||||
|
} else if (error.message === 'Request closed' && completedRun) {
|
||||||
|
return;
|
||||||
|
} else if (error.message === 'Request closed') {
|
||||||
|
logger.debug('[/assistants/chat/] Request aborted on close');
|
||||||
|
} else if (/Files.*are invalid/.test(error.message)) {
|
||||||
|
const errorMessage = `Files are invalid, or may not have uploaded yet.${
|
||||||
|
endpoint === EModelEndpoint.azureAssistants
|
||||||
|
? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
|
||||||
|
: ''
|
||||||
|
}`;
|
||||||
|
return sendResponse(res, messageData, errorMessage);
|
||||||
|
} else if (error?.message?.includes('string too long')) {
|
||||||
|
return sendResponse(
|
||||||
|
res,
|
||||||
|
messageData,
|
||||||
|
'Message too long. The Assistants API has a limit of 32,768 characters per message. Please shorten it and try again.',
|
||||||
|
);
|
||||||
|
} else if (error?.message?.includes(ViolationTypes.TOKEN_BALANCE)) {
|
||||||
|
return sendResponse(res, messageData, error.message);
|
||||||
|
} else {
|
||||||
|
logger.error('[/assistants/chat/]', error);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!openai || !thread_id || !run_id) {
|
||||||
|
return sendResponse(res, messageData, defaultErrorMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
await sleep(2000);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const status = await cache.get(cacheKey);
|
||||||
|
if (status === 'cancelled') {
|
||||||
|
logger.debug('[/assistants/chat/] Run already cancelled');
|
||||||
|
return res.end();
|
||||||
|
}
|
||||||
|
await cache.delete(cacheKey);
|
||||||
|
const cancelledRun = await openai.beta.threads.runs.cancel(thread_id, run_id);
|
||||||
|
logger.debug('[/assistants/chat/] Cancelled run:', cancelledRun);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[/assistants/chat/] Error cancelling run', error);
|
||||||
|
}
|
||||||
|
|
||||||
|
await sleep(2000);
|
||||||
|
|
||||||
|
let run;
|
||||||
|
try {
|
||||||
|
run = await openai.beta.threads.runs.retrieve(thread_id, run_id);
|
||||||
|
await recordUsage({
|
||||||
|
...run.usage,
|
||||||
|
model: run.model,
|
||||||
|
user: req.user.id,
|
||||||
|
conversationId,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[/assistants/chat/] Error fetching or processing run', error);
|
||||||
|
}
|
||||||
|
|
||||||
|
let finalEvent;
|
||||||
|
try {
|
||||||
|
const runMessages = await checkMessageGaps({
|
||||||
|
openai,
|
||||||
|
run_id,
|
||||||
|
endpoint,
|
||||||
|
thread_id,
|
||||||
|
conversationId,
|
||||||
|
latestMessageId: responseMessageId,
|
||||||
|
});
|
||||||
|
|
||||||
|
const errorContentPart = {
|
||||||
|
text: {
|
||||||
|
value:
|
||||||
|
error?.message ?? 'There was an error processing your request. Please try again later.',
|
||||||
|
},
|
||||||
|
type: ContentTypes.ERROR,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!Array.isArray(runMessages[runMessages.length - 1]?.content)) {
|
||||||
|
runMessages[runMessages.length - 1].content = [errorContentPart];
|
||||||
|
} else {
|
||||||
|
const contentParts = runMessages[runMessages.length - 1].content;
|
||||||
|
for (let i = 0; i < contentParts.length; i++) {
|
||||||
|
const currentPart = contentParts[i];
|
||||||
|
/** @type {CodeToolCall | RetrievalToolCall | FunctionToolCall | undefined} */
|
||||||
|
const toolCall = currentPart?.[ContentTypes.TOOL_CALL];
|
||||||
|
if (
|
||||||
|
toolCall &&
|
||||||
|
toolCall?.function &&
|
||||||
|
!(toolCall?.function?.output || toolCall?.function?.output?.length)
|
||||||
|
) {
|
||||||
|
contentParts[i] = {
|
||||||
|
...currentPart,
|
||||||
|
[ContentTypes.TOOL_CALL]: {
|
||||||
|
...toolCall,
|
||||||
|
function: {
|
||||||
|
...toolCall.function,
|
||||||
|
output: 'error processing tool',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
runMessages[runMessages.length - 1].content.push(errorContentPart);
|
||||||
|
}
|
||||||
|
|
||||||
|
finalEvent = {
|
||||||
|
final: true,
|
||||||
|
conversation: await getConvo(req.user.id, conversationId),
|
||||||
|
runMessages,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[/assistants/chat/] Error finalizing error process', error);
|
||||||
|
return sendResponse(res, messageData, 'The Assistant run failed');
|
||||||
|
}
|
||||||
|
|
||||||
|
return sendResponse(res, finalEvent);
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
res.on('close', async () => {
|
||||||
|
if (!completedRun) {
|
||||||
|
await handleError(new Error('Request closed'));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (convoId && !_thread_id) {
|
||||||
|
completedRun = true;
|
||||||
|
throw new Error('Missing thread_id for existing conversation');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!assistant_id) {
|
||||||
|
completedRun = true;
|
||||||
|
throw new Error('Missing assistant_id');
|
||||||
|
}
|
||||||
|
|
||||||
|
const checkBalanceBeforeRun = async () => {
|
||||||
|
if (!isEnabled(process.env.CHECK_BALANCE)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const transactions =
|
||||||
|
(await getTransactions({
|
||||||
|
user: req.user.id,
|
||||||
|
context: 'message',
|
||||||
|
conversationId,
|
||||||
|
})) ?? [];
|
||||||
|
|
||||||
|
const totalPreviousTokens = Math.abs(
|
||||||
|
transactions.reduce((acc, curr) => acc + curr.rawAmount, 0),
|
||||||
|
);
|
||||||
|
|
||||||
|
// TODO: make promptBuffer a config option; buffer for titles, needs buffer for system instructions
|
||||||
|
const promptBuffer = parentMessageId === Constants.NO_PARENT && !_thread_id ? 200 : 0;
|
||||||
|
// 5 is added for labels
|
||||||
|
let promptTokens = (await countTokens(text + (promptPrefix ?? ''))) + 5;
|
||||||
|
promptTokens += totalPreviousTokens + promptBuffer;
|
||||||
|
// Count tokens up to the current context window
|
||||||
|
promptTokens = Math.min(promptTokens, getModelMaxTokens(model));
|
||||||
|
|
||||||
|
await checkBalance({
|
||||||
|
req,
|
||||||
|
res,
|
||||||
|
txData: {
|
||||||
|
model,
|
||||||
|
user: req.user.id,
|
||||||
|
tokenType: 'prompt',
|
||||||
|
amount: promptTokens,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
const { openai: _openai, client } = await getOpenAIClient({
|
||||||
|
req,
|
||||||
|
res,
|
||||||
|
endpointOption: req.body.endpointOption,
|
||||||
|
initAppClient: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
openai = _openai;
|
||||||
|
await validateAuthor({ req, openai });
|
||||||
|
|
||||||
|
if (previousMessages.length) {
|
||||||
|
parentMessageId = previousMessages[previousMessages.length - 1].messageId;
|
||||||
|
}
|
||||||
|
|
||||||
|
let userMessage = {
|
||||||
|
role: 'user',
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: ContentTypes.TEXT,
|
||||||
|
text,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
metadata: {
|
||||||
|
messageId: userMessageId,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
/** @type {CreateRunBody | undefined} */
|
||||||
|
const body = {
|
||||||
|
assistant_id,
|
||||||
|
model,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (promptPrefix) {
|
||||||
|
body.additional_instructions = promptPrefix;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (instructions) {
|
||||||
|
body.instructions = instructions;
|
||||||
|
}
|
||||||
|
|
||||||
|
const getRequestFileIds = async () => {
|
||||||
|
let thread_file_ids = [];
|
||||||
|
if (convoId) {
|
||||||
|
const convo = await getConvo(req.user.id, convoId);
|
||||||
|
if (convo && convo.file_ids) {
|
||||||
|
thread_file_ids = convo.file_ids;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (files.length || thread_file_ids.length) {
|
||||||
|
attachedFileIds = new Set([...file_ids, ...thread_file_ids]);
|
||||||
|
|
||||||
|
let attachmentIndex = 0;
|
||||||
|
for (const file of files) {
|
||||||
|
file_ids.push(file.file_id);
|
||||||
|
if (file.type.startsWith('image')) {
|
||||||
|
userMessage.content.push({
|
||||||
|
type: ContentTypes.IMAGE_FILE,
|
||||||
|
[ContentTypes.IMAGE_FILE]: { file_id: file.file_id },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!userMessage.attachments) {
|
||||||
|
userMessage.attachments = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
userMessage.attachments.push({
|
||||||
|
file_id: file.file_id,
|
||||||
|
tools: [{ type: ToolCallTypes.CODE_INTERPRETER }],
|
||||||
|
});
|
||||||
|
|
||||||
|
if (file.type.startsWith('image')) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const mimeType = file.type;
|
||||||
|
const isSupportedByRetrieval = retrievalMimeTypes.some((regex) => regex.test(mimeType));
|
||||||
|
if (isSupportedByRetrieval) {
|
||||||
|
userMessage.attachments[attachmentIndex].tools.push({
|
||||||
|
type: ToolCallTypes.FILE_SEARCH,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
attachmentIndex++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const initializeThread = async () => {
|
||||||
|
await getRequestFileIds();
|
||||||
|
|
||||||
|
// TODO: may allow multiple messages to be created beforehand in a future update
|
||||||
|
const initThreadBody = {
|
||||||
|
messages: [userMessage],
|
||||||
|
metadata: {
|
||||||
|
user: req.user.id,
|
||||||
|
conversationId,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const result = await initThread({ openai, body: initThreadBody, thread_id });
|
||||||
|
thread_id = result.thread_id;
|
||||||
|
|
||||||
|
createOnTextProgress({
|
||||||
|
openai,
|
||||||
|
conversationId,
|
||||||
|
userMessageId,
|
||||||
|
messageId: responseMessageId,
|
||||||
|
thread_id,
|
||||||
|
});
|
||||||
|
|
||||||
|
requestMessage = {
|
||||||
|
user: req.user.id,
|
||||||
|
text,
|
||||||
|
messageId: userMessageId,
|
||||||
|
parentMessageId,
|
||||||
|
// TODO: make sure client sends correct format for `files`, use zod
|
||||||
|
files,
|
||||||
|
file_ids,
|
||||||
|
conversationId,
|
||||||
|
isCreatedByUser: true,
|
||||||
|
assistant_id,
|
||||||
|
thread_id,
|
||||||
|
model: assistant_id,
|
||||||
|
endpoint,
|
||||||
|
};
|
||||||
|
|
||||||
|
previousMessages.push(requestMessage);
|
||||||
|
|
||||||
|
/* asynchronous */
|
||||||
|
saveUserMessage({ ...requestMessage, model });
|
||||||
|
|
||||||
|
conversation = {
|
||||||
|
conversationId,
|
||||||
|
endpoint,
|
||||||
|
promptPrefix: promptPrefix,
|
||||||
|
instructions: instructions,
|
||||||
|
assistant_id,
|
||||||
|
// model,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (file_ids.length) {
|
||||||
|
conversation.file_ids = file_ids;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const promises = [initializeThread(), checkBalanceBeforeRun()];
|
||||||
|
await Promise.all(promises);
|
||||||
|
|
||||||
|
const sendInitialResponse = () => {
|
||||||
|
sendMessage(res, {
|
||||||
|
sync: true,
|
||||||
|
conversationId,
|
||||||
|
// messages: previousMessages,
|
||||||
|
requestMessage,
|
||||||
|
responseMessage: {
|
||||||
|
user: req.user.id,
|
||||||
|
messageId: openai.responseMessage.messageId,
|
||||||
|
parentMessageId: userMessageId,
|
||||||
|
conversationId,
|
||||||
|
assistant_id,
|
||||||
|
thread_id,
|
||||||
|
model: assistant_id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
/** @type {RunResponse | typeof StreamRunManager | undefined} */
|
||||||
|
let response;
|
||||||
|
|
||||||
|
const processRun = async (retry = false) => {
|
||||||
|
if (endpoint === EModelEndpoint.azureAssistants) {
|
||||||
|
body.model = openai._options.model;
|
||||||
|
openai.attachedFileIds = attachedFileIds;
|
||||||
|
if (retry) {
|
||||||
|
response = await runAssistant({
|
||||||
|
openai,
|
||||||
|
thread_id,
|
||||||
|
run_id,
|
||||||
|
in_progress: openai.in_progress,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* NOTE:
|
||||||
|
* By default, a Run will use the model and tools configuration specified in Assistant object,
|
||||||
|
* but you can override most of these when creating the Run for added flexibility:
|
||||||
|
*/
|
||||||
|
const run = await createRun({
|
||||||
|
openai,
|
||||||
|
thread_id,
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
|
||||||
|
run_id = run.id;
|
||||||
|
await cache.set(cacheKey, `${thread_id}:${run_id}`, ten_minutes);
|
||||||
|
sendInitialResponse();
|
||||||
|
|
||||||
|
// todo: retry logic
|
||||||
|
response = await runAssistant({ openai, thread_id, run_id });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** @type {{[AssistantStreamEvents.ThreadRunCreated]: (event: ThreadRunCreated) => Promise<void>}} */
|
||||||
|
const handlers = {
|
||||||
|
[AssistantStreamEvents.ThreadRunCreated]: async (event) => {
|
||||||
|
await cache.set(cacheKey, `${thread_id}:${event.data.id}`, ten_minutes);
|
||||||
|
run_id = event.data.id;
|
||||||
|
sendInitialResponse();
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const streamRunManager = new StreamRunManager({
|
||||||
|
req,
|
||||||
|
res,
|
||||||
|
openai,
|
||||||
|
handlers,
|
||||||
|
thread_id,
|
||||||
|
attachedFileIds,
|
||||||
|
parentMessageId: userMessageId,
|
||||||
|
responseMessage: openai.responseMessage,
|
||||||
|
// streamOptions: {
|
||||||
|
|
||||||
|
// },
|
||||||
|
});
|
||||||
|
|
||||||
|
await streamRunManager.runAssistant({
|
||||||
|
thread_id,
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
|
||||||
|
response = streamRunManager;
|
||||||
|
response.text = streamRunManager.intermediateText;
|
||||||
|
};
|
||||||
|
|
||||||
|
await processRun();
|
||||||
|
logger.debug('[/assistants/chat/] response', {
|
||||||
|
run: response.run,
|
||||||
|
steps: response.steps,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.run.status === RunStatus.CANCELLED) {
|
||||||
|
logger.debug('[/assistants/chat/] Run cancelled, handled by `abortRun`');
|
||||||
|
return res.end();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (response.run.status === RunStatus.IN_PROGRESS) {
|
||||||
|
processRun(true);
|
||||||
|
}
|
||||||
|
|
||||||
|
completedRun = response.run;
|
||||||
|
|
||||||
|
/** @type {ResponseMessage} */
|
||||||
|
const responseMessage = {
|
||||||
|
...(response.responseMessage ?? response.finalMessage),
|
||||||
|
text: response.text,
|
||||||
|
parentMessageId: userMessageId,
|
||||||
|
conversationId,
|
||||||
|
user: req.user.id,
|
||||||
|
assistant_id,
|
||||||
|
thread_id,
|
||||||
|
model: assistant_id,
|
||||||
|
endpoint,
|
||||||
|
};
|
||||||
|
|
||||||
|
sendMessage(res, {
|
||||||
|
final: true,
|
||||||
|
conversation,
|
||||||
|
requestMessage: {
|
||||||
|
parentMessageId,
|
||||||
|
thread_id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
res.end();
|
||||||
|
|
||||||
|
await saveAssistantMessage({ ...responseMessage, model });
|
||||||
|
|
||||||
|
if (parentMessageId === Constants.NO_PARENT && !_thread_id) {
|
||||||
|
addTitle(req, {
|
||||||
|
text,
|
||||||
|
responseText: response.text,
|
||||||
|
conversationId,
|
||||||
|
client,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
await addThreadMetadata({
|
||||||
|
openai,
|
||||||
|
thread_id,
|
||||||
|
messageId: responseMessage.messageId,
|
||||||
|
messages: response.messages,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.run.usage) {
|
||||||
|
await sleep(3000);
|
||||||
|
completedRun = await openai.beta.threads.runs.retrieve(thread_id, response.run.id);
|
||||||
|
if (completedRun.usage) {
|
||||||
|
await recordUsage({
|
||||||
|
...completedRun.usage,
|
||||||
|
user: req.user.id,
|
||||||
|
model: completedRun.model ?? model,
|
||||||
|
conversationId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
await recordUsage({
|
||||||
|
...response.run.usage,
|
||||||
|
user: req.user.id,
|
||||||
|
model: response.run.model ?? model,
|
||||||
|
conversationId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
await handleError(error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = chatV2;
|
||||||
api/server/controllers/assistants/helpers.js (new file, 270 lines)
@@ -0,0 +1,270 @@
|
|||||||
|
const {
|
||||||
|
CacheKeys,
|
||||||
|
SystemRoles,
|
||||||
|
EModelEndpoint,
|
||||||
|
defaultOrderQuery,
|
||||||
|
defaultAssistantsVersion,
|
||||||
|
} = require('librechat-data-provider');
|
||||||
|
const {
|
||||||
|
initializeClient: initAzureClient,
|
||||||
|
} = require('~/server/services/Endpoints/azureAssistants');
|
||||||
|
const { initializeClient } = require('~/server/services/Endpoints/assistants');
|
||||||
|
const { getLogStores } = require('~/cache');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {Express.Request} req
|
||||||
|
* @param {string} [endpoint]
|
||||||
|
* @returns {Promise<string>}
|
||||||
|
*/
|
||||||
|
const getCurrentVersion = async (req, endpoint) => {
|
||||||
|
const index = req.baseUrl.lastIndexOf('/v');
|
||||||
|
let version = index !== -1 ? req.baseUrl.substring(index + 1, index + 3) : null;
|
||||||
|
if (!version && req.body.version) {
|
||||||
|
version = `v${req.body.version}`;
|
||||||
|
}
|
||||||
|
if (!version && endpoint) {
|
||||||
|
const cache = getLogStores(CacheKeys.CONFIG_STORE);
|
||||||
|
const cachedEndpointsConfig = await cache.get(CacheKeys.ENDPOINT_CONFIG);
|
||||||
|
version = `v${
|
||||||
|
cachedEndpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint]
|
||||||
|
}`;
|
||||||
|
}
|
||||||
|
if (!version?.startsWith('v') && version.length !== 2) {
|
||||||
|
throw new Error(`[${req.baseUrl}] Invalid version: ${version}`);
|
||||||
|
}
|
||||||
|
return version;
|
||||||
|
};
|
||||||
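getCurrentVersion prefers a version embedded in the route (a /v1 or /v2 segment), then one passed in the request body, then the cached endpoints config, and finally throws if nothing usable is found. A minimal sketch of the same precedence; the default values here are assumptions, the real ones come from defaultAssistantsVersion:

// Hedged sketch of the resolution order; `defaults` values are illustrative only.
const resolveVersion = (baseUrl, bodyVersion, endpoint, defaults = { assistants: 2, azureAssistants: 1 }) => {
  const index = baseUrl.lastIndexOf('/v');
  if (index !== -1) {
    return baseUrl.substring(index + 1, index + 3); // '/api/assistants/v2' -> 'v2'
  }
  if (bodyVersion) {
    return `v${bodyVersion}`; // version supplied by the client in the body
  }
  return `v${defaults[endpoint]}`; // fall back to the per-endpoint default
};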
|
|
||||||
|
/**
|
||||||
|
* Asynchronously lists assistants based on provided query parameters.
|
||||||
|
*
|
||||||
|
* Initializes the client with the current request and response objects and lists assistants
|
||||||
|
* according to the query parameters. This function abstracts the logic for non-Azure paths.
|
||||||
|
*
|
||||||
|
* @deprecated
|
||||||
|
* @async
|
||||||
|
* @param {object} params - The parameters object.
|
||||||
|
* @param {object} params.req - The request object, used for initializing the client.
|
||||||
|
* @param {object} params.res - The response object, used for initializing the client.
|
||||||
|
* @param {string} params.version - The API version to use.
|
||||||
|
* @param {object} params.query - The query parameters to list assistants (e.g., limit, order).
|
||||||
|
* @returns {Promise<object>} A promise that resolves to the response from the `openai.beta.assistants.list` method call.
|
||||||
|
*/
|
||||||
|
const _listAssistants = async ({ req, res, version, query }) => {
|
||||||
|
const { openai } = await getOpenAIClient({ req, res, version });
|
||||||
|
return openai.beta.assistants.list(query);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetches all assistants based on provided query params, until `has_more` is `false`.
|
||||||
|
*
|
||||||
|
* @async
|
||||||
|
* @param {object} params - The parameters object.
|
||||||
|
* @param {object} params.req - The request object, used for initializing the client.
|
||||||
|
* @param {object} params.res - The response object, used for initializing the client.
|
||||||
|
* @param {string} params.version - The API version to use.
|
||||||
|
* @param {Omit<AssistantListParams, 'endpoint'>} params.query - The query parameters to list assistants (e.g., limit, order).
|
||||||
|
* @returns {Promise<object>} A promise that resolves to the response from the `openai.beta.assistants.list` method call.
|
||||||
|
*/
|
||||||
|
const listAllAssistants = async ({ req, res, version, query }) => {
|
||||||
|
/** @type {{ openai: OpenAIClient }} */
|
||||||
|
const { openai } = await getOpenAIClient({ req, res, version });
|
||||||
|
const allAssistants = [];
|
||||||
|
|
||||||
|
let first_id;
|
||||||
|
let last_id;
|
||||||
|
let afterToken = query.after;
|
||||||
|
let hasMore = true;
|
||||||
|
|
||||||
|
while (hasMore) {
|
||||||
|
const response = await openai.beta.assistants.list({
|
||||||
|
...query,
|
||||||
|
after: afterToken,
|
||||||
|
});
|
||||||
|
|
||||||
|
const { body } = response;
|
||||||
|
|
||||||
|
allAssistants.push(...body.data);
|
||||||
|
hasMore = body.has_more;
|
||||||
|
|
||||||
|
if (!first_id) {
|
||||||
|
first_id = body.first_id;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (hasMore) {
|
||||||
|
afterToken = body.last_id;
|
||||||
|
} else {
|
||||||
|
last_id = body.last_id;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
data: allAssistants,
|
||||||
|
body: {
|
||||||
|
data: allAssistants,
|
||||||
|
has_more: false,
|
||||||
|
first_id,
|
||||||
|
last_id,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
};
|
||||||
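listAllAssistants drains the cursor-based pagination of the Assistants API: it keeps requesting pages with the previous page's last_id as the after cursor until has_more comes back false, then returns a single merged list. A hedged usage sketch; the version string and query values are illustrative, and the call assumes an async route handler:

// Hypothetical call that fetches every assistant in pages of 100.
const { body } = await listAllAssistants({
  req,
  res,
  version: 'v2', // assumed here; normally resolved via getCurrentVersion
  query: { limit: 100, order: 'desc' },
});
// body.data holds all assistants across pages; body.has_more is false once drained.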
|
|
||||||
|
/**
|
||||||
|
* Asynchronously lists assistants for Azure configured groups.
|
||||||
|
*
|
||||||
|
* Iterates through Azure configured assistant groups, initializes the client with the current request and response objects,
|
||||||
|
* lists assistants based on the provided query parameters, and merges their data alongside the model information into a single array.
|
||||||
|
*
|
||||||
|
* @async
|
||||||
|
* @param {object} params - The parameters object.
|
||||||
|
* @param {object} params.req - The request object, used for initializing the client and manipulating the request body.
|
||||||
|
* @param {object} params.res - The response object, used for initializing the client.
|
||||||
|
* @param {string} params.version - The API version to use.
|
||||||
|
* @param {TAzureConfig} params.azureConfig - The Azure configuration object containing assistantGroups and groupMap.
|
||||||
|
* @param {object} params.query - The query parameters to list assistants (e.g., limit, order).
|
||||||
|
* @returns {Promise<AssistantListResponse>} A promise that resolves to an array of assistant data merged with their respective model information.
|
||||||
|
*/
|
||||||
|
const listAssistantsForAzure = async ({ req, res, version, azureConfig = {}, query }) => {
|
||||||
|
/** @type {Array<[string, TAzureModelConfig]>} */
|
||||||
|
const groupModelTuples = [];
|
||||||
|
const promises = [];
|
||||||
|
/** @type {Array<TAzureGroup>} */
|
||||||
|
const groups = [];
|
||||||
|
|
||||||
|
const { groupMap, assistantGroups } = azureConfig;
|
||||||
|
|
||||||
|
for (const groupName of assistantGroups) {
|
||||||
|
const group = groupMap[groupName];
|
||||||
|
groups.push(group);
|
||||||
|
|
||||||
|
const currentModelTuples = Object.entries(group?.models);
|
||||||
|
groupModelTuples.push(currentModelTuples);
|
||||||
|
|
||||||
|
/* The specified model is only necessary to
|
||||||
|
fetch assistants for the shared instance */
|
||||||
|
req.body.model = currentModelTuples[0][0];
|
||||||
|
promises.push(listAllAssistants({ req, res, version, query }));
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolvedQueries = await Promise.all(promises);
|
||||||
|
const data = resolvedQueries.flatMap((res, i) =>
|
||||||
|
res.data.map((assistant) => {
|
||||||
|
const deploymentName = assistant.model;
|
||||||
|
const currentGroup = groups[i];
|
||||||
|
const currentModelTuples = groupModelTuples[i];
|
||||||
|
const firstModel = currentModelTuples[0][0];
|
||||||
|
|
||||||
|
if (currentGroup.deploymentName === deploymentName) {
|
||||||
|
return { ...assistant, model: firstModel };
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const [model, modelConfig] of currentModelTuples) {
|
||||||
|
if (modelConfig.deploymentName === deploymentName) {
|
||||||
|
return { ...assistant, model };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ...assistant, model: firstModel };
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
first_id: data[0]?.id,
|
||||||
|
last_id: data[data.length - 1]?.id,
|
||||||
|
object: 'list',
|
||||||
|
has_more: false,
|
||||||
|
data,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
async function getOpenAIClient({ req, res, endpointOption, initAppClient, overrideEndpoint }) {
|
||||||
|
let endpoint = overrideEndpoint ?? req.body.endpoint ?? req.query.endpoint;
|
||||||
|
const version = await getCurrentVersion(req, endpoint);
|
||||||
|
if (!endpoint) {
|
||||||
|
throw new Error(`[${req.baseUrl}] Endpoint is required`);
|
||||||
|
}
|
||||||
|
|
||||||
|
let result;
|
||||||
|
if (endpoint === EModelEndpoint.assistants) {
|
||||||
|
result = await initializeClient({ req, res, version, endpointOption, initAppClient });
|
||||||
|
} else if (endpoint === EModelEndpoint.azureAssistants) {
|
||||||
|
result = await initAzureClient({ req, res, version, endpointOption, initAppClient });
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
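getOpenAIClient above is the shared entry point this refactor routes both assistants variants through: it resolves the version, validates the endpoint, and delegates to the matching initializer. A hedged usage sketch; the option names mirror the diff, but the call site itself is hypothetical:

// Hypothetical call site inside an Express handler; `openai` and `client` come back
// from whichever initializer (assistants or azureAssistants) matches the endpoint.
const resolveClient = async (req, res) => {
  const { openai, client } = await getOpenAIClient({
    req,
    res,
    endpointOption: req.body.endpointOption,
    overrideEndpoint: EModelEndpoint.assistants, // skip req.body/req.query detection
  });
  return { openai, client };
};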
|
|
||||||
|
/**
|
||||||
|
* Returns a list of assistants.
|
||||||
|
* @param {object} params
|
||||||
|
* @param {object} params.req - Express Request
|
||||||
|
* @param {AssistantListParams} [params.req.query] - The assistant list parameters for pagination and sorting.
|
||||||
|
* @param {object} params.res - Express Response
|
||||||
|
* @param {string} [params.overrideEndpoint] - The endpoint to override the request endpoint.
|
||||||
|
* @returns {Promise<AssistantListResponse>} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
const fetchAssistants = async ({ req, res, overrideEndpoint }) => {
|
||||||
|
const {
|
||||||
|
limit = 100,
|
||||||
|
order = 'desc',
|
||||||
|
after,
|
||||||
|
before,
|
||||||
|
endpoint,
|
||||||
|
} = req.query ?? {
|
||||||
|
endpoint: overrideEndpoint,
|
||||||
|
...defaultOrderQuery,
|
||||||
|
};
|
||||||
|
|
||||||
|
const version = await getCurrentVersion(req, endpoint);
|
||||||
|
const query = { limit, order, after, before };
|
||||||
|
|
||||||
|
/** @type {AssistantListResponse} */
|
||||||
|
let body;
|
||||||
|
|
||||||
|
if (endpoint === EModelEndpoint.assistants) {
|
||||||
|
({ body } = await listAllAssistants({ req, res, version, query }));
|
||||||
|
} else if (endpoint === EModelEndpoint.azureAssistants) {
|
||||||
|
const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI];
|
||||||
|
body = await listAssistantsForAzure({ req, res, version, azureConfig, query });
|
||||||
|
}
|
||||||
|
|
||||||
|
if (req.user.role === SystemRoles.ADMIN) {
|
||||||
|
return body;
|
||||||
|
} else if (!req.app.locals[endpoint]) {
|
||||||
|
return body;
|
||||||
|
}
|
||||||
|
|
||||||
|
body.data = filterAssistants({
|
||||||
|
userId: req.user.id,
|
||||||
|
assistants: body.data,
|
||||||
|
assistantsConfig: req.app.locals[endpoint],
|
||||||
|
});
|
||||||
|
return body;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Filter assistants based on configuration.
|
||||||
|
*
|
||||||
|
* @param {object} params - The parameters object.
|
||||||
|
* @param {string} params.userId - The user ID to filter private assistants.
|
||||||
|
* @param {Assistant[]} params.assistants - The list of assistants to filter.
|
||||||
|
* @param {Partial<TAssistantEndpoint>} params.assistantsConfig - The assistant configuration.
|
||||||
|
* @returns {Assistant[]} - The filtered list of assistants.
|
||||||
|
*/
|
||||||
|
function filterAssistants({ assistants, userId, assistantsConfig }) {
|
||||||
|
const { supportedIds, excludedIds, privateAssistants } = assistantsConfig;
|
||||||
|
if (privateAssistants) {
|
||||||
|
return assistants.filter((assistant) => userId === assistant.metadata?.author);
|
||||||
|
} else if (supportedIds?.length) {
|
||||||
|
return assistants.filter((assistant) => supportedIds.includes(assistant.id));
|
||||||
|
} else if (excludedIds?.length) {
|
||||||
|
return assistants.filter((assistant) => !excludedIds.includes(assistant.id));
|
||||||
|
}
|
||||||
|
return assistants;
|
||||||
|
}
|
||||||
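filterAssistants applies one rule at a time: privateAssistants restricts the list to the requesting user's own assistants, otherwise supportedIds acts as an allow-list, otherwise excludedIds as a deny-list. A short usage example with invented IDs:

// Hypothetical usage of filterAssistants; the assistant IDs and user ID are made up.
const visible = filterAssistants({
  userId: 'user-123',
  assistants: [
    { id: 'asst_a', metadata: { author: 'user-123' } },
    { id: 'asst_b', metadata: { author: 'someone-else' } },
  ],
  assistantsConfig: { supportedIds: ['asst_a'] },
});
// With only supportedIds configured, `visible` contains just the 'asst_a' assistant.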
|
|
||||||
|
module.exports = {
|
||||||
|
getOpenAIClient,
|
||||||
|
fetchAssistants,
|
||||||
|
getCurrentVersion,
|
||||||
|
};
|
||||||
@@ -1,34 +1,12 @@
|
|||||||
const multer = require('multer');
|
const { FileContext } = require('librechat-data-provider');
|
||||||
const express = require('express');
|
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
|
||||||
const { FileContext, EModelEndpoint } = require('librechat-data-provider');
|
|
||||||
const {
|
|
||||||
initializeClient,
|
|
||||||
listAssistantsForAzure,
|
|
||||||
listAssistants,
|
|
||||||
} = require('~/server/services/Endpoints/assistants');
|
|
||||||
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
||||||
|
const { deleteAssistantActions } = require('~/server/services/ActionService');
|
||||||
|
const { updateAssistantDoc, getAssistants } = require('~/models/Assistant');
|
||||||
const { uploadImageBuffer } = require('~/server/services/Files/process');
|
const { uploadImageBuffer } = require('~/server/services/Files/process');
|
||||||
const { updateAssistant, getAssistants } = require('~/models/Assistant');
|
const { getOpenAIClient, fetchAssistants } = require('./helpers');
|
||||||
const { deleteFileByFilter } = require('~/models/File');
+const { deleteFileByFilter } = require('~/models/File');
 const { logger } = require('~/config');
-const actions = require('./actions');
-const tools = require('./tools');

-const upload = multer();
-const router = express.Router();

-/**
- * Assistant actions route.
- * @route GET|POST /assistants/actions
- */
-router.use('/actions', actions);

-/**
- * Create an assistant.
- * @route GET /assistants/tools
- * @returns {TPlugin[]} 200 - application/json
- */
-router.use('/tools', tools);

 /**
  * Create an assistant.
@@ -36,12 +14,11 @@ router.use('/tools', tools);
  * @param {AssistantCreateParams} req.body - The assistant creation parameters.
  * @returns {Assistant} 201 - success response - application/json
  */
-router.post('/', async (req, res) => {
+const createAssistant = async (req, res) => {
   try {
-    /** @type {{ openai: OpenAI }} */
-    const { openai } = await initializeClient({ req, res });
+    const { openai } = await getOpenAIClient({ req, res });

-    const { tools = [], ...assistantData } = req.body;
+    const { tools = [], endpoint, ...assistantData } = req.body;
     assistantData.tools = tools
       .map((tool) => {
         if (typeof tool !== 'string') {
@@ -52,18 +29,30 @@ router.post('/', async (req, res) => {
       })
       .filter((tool) => tool);

+    let azureModelIdentifier = null;
     if (openai.locals?.azureOptions) {
+      azureModelIdentifier = assistantData.model;
       assistantData.model = openai.locals.azureOptions.azureOpenAIApiDeploymentName;
     }

+    assistantData.metadata = {
+      author: req.user.id,
+      endpoint,
+    };

     const assistant = await openai.beta.assistants.create(assistantData);
+    const promise = updateAssistantDoc({ assistant_id: assistant.id }, { user: req.user.id });
+    if (azureModelIdentifier) {
+      assistant.model = azureModelIdentifier;
+    }
+    await promise;
     logger.debug('/assistants/', assistant);
     res.status(201).json(assistant);
   } catch (error) {
     logger.error('[/assistants] Error creating assistant', error);
     res.status(500).json({ error: error.message });
   }
-});
+};

 /**
  * Retrieves an assistant.
@@ -71,11 +60,10 @@ router.post('/', async (req, res) => {
  * @param {string} req.params.id - Assistant identifier.
  * @returns {Assistant} 200 - success response - application/json
  */
-router.get('/:id', async (req, res) => {
+const retrieveAssistant = async (req, res) => {
   try {
-    /** @type {{ openai: OpenAI }} */
-    const { openai } = await initializeClient({ req, res });
+    /* NOTE: not actually being used right now */
+    const { openai } = await getOpenAIClient({ req, res });

     const assistant_id = req.params.id;
     const assistant = await openai.beta.assistants.retrieve(assistant_id);
     res.json(assistant);
@@ -83,22 +71,24 @@ router.get('/:id', async (req, res) => {
     logger.error('[/assistants/:id] Error retrieving assistant', error);
     res.status(500).json({ error: error.message });
   }
-});
+};

 /**
  * Modifies an assistant.
  * @route PATCH /assistants/:id
+ * @param {object} req - Express Request
+ * @param {object} req.params - Request params
  * @param {string} req.params.id - Assistant identifier.
  * @param {AssistantUpdateParams} req.body - The assistant update parameters.
  * @returns {Assistant} 200 - success response - application/json
  */
-router.patch('/:id', async (req, res) => {
+const patchAssistant = async (req, res) => {
   try {
-    /** @type {{ openai: OpenAI }} */
-    const { openai } = await initializeClient({ req, res });
+    const { openai } = await getOpenAIClient({ req, res });
+    await validateAuthor({ req, openai });

     const assistant_id = req.params.id;
-    const updateData = req.body;
+    const { endpoint: _e, ...updateData } = req.body;
     updateData.tools = (updateData.tools ?? [])
       .map((tool) => {
         if (typeof tool !== 'string') {
@@ -119,90 +109,76 @@ router.patch('/:id', async (req, res) => {
     logger.error('[/assistants/:id] Error updating assistant', error);
     res.status(500).json({ error: error.message });
   }
-});
+};

 /**
  * Deletes an assistant.
  * @route DELETE /assistants/:id
+ * @param {object} req - Express Request
+ * @param {object} req.params - Request params
  * @param {string} req.params.id - Assistant identifier.
  * @returns {Assistant} 200 - success response - application/json
  */
-router.delete('/:id', async (req, res) => {
+const deleteAssistant = async (req, res) => {
   try {
-    /** @type {{ openai: OpenAI }} */
-    const { openai } = await initializeClient({ req, res });
+    const { openai } = await getOpenAIClient({ req, res });
+    await validateAuthor({ req, openai });

     const assistant_id = req.params.id;
     const deletionStatus = await openai.beta.assistants.del(assistant_id);
+    if (deletionStatus?.deleted) {
+      await deleteAssistantActions({ req, assistant_id });
+    }
     res.json(deletionStatus);
   } catch (error) {
     logger.error('[/assistants/:id] Error deleting assistant', error);
     res.status(500).json({ error: 'Error deleting assistant' });
   }
-});
+};

 /**
  * Returns a list of assistants.
  * @route GET /assistants
+ * @param {object} req - Express Request
  * @param {AssistantListParams} req.query - The assistant list parameters for pagination and sorting.
  * @returns {AssistantListResponse} 200 - success response - application/json
  */
-router.get('/', async (req, res) => {
+const listAssistants = async (req, res) => {
   try {
-    const { limit = 100, order = 'desc', after, before } = req.query;
-    const query = { limit, order, after, before };
-
-    const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI];
-    /** @type {AssistantListResponse} */
-    let body;
-
-    if (azureConfig?.assistants) {
-      body = await listAssistantsForAzure({ req, res, azureConfig, query });
-    } else {
-      ({ body } = await listAssistants({ req, res, query }));
-    }
-
-    if (req.app.locals?.[EModelEndpoint.assistants]) {
-      /** @type {Partial<TAssistantEndpoint>} */
-      const assistantsConfig = req.app.locals[EModelEndpoint.assistants];
-      const { supportedIds, excludedIds } = assistantsConfig;
-      if (supportedIds?.length) {
-        body.data = body.data.filter((assistant) => supportedIds.includes(assistant.id));
-      } else if (excludedIds?.length) {
-        body.data = body.data.filter((assistant) => !excludedIds.includes(assistant.id));
-      }
-    }
-
+    const body = await fetchAssistants({ req, res });
     res.json(body);
   } catch (error) {
     logger.error('[/assistants] Error listing assistants', error);
     res.status(500).json({ message: 'Error listing assistants' });
   }
-});
+};

 /**
  * Returns a list of the user's assistant documents (metadata saved to database).
  * @route GET /assistants/documents
  * @returns {AssistantDocument[]} 200 - success response - application/json
  */
-router.get('/documents', async (req, res) => {
+const getAssistantDocuments = async (req, res) => {
   try {
     res.json(await getAssistants({ user: req.user.id }));
   } catch (error) {
     logger.error('[/assistants/documents] Error listing assistant documents', error);
     res.status(500).json({ error: error.message });
   }
-});
+};

 /**
  * Uploads and updates an avatar for a specific assistant.
  * @route POST /avatar/:assistant_id
+ * @param {object} req - Express Request
+ * @param {object} req.params - Request params
  * @param {string} req.params.assistant_id - The ID of the assistant.
  * @param {Express.Multer.File} req.file - The avatar image file.
+ * @param {object} req.body - Request body
  * @param {string} [req.body.metadata] - Optional metadata for the assistant's avatar.
  * @returns {Object} 200 - success response - application/json
  */
-router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) => {
+const uploadAssistantAvatar = async (req, res) => {
   try {
     const { assistant_id } = req.params;
     if (!assistant_id) {
@@ -210,8 +186,8 @@ router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) =>
     }

     let { metadata: _metadata = '{}' } = req.body;
-    /** @type {{ openai: OpenAI }} */
-    const { openai } = await initializeClient({ req, res });
+    const { openai } = await getOpenAIClient({ req, res });
+    await validateAuthor({ req, openai });

     const image = await uploadImageBuffer({
       req,
@@ -246,7 +222,7 @@ router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) =>

     const promises = [];
     promises.push(
-      updateAssistant(
+      updateAssistantDoc(
         { assistant_id },
         {
           avatar: {
@@ -266,6 +242,14 @@ router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) =>
     logger.error(message, error);
     res.status(500).json({ message });
   }
-});
+};

-module.exports = router;
+module.exports = {
+  createAssistant,
+  retrieveAssistant,
+  patchAssistant,
+  deleteAssistant,
+  listAssistants,
+  getAssistantDocuments,
+  uploadAssistantAvatar,
+};
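With the handlers exported instead of registered inline, a separate routes module has to mount them; that file is not part of this hunk. The sketch below is illustrative only, assuming the handlers above end up at api/server/controllers/assistants/v1.js and that the avatar endpoint keeps a multer upload middleware as in the removed code:

// Hypothetical route wiring; the actual routes file is not shown in this diff.
const multer = require('multer');
const express = require('express');
const v1 = require('~/server/controllers/assistants/v1');

const upload = multer();
const router = express.Router();

router.post('/', v1.createAssistant);
// '/documents' must be registered before '/:id' so it is not captured as an id.
router.get('/documents', v1.getAssistantDocuments);
router.get('/:id', v1.retrieveAssistant);
router.patch('/:id', v1.patchAssistant);
router.delete('/:id', v1.deleteAssistant);
router.get('/', v1.listAssistants);
router.post('/avatar/:assistant_id', upload.single('file'), v1.uploadAssistantAvatar);

module.exports = router;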
api/server/controllers/assistants/v2.js (new file, 213 lines):

const { ToolCallTypes } = require('librechat-data-provider');
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
const { validateAndUpdateTool } = require('~/server/services/ActionService');
const { updateAssistantDoc } = require('~/models/Assistant');
const { getOpenAIClient } = require('./helpers');
const { logger } = require('~/config');

/**
 * Create an assistant.
 * @route POST /assistants
 * @param {AssistantCreateParams} req.body - The assistant creation parameters.
 * @returns {Assistant} 201 - success response - application/json
 */
const createAssistant = async (req, res) => {
  try {
    /** @type {{ openai: OpenAIClient }} */
    const { openai } = await getOpenAIClient({ req, res });

    const { tools = [], endpoint, ...assistantData } = req.body;
    assistantData.tools = tools
      .map((tool) => {
        if (typeof tool !== 'string') {
          return tool;
        }

        return req.app.locals.availableTools[tool];
      })
      .filter((tool) => tool);

    let azureModelIdentifier = null;
    if (openai.locals?.azureOptions) {
      azureModelIdentifier = assistantData.model;
      assistantData.model = openai.locals.azureOptions.azureOpenAIApiDeploymentName;
    }

    assistantData.metadata = {
      author: req.user.id,
      endpoint,
    };

    const assistant = await openai.beta.assistants.create(assistantData);
    const promise = updateAssistantDoc({ assistant_id: assistant.id }, { user: req.user.id });
    if (azureModelIdentifier) {
      assistant.model = azureModelIdentifier;
    }
    await promise;
    logger.debug('/assistants/', assistant);
    res.status(201).json(assistant);
  } catch (error) {
    logger.error('[/assistants] Error creating assistant', error);
    res.status(500).json({ error: error.message });
  }
};

/**
 * Modifies an assistant.
 * @param {object} params
 * @param {Express.Request} params.req
 * @param {OpenAIClient} params.openai
 * @param {string} params.assistant_id
 * @param {AssistantUpdateParams} params.updateData
 * @returns {Promise<Assistant>} The updated assistant.
 */
const updateAssistant = async ({ req, openai, assistant_id, updateData }) => {
  await validateAuthor({ req, openai });
  const tools = [];

  let hasFileSearch = false;
  for (const tool of updateData.tools ?? []) {
    let actualTool = typeof tool === 'string' ? req.app.locals.availableTools[tool] : tool;

    if (!actualTool) {
      continue;
    }

    if (actualTool.type === ToolCallTypes.FILE_SEARCH) {
      hasFileSearch = true;
    }

    if (!actualTool.function) {
      tools.push(actualTool);
      continue;
    }

    const updatedTool = await validateAndUpdateTool({ req, tool: actualTool, assistant_id });
    if (updatedTool) {
      tools.push(updatedTool);
    }
  }

  if (hasFileSearch && !updateData.tool_resources) {
    const assistant = await openai.beta.assistants.retrieve(assistant_id);
    updateData.tool_resources = assistant.tool_resources ?? null;
  }

  if (hasFileSearch && !updateData.tool_resources?.file_search) {
    updateData.tool_resources = {
      ...(updateData.tool_resources ?? {}),
      file_search: {
        vector_store_ids: [],
      },
    };
  }

  updateData.tools = tools;

  if (openai.locals?.azureOptions && updateData.model) {
    updateData.model = openai.locals.azureOptions.azureOpenAIApiDeploymentName;
  }

  return await openai.beta.assistants.update(assistant_id, updateData);
};

/**
 * Modifies an assistant with the resource file id.
 * @param {object} params
 * @param {Express.Request} params.req
 * @param {OpenAIClient} params.openai
 * @param {string} params.assistant_id
 * @param {string} params.tool_resource
 * @param {string} params.file_id
 * @param {AssistantUpdateParams} params.updateData
 * @returns {Promise<Assistant>} The updated assistant.
 */
const addResourceFileId = async ({ req, openai, assistant_id, tool_resource, file_id }) => {
  const assistant = await openai.beta.assistants.retrieve(assistant_id);
  const { tool_resources = {} } = assistant;
  if (tool_resources[tool_resource]) {
    tool_resources[tool_resource].file_ids.push(file_id);
  } else {
    tool_resources[tool_resource] = { file_ids: [file_id] };
  }

  delete assistant.id;
  return await updateAssistant({
    req,
    openai,
    assistant_id,
    updateData: { tools: assistant.tools, tool_resources },
  });
};

/**
 * Deletes a file ID from an assistant's resource.
 * @param {object} params
 * @param {Express.Request} params.req
 * @param {OpenAIClient} params.openai
 * @param {string} params.assistant_id
 * @param {string} [params.tool_resource]
 * @param {string} params.file_id
 * @param {AssistantUpdateParams} params.updateData
 * @returns {Promise<Assistant>} The updated assistant.
 */
const deleteResourceFileId = async ({ req, openai, assistant_id, tool_resource, file_id }) => {
  const assistant = await openai.beta.assistants.retrieve(assistant_id);
  const { tool_resources = {} } = assistant;

  if (tool_resource && tool_resources[tool_resource]) {
    const resource = tool_resources[tool_resource];
    const index = resource.file_ids.indexOf(file_id);
    if (index !== -1) {
      resource.file_ids.splice(index, 1);
    }
  } else {
    for (const resourceKey in tool_resources) {
      const resource = tool_resources[resourceKey];
      const index = resource.file_ids.indexOf(file_id);
      if (index !== -1) {
        resource.file_ids.splice(index, 1);
        break;
      }
    }
  }

  delete assistant.id;
  return await updateAssistant({
    req,
    openai,
    assistant_id,
    updateData: { tools: assistant.tools, tool_resources },
  });
};

/**
 * Modifies an assistant.
 * @route PATCH /assistants/:id
 * @param {object} req - Express Request
 * @param {object} req.params - Request params
 * @param {string} req.params.id - Assistant identifier.
 * @param {AssistantUpdateParams} req.body - The assistant update parameters.
 * @returns {Assistant} 200 - success response - application/json
 */
const patchAssistant = async (req, res) => {
  try {
    const { openai } = await getOpenAIClient({ req, res });
    const assistant_id = req.params.id;
    const { endpoint: _e, ...updateData } = req.body;
    updateData.tools = updateData.tools ?? [];
    const updatedAssistant = await updateAssistant({ req, openai, assistant_id, updateData });
    res.json(updatedAssistant);
  } catch (error) {
    logger.error('[/assistants/:id] Error updating assistant', error);
    res.status(500).json({ error: error.message });
  }
};

module.exports = {
  patchAssistant,
  createAssistant,
  updateAssistant,
  addResourceFileId,
  deleteResourceFileId,
};
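The resource-file helpers are meant to be called from whatever service attaches or detaches assistant files; that call site is not part of this diff. A hedged sketch of how they might be used, assuming a code_interpreter tool resource (the one whose tool_resources entry carries file_ids):

// Hypothetical call site; attachFileToAssistant/detachFileFromAssistant are illustrative names.
const { addResourceFileId, deleteResourceFileId } = require('~/server/controllers/assistants/v2');

async function attachFileToAssistant({ req, openai, assistant_id, file_id }) {
  // Pushes file_id into tool_resources.code_interpreter.file_ids (creating the entry if missing)
  // and persists the change through updateAssistant().
  return addResourceFileId({ req, openai, assistant_id, tool_resource: 'code_interpreter', file_id });
}

async function detachFileFromAssistant({ req, openai, assistant_id, file_id }) {
  // Omitting tool_resource makes deleteResourceFileId scan every resource for the file_id.
  return deleteResourceFileId({ req, openai, assistant_id, file_id });
}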
@@ -1,26 +1,22 @@
-const User = require('~/models/User');
 const { setAuthTokens } = require('~/server/services/AuthService');
 const { logger } = require('~/config');

 const loginController = async (req, res) => {
   try {
-    const user = await User.findById(req.user._id);
-
-    // If user doesn't exist, return error
-    if (!user) {
-      // typeof user !== User) { // this doesn't seem to resolve the User type ??
+    if (!req.user) {
       return res.status(400).json({ message: 'Invalid credentials' });
     }

-    const token = await setAuthTokens(user._id, res);
+    const { password: _, __v, ...user } = req.user;
+    user.id = user._id.toString();
+
+    const token = await setAuthTokens(req.user._id, res);
+
     return res.status(200).send({ token, user });
   } catch (err) {
     logger.error('[loginController]', err);
+    return res.status(500).json({ message: 'Something went wrong' });
   }
-
-  // Generic error messages are safer
-  return res.status(500).json({ message: 'Something went wrong' });
 };

 module.exports = {
@@ -6,16 +6,16 @@ const axios = require('axios');
 const express = require('express');
 const passport = require('passport');
 const mongoSanitize = require('express-mongo-sanitize');
+const { jwtLogin, passportLogin } = require('~/strategies');
+const { connectDb, indexSync } = require('~/lib/db');
+const { isEnabled } = require('~/server/utils');
+const { ldapLogin } = require('~/strategies');
+const { logger } = require('~/config');
 const validateImageRequest = require('./middleware/validateImageRequest');
 const errorController = require('./controllers/ErrorController');
-const { jwtLogin, passportLogin } = require('~/strategies');
 const configureSocialLogins = require('./socialLogins');
-const { connectDb, indexSync } = require('~/lib/db');
 const AppService = require('./services/AppService');
 const noIndex = require('./middleware/noIndex');
-const { isEnabled } = require('~/server/utils');
-const { logger } = require('~/config');

 const routes = require('./routes');

 const { PORT, HOST, ALLOW_SOCIAL_LOGIN } = process.env ?? {};
@@ -60,6 +60,11 @@ const startServer = async () => {
   passport.use(await jwtLogin());
   passport.use(passportLogin());

+  // LDAP Auth
+  if (process.env.LDAP_URL && process.env.LDAP_USER_SEARCH_BASE) {
+    passport.use(ldapLogin);
+  }

   if (isEnabled(ALLOW_SOCIAL_LOGIN)) {
     configureSocialLogins(app);
   }
@@ -76,6 +81,7 @@ const startServer = async () => {
   app.use('/api/convos', routes.convos);
   app.use('/api/presets', routes.presets);
   app.use('/api/prompts', routes.prompts);
+  app.use('/api/categories', routes.categories);
   app.use('/api/tokenizer', routes.tokenizer);
   app.use('/api/endpoints', routes.endpoints);
   app.use('/api/balance', routes.balance);
@@ -85,9 +91,11 @@ const startServer = async () => {
   app.use('/api/assistants', routes.assistants);
   app.use('/api/files', await routes.files.initialize());
   app.use('/images/', validateImageRequest, routes.staticRoute);
+  app.use('/api/share', routes.share);
+  app.use('/api/roles', routes.roles);

   app.use((req, res) => {
-    res.status(404).sendFile(path.join(app.locals.paths.dist, 'index.html'));
+    res.sendFile(path.join(app.locals.paths.dist, 'index.html'));
   });

   app.listen(port, host, () => {
@@ -1,31 +1,36 @@
-const { EModelEndpoint } = require('librechat-data-provider');
+const { isAssistantsEndpoint } = require('librechat-data-provider');
 const { sendMessage, sendError, countTokens, isEnabled } = require('~/server/utils');
 const { truncateText, smartTruncateText } = require('~/app/clients/prompts');
-const { saveMessage, getConvo, getConvoTitle } = require('~/models');
 const clearPendingReq = require('~/cache/clearPendingReq');
 const abortControllers = require('./abortControllers');
+const { saveMessage, getConvo } = require('~/models');
 const spendTokens = require('~/models/spendTokens');
 const { abortRun } = require('./abortRun');
 const { logger } = require('~/config');

 async function abortMessage(req, res) {
-  let { abortKey, conversationId, endpoint } = req.body;
+  let { abortKey, endpoint } = req.body;

-  if (!abortKey && conversationId) {
-    abortKey = conversationId;
+  if (isAssistantsEndpoint(endpoint)) {
+    return await abortRun(req, res);
   }

-  if (endpoint === EModelEndpoint.assistants) {
-    return await abortRun(req, res);
+  const conversationId = abortKey?.split(':')?.[0] ?? req.user.id;
+
+  if (!abortControllers.has(abortKey) && abortControllers.has(conversationId)) {
+    abortKey = conversationId;
   }

   if (!abortControllers.has(abortKey) && !res.headersSent) {
     return res.status(204).send({ message: 'Request not found' });
   }

-  const { abortController } = abortControllers.get(abortKey);
+  const { abortController } = abortControllers.get(abortKey) ?? {};
+  if (!abortController) {
+    return res.status(204).send({ message: 'Request not found' });
+  }
   const finalEvent = await abortController.abortCompletion();
-  logger.debug('[abortMessage] Aborted request', { abortKey });
+  logger.info('[abortMessage] Aborted request', { abortKey });
   abortControllers.delete(abortKey);

   if (res.headersSent && finalEvent) {
@@ -50,12 +55,35 @@ const handleAbort = () => {
   };
 };

-const createAbortController = (req, res, getAbortData) => {
+const createAbortController = (req, res, getAbortData, getReqData) => {
   const abortController = new AbortController();
   const { endpointOption } = req.body;
-  const onStart = (userMessage) => {
+
+  abortController.getAbortData = function () {
+    return getAbortData();
+  };
+
+  /**
+   * @param {TMessage} userMessage
+   * @param {string} responseMessageId
+   */
+  const onStart = (userMessage, responseMessageId) => {
     sendMessage(res, { message: userMessage, created: true });
+
     const abortKey = userMessage?.conversationId ?? req.user.id;
+    const prevRequest = abortControllers.get(abortKey);
+
+    if (prevRequest && prevRequest?.abortController) {
+      const data = prevRequest.abortController.getAbortData();
+      getReqData({ userMessage: data?.userMessage });
+      const addedAbortKey = `${abortKey}:${responseMessageId}`;
+      abortControllers.set(addedAbortKey, { abortController, ...endpointOption });
+      res.on('finish', function () {
+        abortControllers.delete(addedAbortKey);
+      });
+      return;
+    }

     abortControllers.set(abortKey, { abortController, ...endpointOption });

     res.on('finish', function () {
@@ -65,7 +93,8 @@ const createAbortController = (req, res, getAbortData) => {

   abortController.abortCompletion = async function () {
     abortController.abort();
-    const { conversationId, userMessage, promptTokens, ...responseData } = getAbortData();
+    const { conversationId, userMessage, userMessagePromise, promptTokens, ...responseData } =
+      getAbortData();
     const completionTokens = await countTokens(responseData?.text ?? '');
     const user = req.user.id;

@@ -73,6 +102,8 @@ const createAbortController = (req, res, getAbortData) => {
       ...responseData,
       conversationId,
       finish_reason: 'incomplete',
+      endpoint: endpointOption.endpoint,
+      iconURL: endpointOption.iconURL,
       model: endpointOption.modelOptions.model,
       unfinished: false,
       error: false,
@@ -87,10 +118,20 @@ const createAbortController = (req, res, getAbortData) => {

     saveMessage({ ...responseMessage, user });

+    let conversation;
+    if (userMessagePromise) {
+      const resolved = await userMessagePromise;
+      conversation = resolved?.conversation;
+    }
+
+    if (!conversation) {
+      conversation = await getConvo(req.user.id, conversationId);
+    }

     return {
-      title: await getConvoTitle(user, conversationId),
+      title: conversation && !conversation.title ? null : conversation?.title || 'New Chat',
       final: true,
-      conversation: await getConvo(user, conversationId),
+      conversation,
       requestMessage: userMessage,
       responseMessage: responseMessage,
     };
@@ -1,6 +1,7 @@
 const { CacheKeys, RunStatus, isUUID } = require('librechat-data-provider');
 const { initializeClient } = require('~/server/services/Endpoints/assistants');
 const { checkMessageGaps, recordUsage } = require('~/server/services/Threads');
+const { deleteMessages } = require('~/models/Message');
 const { getConvo } = require('~/models/Conversation');
 const getLogStores = require('~/cache/getLogStores');
 const { sendMessage } = require('~/server/utils');
@@ -10,7 +11,7 @@ const three_minutes = 1000 * 60 * 3;

 async function abortRun(req, res) {
   res.setHeader('Content-Type', 'application/json');
-  const { abortKey } = req.body;
+  const { abortKey, endpoint } = req.body;
   const [conversationId, latestMessageId] = abortKey.split(':');
   const conversation = await getConvo(req.user.id, conversationId);

@@ -66,12 +67,19 @@ async function abortRun(req, res) {
     logger.error('[abortRun] Error fetching or processing run', error);
   }

+  /* TODO: a reconciling strategy between the existing intermediate message would be more optimal than deleting it */
+  await deleteMessages({
+    user: req.user.id,
+    unfinished: true,
+    conversationId,
+  });
   runMessages = await checkMessageGaps({
     openai,
-    latestMessageId,
-    thread_id,
     run_id,
+    endpoint,
+    thread_id,
     conversationId,
+    latestMessageId,
   });

   const finalEvent = {
api/server/middleware/assistants/validate.js (new file, 43 lines):

const { v4 } = require('uuid');
const { handleAbortError } = require('~/server/middleware/abortMiddleware');

/**
 * Checks if the assistant is supported or excluded
 * @param {object} req - Express Request
 * @param {object} req.body - The request payload.
 * @param {object} res - Express Response
 * @param {function} next - Express next middleware function.
 * @returns {Promise<void>}
 */
const validateAssistant = async (req, res, next) => {
  const { endpoint, conversationId, assistant_id, messageId } = req.body;

  /** @type {Partial<TAssistantEndpoint>} */
  const assistantsConfig = req.app.locals?.[endpoint];
  if (!assistantsConfig) {
    return next();
  }

  const { supportedIds, excludedIds } = assistantsConfig;
  const error = { message: 'Assistant not supported' };
  if (supportedIds?.length && !supportedIds.includes(assistant_id)) {
    return await handleAbortError(res, req, error, {
      sender: 'System',
      conversationId,
      messageId: v4(),
      parentMessageId: messageId,
      error,
    });
  } else if (excludedIds?.length && excludedIds.includes(assistant_id)) {
    return await handleAbortError(res, req, error, {
      sender: 'System',
      conversationId,
      messageId: v4(),
      parentMessageId: messageId,
    });
  }

  return next();
};

module.exports = validateAssistant;
api/server/middleware/assistants/validateAuthor.js (new file, 43 lines):

const { SystemRoles } = require('librechat-data-provider');
const { getAssistant } = require('~/models/Assistant');

/**
 * Checks if the assistant is supported or excluded
 * @param {object} params
 * @param {object} params.req - Express Request
 * @param {object} params.req.body - The request payload.
 * @param {string} params.overrideEndpoint - The override endpoint
 * @param {string} params.overrideAssistantId - The override assistant ID
 * @param {OpenAIClient} params.openai - OpenAI API Client
 * @returns {Promise<void>}
 */
const validateAuthor = async ({ req, openai, overrideEndpoint, overrideAssistantId }) => {
  if (req.user.role === SystemRoles.ADMIN) {
    return;
  }

  const endpoint = overrideEndpoint ?? req.body.endpoint ?? req.query.endpoint;
  const assistant_id =
    overrideAssistantId ?? req.params.id ?? req.body.assistant_id ?? req.query.assistant_id;

  /** @type {Partial<TAssistantEndpoint>} */
  const assistantsConfig = req.app.locals?.[endpoint];
  if (!assistantsConfig) {
    return;
  }

  if (!assistantsConfig.privateAssistants) {
    return;
  }

  const assistantDoc = await getAssistant({ assistant_id, user: req.user.id });
  if (assistantDoc) {
    return;
  }
  const assistant = await openai.beta.assistants.retrieve(assistant_id);
  if (req.user.id !== assistant?.metadata?.author) {
    throw new Error(`Assistant ${assistant_id} is not authored by the user.`);
  }
};

module.exports = validateAuthor;
@@ -1,5 +1,6 @@
 const { parseConvo, EModelEndpoint } = require('librechat-data-provider');
 const { getModelsConfig } = require('~/server/controllers/ModelController');
+const azureAssistants = require('~/server/services/Endpoints/azureAssistants');
 const assistants = require('~/server/services/Endpoints/assistants');
 const gptPlugins = require('~/server/services/Endpoints/gptPlugins');
 const { processFiles } = require('~/server/services/Files/process');
@@ -7,6 +8,8 @@ const anthropic = require('~/server/services/Endpoints/anthropic');
 const openAI = require('~/server/services/Endpoints/openAI');
 const custom = require('~/server/services/Endpoints/custom');
 const google = require('~/server/services/Endpoints/google');
+const enforceModelSpec = require('./enforceModelSpec');
+const { handleError } = require('~/server/utils');

 const buildFunction = {
   [EModelEndpoint.openAI]: openAI.buildOptions,
@@ -16,11 +19,46 @@ const buildFunction = {
   [EModelEndpoint.anthropic]: anthropic.buildOptions,
   [EModelEndpoint.gptPlugins]: gptPlugins.buildOptions,
   [EModelEndpoint.assistants]: assistants.buildOptions,
+  [EModelEndpoint.azureAssistants]: azureAssistants.buildOptions,
 };

 async function buildEndpointOption(req, res, next) {
   const { endpoint, endpointType } = req.body;
   const parsedBody = parseConvo({ endpoint, endpointType, conversation: req.body });

+  if (req.app.locals.modelSpecs?.list && req.app.locals.modelSpecs?.enforce) {
+    /** @type {{ list: TModelSpec[] }}*/
+    const { list } = req.app.locals.modelSpecs;
+    const { spec } = parsedBody;
+
+    if (!spec) {
+      return handleError(res, { text: 'No model spec selected' });
+    }
+
+    const currentModelSpec = list.find((s) => s.name === spec);
+    if (!currentModelSpec) {
+      return handleError(res, { text: 'Invalid model spec' });
+    }
+
+    if (endpoint !== currentModelSpec.preset.endpoint) {
+      return handleError(res, { text: 'Model spec mismatch' });
+    }
+
+    if (
+      currentModelSpec.preset.endpoint !== EModelEndpoint.gptPlugins &&
+      currentModelSpec.preset.tools
+    ) {
+      return handleError(res, {
+        text: `Only the "${EModelEndpoint.gptPlugins}" endpoint can have tools defined in the preset`,
+      });
+    }
+
+    const isValidModelSpec = enforceModelSpec(currentModelSpec, parsedBody);
+    if (!isValidModelSpec) {
+      return handleError(res, { text: 'Model spec mismatch' });
+    }
+  }
+
   req.body.endpointOption = buildFunction[endpointType ?? endpoint](
     endpoint,
     parsedBody,
api/server/middleware/canDeleteAccount.js (new file, 28 lines):

const { SystemRoles } = require('librechat-data-provider');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');

/**
 * Checks if the user can delete their account
 *
 * @async
 * @function
 * @param {Object} req - Express request object
 * @param {Object} res - Express response object
 * @param {Function} next - Next middleware function
 *
 * @returns {Promise<function|Object>} - Returns a Promise which when resolved calls next middleware if the user can delete their account
 */

const canDeleteAccount = async (req, res, next = () => {}) => {
  const { user } = req;
  const { ALLOW_ACCOUNT_DELETION = true } = process.env;
  if (user?.role === SystemRoles.ADMIN || isEnabled(ALLOW_ACCOUNT_DELETION)) {
    return next();
  } else {
    logger.error(`[User] [Delete Account] [User cannot delete account] [User: ${user?.id}]`);
    return res.status(403).send({ message: 'You do not have permission to delete this account' });
  }
};

module.exports = canDeleteAccount;
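The account-deletion route that would sit behind this guard is not part of this diff; a minimal sketch of how it might be mounted, assuming requireJwtAuth and canDeleteAccount are both taken from the middleware barrel export added later in this comparison:

// Illustrative only; the actual user routes are not shown in this diff.
const express = require('express');
const { requireJwtAuth, canDeleteAccount } = require('~/server/middleware');

const router = express.Router();

// canDeleteAccount calls next() for admins or when ALLOW_ACCOUNT_DELETION is enabled,
// otherwise it answers 403 before the deletion controller ever runs.
router.delete('/', requireJwtAuth, canDeleteAccount, (req, res) => {
  // deletion controller would go here
  res.status(200).json({ message: 'Account deleted' });
});

module.exports = router;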
@@ -1,15 +1,13 @@
 const Keyv = require('keyv');
 const uap = require('ua-parser-js');
 const { ViolationTypes } = require('librechat-data-provider');
-const { isEnabled, removePorts } = require('../utils');
+const { isEnabled, removePorts } = require('~/server/utils');
-const keyvRedis = require('~/cache/keyvRedis');
+const keyvMongo = require('~/cache/keyvMongo');
 const denyRequest = require('./denyRequest');
 const { getLogStores } = require('~/cache');
-const User = require('~/models/User');
+const { findUser } = require('~/models');

-const banCache = isEnabled(process.env.USE_REDIS)
-  ? new Keyv({ store: keyvRedis })
-  : new Keyv({ namespace: ViolationTypes.BAN, ttl: 0 });
+const banCache = new Keyv({ store: keyvMongo, namespace: ViolationTypes.BAN, ttl: 0 });
 const message = 'Your account has been temporarily banned due to violations of our service.';

 /**
@@ -57,7 +55,7 @@ const checkBan = async (req, res, next = () => {}) => {
   let userId = req.user?.id ?? req.user?._id ?? null;

   if (!userId && req?.body?.email) {
-    const user = await User.findOne({ email: req.body.email }, '_id').lean();
+    const user = await findUser({ email: req.body.email }, '_id');
     userId = user?._id ? user._id.toString() : userId;
   }
25
api/server/middleware/checkDomainAllowed.js
Normal file
25
api/server/middleware/checkDomainAllowed.js
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
const { isDomainAllowed } = require('~/server/services/AuthService');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks the domain's social login is allowed
|
||||||
|
*
|
||||||
|
* @async
|
||||||
|
* @function
|
||||||
|
* @param {Object} req - Express request object.
|
||||||
|
* @param {Object} res - Express response object.
|
||||||
|
* @param {Function} next - Next middleware function.
|
||||||
|
*
|
||||||
|
* @returns {Promise<function|Object>} - Returns a Promise which when resolved calls next middleware if the domain's email is allowed
|
||||||
|
*/
|
||||||
|
const checkDomainAllowed = async (req, res, next = () => {}) => {
|
||||||
|
const email = req?.user?.email;
|
||||||
|
if (email && !(await isDomainAllowed(email))) {
|
||||||
|
logger.error(`[Social Login] [Social Login not allowed] [Email: ${email}]`);
|
||||||
|
return res.redirect('/login');
|
||||||
|
} else {
|
||||||
|
return next();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = checkDomainAllowed;
|
||||||
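checkDomainAllowed reads req.user.email, so it only makes sense after the passport authentication step of a social-login callback; the callback wiring itself is not part of this diff. A hedged sketch, with the route path and redirect target purely illustrative:

// Illustrative only; the actual OAuth routes are not shown in this diff.
const passport = require('passport');
const { checkDomainAllowed } = require('~/server/middleware');

app.get(
  '/oauth/google/callback',
  passport.authenticate('google', { session: false }),
  checkDomainAllowed, // redirects to /login when the email's domain is not allowed
  (req, res) => res.redirect('/'),
);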
api/server/middleware/enforceModelSpec.js (new file, 58 lines):

const interchangeableKeys = new Map([
  ['chatGptLabel', ['modelLabel']],
  ['modelLabel', ['chatGptLabel']],
]);

/**
 * Middleware to enforce the model spec for a conversation
 * @param {TModelSpec} modelSpec - The model spec to enforce
 * @param {TConversation} parsedBody - The parsed body of the conversation
 * @returns {boolean} - Whether the model spec is enforced
 */
const enforceModelSpec = (modelSpec, parsedBody) => {
  for (const [key, value] of Object.entries(modelSpec.preset)) {
    if (key === 'endpoint') {
      continue;
    }

    if (!checkMatch(key, value, parsedBody)) {
      return false;
    }
  }
  return true;
};

/**
 * Checks if there is a match for the given key and value in the parsed body
 * or any of its interchangeable keys, including deep comparison for objects and arrays.
 * @param {string} key
 * @param {any} value
 * @param {object} parsedBody
 * @returns {boolean}
 */
const checkMatch = (key, value, parsedBody) => {
  const isEqual = (a, b) => {
    if (Array.isArray(a) && Array.isArray(b)) {
      return a.length === b.length && a.every((val, index) => isEqual(val, b[index]));
    } else if (typeof a === 'object' && typeof b === 'object' && a !== null && b !== null) {
      const keysA = Object.keys(a);
      const keysB = Object.keys(b);
      return keysA.length === keysB.length && keysA.every((k) => isEqual(a[k], b[k]));
    }
    return a === b;
  };

  if (isEqual(parsedBody[key], value)) {
    return true;
  }

  if (interchangeableKeys.has(key)) {
    return interchangeableKeys
      .get(key)
      .some((interchangeableKey) => isEqual(parsedBody[interchangeableKey], value));
  }

  return false;
};

module.exports = enforceModelSpec;
api/server/middleware/enforceModelSpec.spec.js (new file, 47 lines):

// enforceModelSpec.test.js

const enforceModelSpec = require('./enforceModelSpec');

describe('enforceModelSpec function', () => {
  test('returns true when all model specs match parsed body directly', () => {
    const modelSpec = { preset: { title: 'Dialog', status: 'Active' } };
    const parsedBody = { title: 'Dialog', status: 'Active' };
    expect(enforceModelSpec(modelSpec, parsedBody)).toBe(true);
  });

  test('returns true when model specs match via interchangeable keys', () => {
    const modelSpec = { preset: { chatGptLabel: 'GPT-4' } };
    const parsedBody = { modelLabel: 'GPT-4' };
    expect(enforceModelSpec(modelSpec, parsedBody)).toBe(true);
  });

  test('returns false if any key value does not match', () => {
    const modelSpec = { preset: { language: 'English', level: 'Advanced' } };
    const parsedBody = { language: 'Spanish', level: 'Advanced' };
    expect(enforceModelSpec(modelSpec, parsedBody)).toBe(false);
  });

  test('ignores the \'endpoint\' key in model spec', () => {
    const modelSpec = { preset: { endpoint: 'ignored', feature: 'Special' } };
    const parsedBody = { feature: 'Special' };
    expect(enforceModelSpec(modelSpec, parsedBody)).toBe(true);
  });

  test('handles nested objects correctly', () => {
    const modelSpec = { preset: { details: { time: 'noon', location: 'park' } } };
    const parsedBody = { details: { time: 'noon', location: 'park' } };
    expect(enforceModelSpec(modelSpec, parsedBody)).toBe(true);
  });

  test('handles arrays within objects', () => {
    const modelSpec = { preset: { tags: ['urgent', 'important'] } };
    const parsedBody = { tags: ['urgent', 'important'] };
    expect(enforceModelSpec(modelSpec, parsedBody)).toBe(true);
  });

  test('fails when arrays in objects do not match', () => {
    const modelSpec = { preset: { tags: ['urgent', 'important'] } };
    const parsedBody = { tags: ['important', 'urgent'] }; // Different order
    expect(enforceModelSpec(modelSpec, parsedBody)).toBe(false);
  });
});
@@ -1,41 +1,45 @@
-const abortMiddleware = require('./abortMiddleware');
-const checkBan = require('./checkBan');
-const uaParser = require('./uaParser');
-const setHeaders = require('./setHeaders');
-const loginLimiter = require('./loginLimiter');
-const validateModel = require('./validateModel');
-const requireJwtAuth = require('./requireJwtAuth');
-const uploadLimiters = require('./uploadLimiters');
-const registerLimiter = require('./registerLimiter');
-const messageLimiters = require('./messageLimiters');
-const requireLocalAuth = require('./requireLocalAuth');
-const validateEndpoint = require('./validateEndpoint');
-const concurrentLimiter = require('./concurrentLimiter');
-const validateMessageReq = require('./validateMessageReq');
-const buildEndpointOption = require('./buildEndpointOption');
+const validatePasswordReset = require('./validatePasswordReset');
 const validateRegistration = require('./validateRegistration');
 const validateImageRequest = require('./validateImageRequest');
+const buildEndpointOption = require('./buildEndpointOption');
+const validateMessageReq = require('./validateMessageReq');
+const checkDomainAllowed = require('./checkDomainAllowed');
+const concurrentLimiter = require('./concurrentLimiter');
+const validateEndpoint = require('./validateEndpoint');
+const requireLocalAuth = require('./requireLocalAuth');
+const canDeleteAccount = require('./canDeleteAccount');
+const requireLdapAuth = require('./requireLdapAuth');
+const abortMiddleware = require('./abortMiddleware');
+const requireJwtAuth = require('./requireJwtAuth');
+const validateModel = require('./validateModel');
 const moderateText = require('./moderateText');
+const setHeaders = require('./setHeaders');
+const limiters = require('./limiters');
+const uaParser = require('./uaParser');
+const checkBan = require('./checkBan');
 const noIndex = require('./noIndex');
+const roles = require('./roles');

 module.exports = {
-  ...uploadLimiters,
   ...abortMiddleware,
-  ...messageLimiters,
+  ...limiters,
+  ...roles,
+  noIndex,
   checkBan,
   uaParser,
   setHeaders,
-  loginLimiter,
+  moderateText,
+  validateModel,
   requireJwtAuth,
-  registerLimiter,
+  requireLdapAuth,
   requireLocalAuth,
+  canDeleteAccount,
   validateEndpoint,
   concurrentLimiter,
+  checkDomainAllowed,
   validateMessageReq,
   buildEndpointOption,
   validateRegistration,
   validateImageRequest,
-  validateModel,
-  moderateText,
-  noIndex,
+  validatePasswordReset,
 };

69  api/server/middleware/limiters/importLimiters.js  Normal file
@@ -0,0 +1,69 @@
const rateLimit = require('express-rate-limit');
const { ViolationTypes } = require('librechat-data-provider');
const logViolation = require('~/cache/logViolation');

const getEnvironmentVariables = () => {
  const IMPORT_IP_MAX = parseInt(process.env.IMPORT_IP_MAX) || 100;
  const IMPORT_IP_WINDOW = parseInt(process.env.IMPORT_IP_WINDOW) || 15;
  const IMPORT_USER_MAX = parseInt(process.env.IMPORT_USER_MAX) || 50;
  const IMPORT_USER_WINDOW = parseInt(process.env.IMPORT_USER_WINDOW) || 15;

  const importIpWindowMs = IMPORT_IP_WINDOW * 60 * 1000;
  const importIpMax = IMPORT_IP_MAX;
  const importIpWindowInMinutes = importIpWindowMs / 60000;

  const importUserWindowMs = IMPORT_USER_WINDOW * 60 * 1000;
  const importUserMax = IMPORT_USER_MAX;
  const importUserWindowInMinutes = importUserWindowMs / 60000;

  return {
    importIpWindowMs,
    importIpMax,
    importIpWindowInMinutes,
    importUserWindowMs,
    importUserMax,
    importUserWindowInMinutes,
  };
};

const createImportHandler = (ip = true) => {
  const { importIpMax, importIpWindowInMinutes, importUserMax, importUserWindowInMinutes } =
    getEnvironmentVariables();

  return async (req, res) => {
    const type = ViolationTypes.FILE_UPLOAD_LIMIT;
    const errorMessage = {
      type,
      max: ip ? importIpMax : importUserMax,
      limiter: ip ? 'ip' : 'user',
      windowInMinutes: ip ? importIpWindowInMinutes : importUserWindowInMinutes,
    };

    await logViolation(req, res, type, errorMessage);
    res.status(429).json({ message: 'Too many conversation import requests. Try again later' });
  };
};

const createImportLimiters = () => {
  const { importIpWindowMs, importIpMax, importUserWindowMs, importUserMax } =
    getEnvironmentVariables();

  const importIpLimiter = rateLimit({
    windowMs: importIpWindowMs,
    max: importIpMax,
    handler: createImportHandler(),
  });

  const importUserLimiter = rateLimit({
    windowMs: importUserWindowMs,
    max: importUserMax,
    handler: createImportHandler(false),
    keyGenerator: function (req) {
      return req.user?.id; // Use the user ID or NULL if not available
    },
  });

  return { importIpLimiter, importUserLimiter };
};

module.exports = { createImportLimiters };
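
A minimal sketch of how the generated import limiters might be mounted on an Express route; the `/import` path, the handler, and the require specifier are assumptions for illustration.

// Illustrative usage sketch; '/import' and the handler are hypothetical.
const express = require('express');
const { createImportLimiters } = require('~/server/middleware/limiters/importLimiters');

const router = express.Router();
const { importIpLimiter, importUserLimiter } = createImportLimiters();

// The IP-based limit runs first, then the per-user limit keyed on req.user?.id.
router.post('/import', importIpLimiter, importUserLimiter, (req, res) => {
  res.json({ status: 'import started' });
});

module.exports = router;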

22  api/server/middleware/limiters/index.js  Normal file
@@ -0,0 +1,22 @@
const createTTSLimiters = require('./ttsLimiters');
const createSTTLimiters = require('./sttLimiters');

const loginLimiter = require('./loginLimiter');
const importLimiters = require('./importLimiters');
const uploadLimiters = require('./uploadLimiters');
const registerLimiter = require('./registerLimiter');
const messageLimiters = require('./messageLimiters');
const verifyEmailLimiter = require('./verifyEmailLimiter');
const resetPasswordLimiter = require('./resetPasswordLimiter');

module.exports = {
  ...uploadLimiters,
  ...importLimiters,
  ...messageLimiters,
  loginLimiter,
  registerLimiter,
  createTTSLimiters,
  createSTTLimiters,
  verifyEmailLimiter,
  resetPasswordLimiter,
};

@@ -1,6 +1,6 @@
const rateLimit = require('express-rate-limit');
-const { logViolation } = require('../../cache');
-const { removePorts } = require('../utils');
+const { removePorts } = require('~/server/utils');
+const { logViolation } = require('~/cache');

const { LOGIN_WINDOW = 5, LOGIN_MAX = 7, LOGIN_VIOLATION_SCORE: score } = process.env;
const windowMs = LOGIN_WINDOW * 60 * 1000;

@@ -1,6 +1,6 @@
const rateLimit = require('express-rate-limit');
-const { logViolation } = require('../../cache');
-const denyRequest = require('./denyRequest');
+const denyRequest = require('~/server/middleware/denyRequest');
+const { logViolation } = require('~/cache');

const {
  MESSAGE_IP_MAX = 40,

@@ -1,6 +1,6 @@
const rateLimit = require('express-rate-limit');
-const { logViolation } = require('../../cache');
-const { removePorts } = require('../utils');
+const { removePorts } = require('~/server/utils');
+const { logViolation } = require('~/cache');

const { REGISTER_WINDOW = 60, REGISTER_MAX = 5, REGISTRATION_VIOLATION_SCORE: score } = process.env;
const windowMs = REGISTER_WINDOW * 60 * 1000;

35  api/server/middleware/limiters/resetPasswordLimiter.js  Normal file
@@ -0,0 +1,35 @@
const rateLimit = require('express-rate-limit');
const { ViolationTypes } = require('librechat-data-provider');
const { removePorts } = require('~/server/utils');
const { logViolation } = require('~/cache');

const {
  RESET_PASSWORD_WINDOW = 2,
  RESET_PASSWORD_MAX = 2,
  RESET_PASSWORD_VIOLATION_SCORE: score,
} = process.env;
const windowMs = RESET_PASSWORD_WINDOW * 60 * 1000;
const max = RESET_PASSWORD_MAX;
const windowInMinutes = windowMs / 60000;
const message = `Too many attempts, please try again after ${windowInMinutes} minute(s)`;

const handler = async (req, res) => {
  const type = ViolationTypes.RESET_PASSWORD_LIMIT;
  const errorMessage = {
    type,
    max,
    windowInMinutes,
  };

  await logViolation(req, res, type, errorMessage, score);
  return res.status(429).json({ message });
};

const resetPasswordLimiter = rateLimit({
  windowMs,
  max,
  handler,
  keyGenerator: removePorts,
});

module.exports = resetPasswordLimiter;
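
A minimal sketch of mounting this limiter on a password-reset route; the route path, handler, and require specifier are assumptions for illustration.

// Illustrative only: '/requestPasswordReset' and the controller are placeholders.
const express = require('express');
const resetPasswordLimiter = require('~/server/middleware/limiters/resetPasswordLimiter');

const router = express.Router();

// removePorts keys the limit by client IP (port stripped); excess requests get a 429 with the message above.
router.post('/requestPasswordReset', resetPasswordLimiter, (req, res) => {
  res.json({ message: 'If the account exists, a reset link was sent.' });
});

module.exports = router;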

68  api/server/middleware/limiters/sttLimiters.js  Normal file
@@ -0,0 +1,68 @@
const rateLimit = require('express-rate-limit');
const { ViolationTypes } = require('librechat-data-provider');
const logViolation = require('~/cache/logViolation');

const getEnvironmentVariables = () => {
  const STT_IP_MAX = parseInt(process.env.STT_IP_MAX) || 100;
  const STT_IP_WINDOW = parseInt(process.env.STT_IP_WINDOW) || 1;
  const STT_USER_MAX = parseInt(process.env.STT_USER_MAX) || 50;
  const STT_USER_WINDOW = parseInt(process.env.STT_USER_WINDOW) || 1;

  const sttIpWindowMs = STT_IP_WINDOW * 60 * 1000;
  const sttIpMax = STT_IP_MAX;
  const sttIpWindowInMinutes = sttIpWindowMs / 60000;

  const sttUserWindowMs = STT_USER_WINDOW * 60 * 1000;
  const sttUserMax = STT_USER_MAX;
  const sttUserWindowInMinutes = sttUserWindowMs / 60000;

  return {
    sttIpWindowMs,
    sttIpMax,
    sttIpWindowInMinutes,
    sttUserWindowMs,
    sttUserMax,
    sttUserWindowInMinutes,
  };
};

const createSTTHandler = (ip = true) => {
  const { sttIpMax, sttIpWindowInMinutes, sttUserMax, sttUserWindowInMinutes } =
    getEnvironmentVariables();

  return async (req, res) => {
    const type = ViolationTypes.STT_LIMIT;
    const errorMessage = {
      type,
      max: ip ? sttIpMax : sttUserMax,
      limiter: ip ? 'ip' : 'user',
      windowInMinutes: ip ? sttIpWindowInMinutes : sttUserWindowInMinutes,
    };

    await logViolation(req, res, type, errorMessage);
    res.status(429).json({ message: 'Too many STT requests. Try again later' });
  };
};

const createSTTLimiters = () => {
  const { sttIpWindowMs, sttIpMax, sttUserWindowMs, sttUserMax } = getEnvironmentVariables();

  const sttIpLimiter = rateLimit({
    windowMs: sttIpWindowMs,
    max: sttIpMax,
    handler: createSTTHandler(),
  });

  const sttUserLimiter = rateLimit({
    windowMs: sttUserWindowMs,
    max: sttUserMax,
    handler: createSTTHandler(false),
    keyGenerator: function (req) {
      return req.user?.id; // Use the user ID or NULL if not available
    },
  });

  return { sttIpLimiter, sttUserLimiter };
};

module.exports = createSTTLimiters;

68  api/server/middleware/limiters/ttsLimiters.js  Normal file
@@ -0,0 +1,68 @@
const rateLimit = require('express-rate-limit');
const { ViolationTypes } = require('librechat-data-provider');
const logViolation = require('~/cache/logViolation');

const getEnvironmentVariables = () => {
  const TTS_IP_MAX = parseInt(process.env.TTS_IP_MAX) || 100;
  const TTS_IP_WINDOW = parseInt(process.env.TTS_IP_WINDOW) || 1;
  const TTS_USER_MAX = parseInt(process.env.TTS_USER_MAX) || 50;
  const TTS_USER_WINDOW = parseInt(process.env.TTS_USER_WINDOW) || 1;

  const ttsIpWindowMs = TTS_IP_WINDOW * 60 * 1000;
  const ttsIpMax = TTS_IP_MAX;
  const ttsIpWindowInMinutes = ttsIpWindowMs / 60000;

  const ttsUserWindowMs = TTS_USER_WINDOW * 60 * 1000;
  const ttsUserMax = TTS_USER_MAX;
  const ttsUserWindowInMinutes = ttsUserWindowMs / 60000;

  return {
    ttsIpWindowMs,
    ttsIpMax,
    ttsIpWindowInMinutes,
    ttsUserWindowMs,
    ttsUserMax,
    ttsUserWindowInMinutes,
  };
};

const createTTSHandler = (ip = true) => {
  const { ttsIpMax, ttsIpWindowInMinutes, ttsUserMax, ttsUserWindowInMinutes } =
    getEnvironmentVariables();

  return async (req, res) => {
    const type = ViolationTypes.TTS_LIMIT;
    const errorMessage = {
      type,
      max: ip ? ttsIpMax : ttsUserMax,
      limiter: ip ? 'ip' : 'user',
      windowInMinutes: ip ? ttsIpWindowInMinutes : ttsUserWindowInMinutes,
    };

    await logViolation(req, res, type, errorMessage);
    res.status(429).json({ message: 'Too many TTS requests. Try again later' });
  };
};

const createTTSLimiters = () => {
  const { ttsIpWindowMs, ttsIpMax, ttsUserWindowMs, ttsUserMax } = getEnvironmentVariables();

  const ttsIpLimiter = rateLimit({
    windowMs: ttsIpWindowMs,
    max: ttsIpMax,
    handler: createTTSHandler(),
  });

  const ttsUserLimiter = rateLimit({
    windowMs: ttsUserWindowMs,
    max: ttsUserMax,
    handler: createTTSHandler(false),
    keyGenerator: function (req) {
      return req.user?.id; // Use the user ID or NULL if not available
    },
  });

  return { ttsIpLimiter, ttsUserLimiter };
};

module.exports = createTTSLimiters;
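
The STT and TTS modules follow the same factory pattern, so a single combined usage sketch covers both; the routes, handlers, and require specifiers are assumptions for illustration.

// Illustrative usage of the STT/TTS limiter factories; routes and handlers are hypothetical.
const express = require('express');
const createSTTLimiters = require('~/server/middleware/limiters/sttLimiters');
const createTTSLimiters = require('~/server/middleware/limiters/ttsLimiters');

const router = express.Router();
const { sttIpLimiter, sttUserLimiter } = createSTTLimiters();
const { ttsIpLimiter, ttsUserLimiter } = createTTSLimiters();

// Each route gets its IP-based limiter first, then the per-user limiter.
router.post('/stt', sttIpLimiter, sttUserLimiter, (req, res) => res.json({ ok: true }));
router.post('/tts', ttsIpLimiter, ttsUserLimiter, (req, res) => res.json({ ok: true }));

module.exports = router;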

35  api/server/middleware/limiters/verifyEmailLimiter.js  Normal file
@@ -0,0 +1,35 @@
const rateLimit = require('express-rate-limit');
const { ViolationTypes } = require('librechat-data-provider');
const { removePorts } = require('~/server/utils');
const { logViolation } = require('~/cache');

const {
  VERIFY_EMAIL_WINDOW = 2,
  VERIFY_EMAIL_MAX = 2,
  VERIFY_EMAIL_VIOLATION_SCORE: score,
} = process.env;
const windowMs = VERIFY_EMAIL_WINDOW * 60 * 1000;
const max = VERIFY_EMAIL_MAX;
const windowInMinutes = windowMs / 60000;
const message = `Too many attempts, please try again after ${windowInMinutes} minute(s)`;

const handler = async (req, res) => {
  const type = ViolationTypes.VERIFY_EMAIL_LIMIT;
  const errorMessage = {
    type,
    max,
    windowInMinutes,
  };

  await logViolation(req, res, type, errorMessage, score);
  return res.status(429).json({ message });
};

const verifyEmailLimiter = rateLimit({
  windowMs,
  max,
  handler,
  keyGenerator: removePorts,
});

module.exports = verifyEmailLimiter;

22  api/server/middleware/requireLdapAuth.js  Normal file
@@ -0,0 +1,22 @@
const passport = require('passport');

const requireLdapAuth = (req, res, next) => {
  passport.authenticate('ldapauth', (err, user, info) => {
    if (err) {
      console.log({
        title: '(requireLdapAuth) Error at passport.authenticate',
        parameters: [{ name: 'error', value: err }],
      });
      return next(err);
    }
    if (!user) {
      console.log({
        title: '(requireLdapAuth) Error: No user',
      });
      return res.status(404).send(info);
    }
    req.user = user;
    next();
  })(req, res, next);
};
module.exports = requireLdapAuth;
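
A sketch of how an LDAP-versus-local choice might be wired on a login route; the LDAP_URL check, route path, and handler are assumptions for illustration, not taken from this diff.

// Illustrative wiring; the env flag and '/login' controller are hypothetical.
const express = require('express');
const requireLdapAuth = require('~/server/middleware/requireLdapAuth');
const requireLocalAuth = require('~/server/middleware/requireLocalAuth');

const router = express.Router();
const useLdap = !!process.env.LDAP_URL;

// Both middlewares set req.user on success before calling next().
router.post('/login', useLdap ? requireLdapAuth : requireLocalAuth, (req, res) => {
  res.json({ user: req.user });
});

module.exports = router;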

@@ -21,7 +21,13 @@ const requireLocalAuth = (req, res, next) => {
      log({
        title: '(requireLocalAuth) Error: No user',
      });
-     return res.status(422).send(info);
+     return res.status(404).send(info);
+   }
+   if (info && info.message) {
+     log({
+       title: '(requireLocalAuth) Error: ' + info.message,
+     });
+     return res.status(422).send({ message: info.message });
    }
    req.user = user;
    next();

14  api/server/middleware/roles/checkAdmin.js  Normal file
@@ -0,0 +1,14 @@
const { SystemRoles } = require('librechat-data-provider');

function checkAdmin(req, res, next) {
  try {
    if (req.user.role !== SystemRoles.ADMIN) {
      return res.status(403).json({ message: 'Forbidden' });
    }
    next();
  } catch (error) {
    res.status(500).json({ message: 'Internal Server Error' });
  }
}

module.exports = checkAdmin;
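
A minimal sketch of guarding an admin-only route with checkAdmin behind JWT auth; the route path, handler, and require specifiers are assumptions for illustration.

// Illustrative only: '/admin/users' and its handler are placeholders.
const express = require('express');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
const checkAdmin = require('~/server/middleware/roles/checkAdmin');

const router = express.Router();

// requireJwtAuth populates req.user; checkAdmin then rejects non-admin roles with 403.
router.get('/admin/users', requireJwtAuth, checkAdmin, (req, res) => {
  res.json({ message: 'admin-only data' });
});

module.exports = router;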

52  api/server/middleware/roles/generateCheckAccess.js  Normal file
@@ -0,0 +1,52 @@
const { SystemRoles } = require('librechat-data-provider');
const { getRoleByName } = require('~/models/Role');

/**
 * Middleware to check if a user has one or more required permissions, optionally based on `req.body` properties.
 *
 * @param {PermissionTypes} permissionType - The type of permission to check.
 * @param {Permissions[]} permissions - The list of specific permissions to check.
 * @param {Record<Permissions, string[]>} [bodyProps] - An optional object where keys are permissions and values are arrays of `req.body` properties to check.
 * @returns {Function} Express middleware function.
 */
const generateCheckAccess = (permissionType, permissions, bodyProps = {}) => {
  return async (req, res, next) => {
    try {
      const { user } = req;
      if (!user) {
        return res.status(401).json({ message: 'Authorization required' });
      }

      if (user.role === SystemRoles.ADMIN) {
        return next();
      }

      const role = await getRoleByName(user.role);
      if (role && role[permissionType]) {
        const hasAnyPermission = permissions.some((permission) => {
          if (role[permissionType][permission]) {
            return true;
          }

          if (bodyProps[permission] && req.body) {
            return bodyProps[permission].some((prop) =>
              Object.prototype.hasOwnProperty.call(req.body, prop),
            );
          }

          return false;
        });

        if (hasAnyPermission) {
          return next();
        }
      }

      return res.status(403).json({ message: 'Forbidden: Insufficient permissions' });
    } catch (error) {
      return res.status(500).json({ message: `Server error: ${error.message}` });
    }
  };
};

module.exports = generateCheckAccess;
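
As a sketch, generateCheckAccess could be used to build a route guard like the one below; the specific PermissionTypes/Permissions values, route path, and require specifiers are assumptions for illustration rather than values confirmed by this diff.

// Illustrative: the permission enum values and '/prompts' route are assumptions.
const express = require('express');
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
const generateCheckAccess = require('~/server/middleware/roles/generateCheckAccess');

const router = express.Router();

// Grants access if the user's role allows USE or CREATE under the assumed PROMPTS permission type;
// admins bypass the role lookup entirely (see the middleware above).
const checkPromptAccess = generateCheckAccess(PermissionTypes.PROMPTS, [
  Permissions.USE,
  Permissions.CREATE,
]);

router.post('/prompts', requireJwtAuth, checkPromptAccess, (req, res) => {
  res.json({ ok: true });
});

module.exports = router;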
Some files were not shown because too many files have changed in this diff.