Compare commits

...

68 Commits

Author SHA1 Message Date
Danny Avila
94764c9c2a Release v0.5.6 (#723) 2023-07-28 13:51:41 -04:00
Fuegovic
e9c981c202 feat: add version number in UI (#704)
* feat: add version number in UI

* feat: add version number in UI

* feat: add version number in footer

* Update Footer.tsx

More concise, cleaner message

---------

Co-authored-by: Danny Avila <110412045+danny-avila@users.noreply.github.com>
2023-07-28 13:48:02 -04:00
Danny Avila
bec1d245bd fix(Eng.tsx): change 'New Chat' to 'New chat' for consistency with other translations 2023-07-28 13:42:39 -04:00
Danny Avila
131cb6cddb chore: remove no longer needed route for nonexistent method 2023-07-28 13:42:39 -04:00
Danny Avila
a2b6e9a6a8 fix(meilisearch): results will now properly paginate 2023-07-28 13:42:39 -04:00
Danny Avila
428fd5bed8 chore(mongoMeili.js): update console log messages for indexing in Meilisearch 2023-07-28 13:42:39 -04:00
Danny Avila
9cacf76c10 refactor(mongoMeili.js): remove console.log statement for document not indexed 2023-07-28 13:42:39 -04:00
Danny Avila
7b8036a369 fix(anthropic.js, gptPlugins.js, openAI.js): add error handling to abortMessage function calls 2023-07-28 13:42:39 -04:00
Danny Avila
d56817850c chore(convos.js): comment out console.log statement for debugging deletion source 2023-07-28 13:42:39 -04:00
Danny Avila
f88a0685f7 fix(db/indexSync.js): update import paths for Conversation and Message models
feat(db/indexSync.js): add synchronization logic between MongoDB collection and MeiliSearch index
fix(models/plugins/mongoMeili.js): update createMeiliMongooseModel function to remove unused parameters and add documentation for syncWithMeili method
2023-07-28 13:42:39 -04:00
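
A rough illustration of the synchronization logic described in the indexSync.js commit above: compare MongoDB document counts against the MeiliSearch index stats and trigger a re-index when they diverge. This is a minimal sketch, not the project's exact code; the index names, environment variables, import paths, and the syncWithMeili() static are assumptions drawn from the commit messages.

// sketch of db/indexSync.js -- names and paths are illustrative assumptions
const { MeiliSearch } = require('meilisearch');
const Conversation = require('../models/Conversation'); // hypothetical import path
const Message = require('../models/Message'); // hypothetical import path

async function indexSync() {
  if (!process.env.SEARCH || !process.env.MEILI_HOST) {
    return; // search disabled, nothing to sync
  }
  const client = new MeiliSearch({
    host: process.env.MEILI_HOST,
    apiKey: process.env.MEILI_MASTER_KEY,
  });

  // Re-index a collection (via the mongoMeili plugin's syncWithMeili) when the
  // MongoDB count no longer matches what MeiliSearch reports for its index.
  const convoCount = await Conversation.countDocuments();
  const { numberOfDocuments: indexedConvos } = await client.index('convos').getStats();
  if (convoCount !== indexedConvos) {
    await Conversation.syncWithMeili();
  }

  const messageCount = await Message.countDocuments();
  const { numberOfDocuments: indexedMessages } = await client.index('messages').getStats();
  if (messageCount !== indexedMessages) {
    await Message.syncWithMeili();
  }
}

module.exports = indexSync;
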
Marco Beretta
ae51e6153f docs: Improved ngrok installation and enhanced user_auth_system.md (#721)
* Update ngrok.md

* Update user_auth_system.md

* Update user_auth_system.md

* Update user_auth_system.md
2023-07-28 13:40:47 -04:00
Danny Avila
745eef2eb0 feat: build dev images on changes to api/client or manually (#720) 2023-07-27 19:00:21 -04:00
Danny Avila
1f8520cdad wip: testing leaner docker strategy and deployment compose file (#719)
* nginx setup

* chore(dev-images.yml): update workflow trigger to push events on main branch
chore(dev-images.yml): remove building and pushing of librechat-dev-client image
chore(nginx.conf): comment out SSL configuration in nginx.conf
chore(deploy-compose.yml): uncomment api build configuration in deploy-compose.yml
chore(deploy-compose.yml): update client build configuration in deploy-compose.yml
2023-07-27 18:52:10 -04:00
Danny Avila
dae2805d27 chore(dev-images.yml): rename workflow to "Docker Dev Images Build" (#717)
chore(deploy-compose.yml): change port mapping from 9000:3080 to 3080:3080
2023-07-27 17:05:35 -04:00
Danny Avila
ba2e95db04 Update Dockerfile.multi 2023-07-27 16:50:33 -04:00
Danny Avila
2a6e000217 wip: testing dev image workflows and deployment setup (#716)
* chore(deploy-compose.yml): update API and client image references to use latest versions from ghcr.io
feat(deploy-compose.yml): add NODE_ENV environment variable with value 'production' for API service

* chore(dev-images.yml): tag and push latest images to container registry
chore(dev-images.yml): tag and push latest client image to container registry
chore(dev-images.yml): tag and push latest dev image to container registry
fix(Dockerfile.multi): fix CMD command to properly set NODE_ENV variable
2023-07-27 16:48:41 -04:00
Danny Avila
32281d1b8d wip: testing container workflows and deployment images (#715)
* feat: add Dockerfile.multi for building API, Client, and Data Provider

feat: add nginx.conf for client-side routing in Nginx

feat: add deploy-compose.yml for deploying the application with Docker Compose

chore: update version in deploy-compose.yml to 3.8

chore: remove unused configuration in docs/dev/deploy-compose.yml

* chore(Dockerfile.multi): Remove data-provider build stage
chore(deploy-compose.yml): Add NODE_ENV=production environment variable

* chore(Dockerfile.multi): add environment variable NODE_OPTIONS with value "--max-old-space-size=776"
feat(Dockerfile.multi): copy client build output to api build stage

* chore(Dockerfile.multi): update NODE_OPTIONS to increase max-old-space-size to 2048
chore(deploy-compose.yml): remove NODE_ENV=production environment variable

* feat(dev-images.yml): add GitHub Actions workflow for Docker multi-stage build on push to main branch
2023-07-27 16:24:06 -04:00
Danny Avila
369b1f4eba chore: remove data-provider and use npm package instead (#713)
* chore: remove data-provider, install npm package

* chore: replace monorepo package with npm package: librechat-data-provider

* chore: remove data-provider scripts

* chore: remove data-provider from .eslintrc.js
2023-07-27 14:49:47 -04:00
Danny Avila
777d64088b feat: stop-backend.js and update.js linux support (#712)
* chore(dependabot.yml): update target-branch from "develop" to "dev" for npm package updates in /api, /client, and root directory

* feat: stop-backend.js and update.js linux support (#701)

* feat: stop-backend.js and update.js linux support

* feat: update.js sudo support

* chore(helpers.js): add deleteNodeModules function
feat(packages.js): add script to delete node_modules and install dependencies
refactor(update.js): remove unnecessary imports and use deleteNodeModules function
feat(package.json): add update:linux script to update with sudo

* chore(package.json): rename 'update:linux' script to 'update:sudo'

* refactor(update.js): simplify downCommand and buildCommand by removing redundant use of sudo command, add sudo to single docker command

---------

Co-authored-by: Fuegovic <32828263+fuegovic@users.noreply.github.com>
2023-07-27 11:11:56 -04:00
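
The deleteNodeModules helper added in #712 could look roughly like the sketch below; the exact signature, file location, and the set of workspaces it cleans are assumptions for illustration.

// sketch of a helpers.js-style deleteNodeModules -- layout is assumed
const fs = require('fs');
const path = require('path');

// Remove a workspace's node_modules folder if it exists, so the install
// scripts start from a clean slate.
function deleteNodeModules(dir) {
  const nodeModulesPath = path.join(dir, 'node_modules');
  if (fs.existsSync(nodeModulesPath)) {
    console.log(`Deleting ${nodeModulesPath}...`);
    fs.rmSync(nodeModulesPath, { recursive: true, force: true });
  }
}

module.exports = { deleteNodeModules };

// e.g. from a packages.js script: clean the root, api, and client workspaces
// ['.', 'api', 'client'].forEach((dir) => deleteNodeModules(path.resolve(__dirname, '..', dir)));
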
Danny Avila
d59a3f20cb chore: linting 2023-07-27 10:32:23 -04:00
Danny Avila
8959576d75 fix(Settings/General): fix clear convos bug where active convo would still appear after clearing convos 2023-07-27 10:32:23 -04:00
Danny Avila
dd8bc39001 refactor(Nav/Conversation): reorganize imports and fix import paths 2023-07-27 10:32:23 -04:00
Danny Avila
4898f7489b chore(jest.config.cjs): linting 2023-07-27 10:32:23 -04:00
Danny Avila
c9c77d6fdf chore(eslint): add 'import' plugin to eslint configuration
chore(prettier): add 'prettier-plugin-tailwindcss' plugin to prettier configuration
chore(package.json): update eslint-plugin-import to version 2.27.5
2023-07-27 10:32:23 -04:00
Fuegovic
4dc86c4c18 feat: add more plugins to the Plugin store (#709) 2023-07-27 08:10:22 -04:00
Marco Beretta
6ae807c404 Update free_ai_apis.md (#707) 2023-07-27 08:08:53 -04:00
Marco Beretta
b5353e2640 Organize the getting started menu (#708)
* Create installation.md

* Delete installation.md

* Create installation.md

* Update installation.md

* Update README.md

* Update mkdocs.yml

* Update apis_and_tokens.md

* Update README.md

* Delete installation.md

* Update README.md

* Update mkdocs.yml
2023-07-27 08:05:49 -04:00
Danny Avila
f4f1199a55 feat: docker-compose deployment file (#706)
* feat(deploy-compose.yml): add docker-compose file for development deployment

A new docker-compose file has been added for development deployment. This file defines the services required for running the application in a development environment. The services include a client service running nginx, an api service running the LibreChat application, a mongodb service for the database, and a meilisearch service for search functionality.

The client service is configured to use the latest version of the nginx image, with port 3080 mapped to port 80. It also mounts the nginx.conf file and the client's node_modules directory.

The api service is named LibreChat and is built from the librechat image. It exposes port 9000 and depends on the mongodb service. It also mounts the api directory, the .env files, and the client's node_modules directory.

The mongodb service is named chat-mongodb and uses the mongo image. It exposes port 27018 and mounts the data-node directory for data storage

* chore(deploy-compose.yml): update env_file path to ../../.env

* chore(deploy-compose.yml): update image name to librechat_deploy
chore(deploy-compose.yml): update build context to ../../

* chore(deploy-compose.yml): update image and comment out build section

The image for the service has been updated to `ghcr.io/danny-avila/librechat:latest`. The build section has been commented out as it is no longer needed.

* refactor(nginx.conf): reformat nginx.conf for better readability and maintainability

* chore(nginx.conf): add worker_connections configuration to events block
chore(nginx.conf): add listen configuration to server block

* chore(deploy-compose.yml): update nginx container ports configuration
feat(deploy-compose.yml): add support for HTTPS by exposing port 443

* docs(dev/README.md): add instructions for deploying with deploy-compose.yml

* docs(dev/README.md): update instructions for deploying with deploy-compose.yml
2023-07-26 12:57:26 -04:00
Danny Avila
b6028a3434 Update breaking_changes.md 2023-07-26 08:48:12 -04:00
Danny Avila
abef8c02c1 Update breaking_changes.md 2023-07-26 08:44:51 -04:00
Danny Avila
19af2b06ce feat: utilize lean queries, remove migration script, index createdAt timestamps (#698) 2023-07-25 19:27:55 -04:00
* feat(mongoDb): utilize lean queries and index createdAt timestamps for cosmosDB support

* fix: remove unnecessary lean() method from deleteMany calls

* fix: remove unnecessary lean() method from deleteMany calls

* fix: remove lean() from queries that need hydration

* chore(migrateDb.js): remove unused migration script
fix(Preset.js): return lean documents when retrieving presets
refactor(index.js): remove migration script from server initialization
refactor(convos.js): remove toObject() when sending conversation object
refactor(presets.js): remove toObject() when sending presets object
2023-07-25 19:27:55 -04:00
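
The pattern behind #698, in sketch form: index createdAt so conversations can be sorted by creation time without a collection scan, and return lean (plain-object) results so routes can send them without toObject(). The schema fields and the paging helper below are illustrative assumptions, not the project's actual models.

// sketch only -- field names and helper signature are assumptions
const mongoose = require('mongoose');

const convoSchema = new mongoose.Schema(
  {
    conversationId: { type: String, unique: true, required: true },
    user: String,
    title: { type: String, default: 'New Chat' },
  },
  { timestamps: true }, // adds createdAt/updatedAt
);
// Index createdAt so "most recent first" queries are cheap (relevant for the cosmosDB support).
convoSchema.index({ createdAt: 1 });

const Conversation = mongoose.models.Conversation || mongoose.model('Conversation', convoSchema);

// lean() returns plain JavaScript objects instead of hydrated documents,
// so the route can send them directly without calling toObject().
async function getConvosByPage(user, pageNumber = 1, pageSize = 25) {
  return Conversation.find({ user })
    .sort({ createdAt: -1 })
    .skip((pageNumber - 1) * pageSize)
    .limit(pageSize)
    .lean();
}

module.exports = { Conversation, getConvosByPage };
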
Marco Beretta
2f7658e39f Italian-localization-support-for-Nav-components-and-small-fix (#697) 2023-07-25 18:29:32 -04:00
Raí
d3138c79fc Language files: Spanish translation and Portuguese corrections for PR. (#694)
* Add files via upload

* Add files via upload
2023-07-24 16:22:14 -04:00
Abner Chou
1e49b7ecb1 Support localization for Nav components (#688)
* init localization

* Update default to en

* Fix merge issue and import path.

* Set default to en

* Change jsx to tsx

* Update the password max length string.

* Remove languageContext as we are using recoil instead.

* Add localization to component endpoints pages

* Revert default to en after testing.

* Update LoginForm.tsx

* Fix translation.

* Make lint happy

* Merge (#1)

* Create deploy.yml

* Add localization support for endpoint pages components  (#667)

* init localization

* Update default to en

* Fix merge issue and import path.

* Set default to en

* Change jsx to tsx

* Update the password max length string.

* Remove languageContext as we are using recoil instead.

* Add localization to component endpoints pages

* Revert default to en after testing.

* Update LoginForm.tsx

* Fix translation.

* Make lint happy

* Add a restart to meilisearch in docker-compose.yml (#684)

* Oauth fixes for Cognito (#686)

* Add a restart to meilisearch in docker-compose.yml

* Oauth fixes for Cognito

* Use the username or email for full name from oauth if not provided

---------

Co-authored-by: Donavan <snark@hey.com>

* Italian localization support for endpoint (#687)

---------

Co-authored-by: Danny Avila <110412045+danny-avila@users.noreply.github.com>
Co-authored-by: Donavan Stanley <donavan.stanley@gmail.com>
Co-authored-by: Donavan <snark@hey.com>
Co-authored-by: Marco Beretta <81851188+Berry-13@users.noreply.github.com>

* Translate Nav pages

* Fix npm test

---------

Co-authored-by: Danny Avila <110412045+danny-avila@users.noreply.github.com>
Co-authored-by: Donavan Stanley <donavan.stanley@gmail.com>
Co-authored-by: Donavan <snark@hey.com>
Co-authored-by: Marco Beretta <81851188+Berry-13@users.noreply.github.com>
2023-07-24 08:33:08 -04:00
Danny Avila
3b865fbc59 Update mkdocs.yml 2023-07-23 20:34:41 -04:00
Danny Avila
afd894553c docs: add installation guide for free AI APIs (ChimeraGPT) (#692)
* docs: add installation guide for free AI APIs (chimeraGPT)

* docs: Update free_ai_apis.md with screenshots
2023-07-23 20:30:58 -04:00
Daniel Avila
df485e5bfe chore(.env.example): comment out OPENAI_MODELS and PLUGIN_MODELS
The OPENAI_MODELS and PLUGIN_MODELS variables are being commented out in the .env.example file. This is done to prefer fetching api/models as the default behavior
2023-07-23 16:51:42 -07:00
Daniel Avila
77252bafc1 chore(.prettierrc.js): update tabWidth to 2 and remove commented out code 2023-07-23 16:51:42 -07:00
Daniel Avila
bd1d5e991d feat(endpoints): fetch v1/api/models for model selection when no default models are set 2023-07-23 16:51:42 -07:00
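
One plausible shape for the "fetch models when no defaults are set" behavior from the two commits above, sketched under assumptions (the axios dependency, the gpt- filter, and the fallback list are illustrative): use OPENAI_MODELS from the environment when it is set, otherwise query OpenAI's /v1/models endpoint.

// sketch -- not the project's exact endpoint service
const axios = require('axios');

const fallbackModels = ['gpt-3.5-turbo', 'gpt-4'];

async function getOpenAIModels() {
  // With OPENAI_MODELS commented out in .env, this branch is skipped and the
  // list is fetched from the API instead (the new default behavior).
  if (process.env.OPENAI_MODELS) {
    return process.env.OPENAI_MODELS.split(',');
  }
  try {
    const { data } = await axios.get('https://api.openai.com/v1/models', {
      headers: { Authorization: `Bearer ${process.env.OPENAI_API_KEY}` },
    });
    return data.data.map((model) => model.id).filter((id) => id.startsWith('gpt-'));
  } catch (err) {
    console.error('Failed to fetch model list, using fallback:', err.message);
    return fallbackModels;
  }
}

module.exports = { getOpenAIModels };
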
Danny Avila
18c4883ae0 refactor(PluginsClient.js): simplify getFunctionModelName logic using if-else statements
refactor(PluginsClient.js): improve readability by extracting observedImagePath variable
fix(PluginsClient.js): check if responseMessage already includes observedImagePath before appending observation
2023-07-23 16:51:42 -07:00
Youngwook Kim
197307d514 fix(OpenAIClient): resolve null pointer exception in tokenizer management (#689) 2023-07-23 11:59:11 -04:00
Marco Beretta
130356654c Italian localization support for endpoint (#687) 2023-07-22 20:12:48 -04:00
Donavan Stanley
8f9f09698b Oauth fixes for Cognito (#686)
* Add a restart to meilisearch in docker-compose.yml

* Oauth fixes for Cognito

* Use the username or email for full name from oauth if not provided

---------

Co-authored-by: Donavan <snark@hey.com>
2023-07-22 20:12:15 -04:00
Donavan Stanley
5da833e066 Add a restart to meilisearch in docker-compose.yml (#684) 2023-07-22 15:10:07 -04:00
Abner Chou
b64273957a Add localization support for endpoint pages components (#667)
* init localization

* Update default to en

* Fix merge issue and import path.

* Set default to en

* Change jsx to tsx

* Update the password max length string.

* Remove languageContext as we are using recoil instead.

* Add localization to component endpoints pages

* Revert default to en after testing.

* Update LoginForm.tsx

* Fix translation.

* Make lint happy
2023-07-22 15:09:45 -04:00
Danny Avila
4148c6d219 Create deploy.yml 2023-07-22 13:49:49 -04:00
Danny Avila
e9d68e3bef Update build.yml 2023-07-22 13:35:00 -04:00
Danny Avila
bbe690cc4b Update build.yml 2023-07-22 13:29:48 -04:00
Danny Avila
a1ad471d87 Update build.yml 2023-07-22 13:21:30 -04:00
Danny Avila
c319d709f3 Create build.yml 2023-07-22 11:31:56 -04:00
Danny Avila
6943f1c2c7 refactor: improve passport strategy handling in async/await manner to prevent race conditions upon importing modules (#682) 2023-07-22 10:29:17 -04:00
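
The race-condition fix in #682 amounts to not registering passport strategies as a side effect of require(); instead, each strategy module can export an async setup function that the server awaits before it starts listening. A minimal sketch, with module paths and strategy names assumed:

// sketch of the async strategy setup pattern -- paths and names are assumptions
const passport = require('passport');

async function configureStrategies() {
  // Each strategy module exports an async factory instead of calling
  // passport.use() at import time, so nothing races during module loading.
  const setupJwtStrategy = require('./strategies/jwtStrategy');
  const setupLocalStrategy = require('./strategies/localStrategy');

  passport.use(await setupJwtStrategy());
  passport.use(await setupLocalStrategy());
}

module.exports = configureStrategies;

// in server/index.js, before app.listen():
//   await configureStrategies();
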
Danny Avila
e38483a8b9 feat(config/update.js): add support for updating with single-compose file (#680) 2023-07-21 21:51:35 -04:00
Danny Avila
2a2e6d9991 docs(dev): update README.md with instructions for using single-compose.yml (#676)
feat(single-compose.yml): add single-compose.yml for building leaner app container without meilisearch and mongodb services
- This is useful for deploying on Google, Azure, etc., as a single, leaner container.
- Instructions for running the container are added to the README.md file.
- The container requires a MongoDB Atlas connection string for the `MONGO_URI` environment variable.
- Remote Meilisearch may also be possible, but is not tested.
2023-07-21 20:11:05 -04:00
Danny Avila
deb1472aa5 chore: add update script for assuring clean installations (#673) 2023-07-21 16:44:59 -04:00
Danny Avila
8aa58ea240 chore(api): update langchain dependency to version 0.0.114 (#669) 2023-07-21 00:14:54 -04:00
Daniel Avila
3a112a344d fix(Content.jsx): remove 'z-index: 1;' from currentContent variable
fix(Content.jsx): exclude rehypePlugins if isIFrame is true
fix(Content.jsx): update content rendering to use currentContent variable
2023-07-20 19:40:31 -07:00
Daniel Avila
712be248be chore: bump app version to 0.5.5
chore: bump @waylaidwanderer/chatgpt-api to 1.37.2
2023-07-20 19:40:31 -07:00
fuegovic
530f9d303f feat: Bing Image Creator 2023-07-20 19:40:31 -07:00
Fuegovic
ad29d25396 docs: updates (#662)
* docs: updates

* docs: updates

* Update Settings.jsx
2023-07-19 08:35:41 -07:00
Danny Avila
1ef53a41f0 chore(Settings.jsx): update placeholder text for promptPrefix/system message (#656) 2023-07-16 13:22:36 -04:00
Fuegovic
0246f164b0 docs: add "chatgpt_plugins_openapi.md" to readme.md and mkdocs (#655)
* docs: add chatgpt_plugins_openapi.md to toc

* docs: add chatgpt_plugins_openapi.md to mkdocs toc
2023-07-16 13:14:07 -04:00
Danny Avila
514f625b8f feat: ChatGPT Plugins/OpenAPI specs for Plugins Endpoint (#620)
* wip: proof of concept for openapi chain

* chore(api): update langchain dependency to version 0.0.105

* feat(Plugins): use ChatGPT Plugins/OpenAPI specs (first pass)

* chore(manifest.json): update pluginKey for "Browser" tool to "web-browser"
chore(handleTools.js): update customConstructor key for "web-browser" tool

* fix(handleSubmit.js): set unfinished property to false for all endpoints

* fix(handlers.js): remove unnecessary capitalizeWords function and use action.tool directly
refactor(endpoints.js): rename availableTools to tools and transform it into a map

* feat(endpoints): add plugins selector to endpoints file
refactor(CodeBlock.tsx): refactor to typescript
refactor(Plugin.tsx): use recoil Map for plugin name and refactor to typescript
chore(Message.jsx): linting
chore(PluginsOptions/index.jsx): remove comment/linting
chore(svg): export Clipboard and CheckMark components from SVG index and refactor to typescript

* fix(OpenAPIPlugin.js): rename readYamlFile function to readSpecFile
fix(OpenAPIPlugin.js): handle JSON files in readSpecFile function
fix(OpenAPIPlugin.js): handle JSON URLs in getSpec function
fix(OpenAPIPlugin.js): handle JSON variables in createOpenAPIPlugin function
fix(OpenAPIPlugin.js): add description for variables in createOpenAPIPlugin function
fix(OpenAPIPlugin.js): add optional flag for is_user_authenticated and has_user_authentication in ManifestDefinition
fix(loadSpecs.js): add optional flag for is_user_authenticated and has_user_authentication in ManifestDefinition
fix(Plugin.tsx): remove unnecessary callback parameter in getPluginName function
fix(getDefaultConversation.js): fix browser console error: handle null value for lastConversationSetup in getDefaultConversation function

* feat(api): add new tools

Add Ai PDF tool for super-fast, interactive chats with PDFs of any size, complete with page references for fact checking.
Add VoxScript tool for searching through YouTube transcripts, financial data sources, Google Search results, and more.
Add WebPilot tool for browsing and QA of webpages, PDFs, and data. Generate articles from one or more URLs.

feat(api): update OpenAPIPlugin.js

- Add support for bearer token authorization in the OpenAPIPlugin.
- Add support for custom headers in the OpenAPIPlugin.

fix(api): fix loadTools.js

- Pass the user parameter to the loadSpecs function.

* feat(PluginsClient.js): import findMessageContent function from utils
feat(PluginsClient.js): add message parameter to options object in initializeCustomAgent function
feat(PluginsClient.js): add content to errorMessage if message content is found
feat(PluginsClient.js): break out of loop if message content is found
feat(PluginsClient.js): add delay option with value of 8 to generateTextStream function
feat(PluginsClient.js): add support for process.env.PORT environment variable in app.listen function
feat(askyourpdf.json): add askyourpdf plugin configuration
feat(metar.json): add metar plugin configuration
feat(askyourpdf.yaml): add askyourpdf plugin OpenAPI specification
feat(OpenAPIPlugin.js): add message parameter to createOpenAPIPlugin function
feat(OpenAPIPlugin.js): add description_for_model to chain run message
feat(addOpenAPISpecs.js): remove verbose option from loadSpecs function call

fix(loadSpecs.js): add 'message' parameter to the loadSpecs function
feat(findMessageContent.js): add utility function to find message content in JSON objects

* fix(PluginStoreDialog.tsx): update z-index value for the dialog container

The z-index value for the dialog container was updated to "102" to ensure it appears above other elements on the page.

* chore(web_pilot.json): add "params" field with "user_has_request" parameter set to true

* chore(eslintrc.js): update eslint rules
fix(Login.tsx): add missing semicolon after import statement

* fix(package-lock.json): update langchain dependency to version ^0.0.105

* fix(OpenAPIPlugin.js): change header key from 'id' to 'librechat_user_id' for consistency and clarity

feat(plugins): add documentation for using official ChatGPT Plugins with OpenAPI specs

This commit adds a new file `chatgpt_plugins_openapi.md` to the `docs/features/plugins` directory. The file provides detailed information on how to use official ChatGPT Plugins with OpenAPI specifications. It explains the components of a plugin, including the Plugin Manifest file and the OpenAPI spec. It also covers the process of adding a plugin, editing manifest files, and customizing OpenAPI spec files. Additionally, the commit includes disclaimers about the limitations and compatibility of plugins with LibreChat. The documentation also clarifies that the use of ChatGPT Plugins with LibreChat does not violate OpenAI's Terms of Service.

The purpose of this commit is to provide comprehensive documentation for developers who want to integrate ChatGPT Plugins into their projects using OpenAPI specs. It aims to guide them through the process of adding and configuring plugins, as well as addressing potential issues and

chore(introduction.md): update link to ChatGPT Plugins documentation
docs(introduction.md): clarify the purpose of the plugins endpoint and its capabilities

* fix(OpenAPIPlugin.js): update SUFFIX variable to provide a clearer description
docs(chatgpt_plugins_openapi.md): update information about adding plugins via url on the frontend

* feat(PluginsClient.js): sendIntermediateMessage on successful Agent load
fix(PluginsClient.js, server/index.js, gptPlugins.js): linting fixes
docs(chatgpt_plugins_openapi.md): update links and add additional information

* Update chatgpt_plugins_openapi.md

* chore: rebuild package-lock file

* chore: format/lint all files with new rules

* chore: format all files

* chore(README.md): update AI model selection list

The AI model selection list in the README.md file has been updated to reflect the current options available. The "Anthropic" model has been added as an alternative name for the "Claude" model.

* fix(Plugin.tsx): type issue

* feat(tools): add new tool WebPilot

feat(tools): remove tool Weather Report

feat(tools): add new tool Prompt Perfect

feat(tools): add new tool Scholarly Graph Link

* feat(OpenAPIPlugin.js): add getSpec and readSpecFile functions
feat(OpenAPIPlugin.spec.js): add tests for readSpecFile, getSpec, and createOpenAPIPlugin functions

* chore(agent-demo-1.js): remove unused code and dependencies
chore(agent-demo-2.js): remove unused code and dependencies
chore(demo.js): remove unused code and dependencies

* feat(addOpenAPISpecs): add function to transform OpenAPI specs into desired format
feat(addOpenAPISpecs.spec): add tests for transformSpec function
fix(loadSpecs): remove debugging code

* feat(loadSpecs.spec.js): add unit tests for ManifestDefinition, validateJson, and loadSpecs functions

* fix: package file resolution bug

* chore: move scholarly_graph_link manifest to 'has-issues'

* refactor(client/hooks): convert to TS and export from index

* Update introduction.md

* Update chatgpt_plugins_openapi.md
2023-07-16 12:19:47 -04:00
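
For the readSpecFile/getSpec changes listed in #620, a simplified sketch of loading a plugin's OpenAPI spec from either a local .json/.yaml file or a URL (js-yaml and a Node runtime with global fetch are assumed; this is not the actual OpenAPIPlugin.js implementation):

// simplified sketch -- error handling and validation omitted
const fs = require('fs');
const yaml = require('js-yaml');

async function readSpecFile(filePath) {
  const raw = fs.readFileSync(filePath, 'utf8');
  // Manifests may point at JSON or YAML specs, so branch on the extension.
  return filePath.endsWith('.json') ? JSON.parse(raw) : yaml.load(raw);
}

async function getSpec(source) {
  if (!source.startsWith('http')) {
    return readSpecFile(source); // local spec, e.g. "askyourpdf.yaml"
  }
  const response = await fetch(source);
  const text = await response.text();
  return source.endsWith('.json') ? JSON.parse(text) : yaml.load(text);
}

module.exports = { readSpecFile, getSpec };
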
Danny Avila
39ac8d3858 fix: typo when including proxy for langchain (#653)
* fix(PluginsClient.js): change reverseProxyUrl variable to options.reverseProxyUrl

* chore(.prettierrc.js): comment out tabWidth option in Prettier configuration
2023-07-15 12:19:23 -04:00
Danny Avila
15987abe0a style(Nav): improve Nav transition for open/close (#652)
* Revert "Animated sidebar (#649)"

This reverts commit dd19323280.

* in progress

* style(Nav): improve transition for Nav
2023-07-15 10:43:15 -04:00
Anirudh
dd19323280 Animated sidebar (#649)
* Initial Commit

* Add transition
2023-07-15 08:26:18 -04:00
Fuegovic
af47a68632 fix: sharpness in Bing Chat icon (#648)
* Fix sharpness in Bing Chat icon

* Fix sharpness in Bing Chat icon

* Fix sharpness in Bing Chat icon
2023-07-15 08:25:11 -04:00
Danny Avila
9303ea2f57 chore(.env.example): add MEILI_NO_ANALYTICS variable and set it to true (#647)
chore(docker-compose.yml): add MEILI_NO_ANALYTICS environment variable and set it to true
2023-07-15 08:23:34 -04:00
Danny Avila
20dde44512 fix(Settings.jsx): fix Settings inputs losing focus to main textarea (#646)
* fix(Settings.jsx): fix Settings inputs losing focus to main textarea

* refactor(Input/index.jsx): remove console.log statement in useEffect
2023-07-14 15:47:32 -04:00
271 changed files with 7113 additions and 5402 deletions


@@ -32,7 +32,7 @@ OPENAI_API_KEY="user_provided"
# Identify the available models, separated by commas *without spaces*.
# The first will be default.
# Leave it blank to use internal settings.
OPENAI_MODELS=gpt-3.5-turbo,gpt-3.5-turbo-16k,gpt-3.5-turbo-0301,text-davinci-003,gpt-4,gpt-4-0314,gpt-4-0613
# OPENAI_MODELS=gpt-3.5-turbo,gpt-3.5-turbo-16k,gpt-3.5-turbo-0301,text-davinci-003,gpt-4,gpt-4-0314,gpt-4-0613
# Reverse proxy settings for OpenAI:
# https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy
@@ -124,7 +124,7 @@ ANTHROPIC_MODELS=claude-1,claude-instant-1,claude-2
# Identify the available models, separated by commas *without spaces*.
# The first will be default.
# Leave it blank to use internal settings.
PLUGIN_MODELS=gpt-3.5-turbo,gpt-3.5-turbo-16k,gpt-3.5-turbo-0301,gpt-4,gpt-4-0314,gpt-4-0613
# PLUGIN_MODELS=gpt-3.5-turbo,gpt-3.5-turbo-16k,gpt-3.5-turbo-0301,gpt-4,gpt-4-0314,gpt-4-0613
# For securely storing credentials, you need a fixed key and IV. You can set them here for prod and dev environments
# If you don't set them, the app will crash on startup.
@@ -174,6 +174,9 @@ PROXY=
# The easiest setup for this is through docker-compose, which takes care of it for you.
SEARCH=true
# HIGHLY RECOMMENDED: Disable anonymized telemetry analytics for MeiliSearch for absolute privacy.
MEILI_NO_ANALYTICS=true
# REQUIRED FOR SEARCH: MeiliSearch Host, mainly for the API server to connect to the search server.
# Replace '0.0.0.0' with 'meilisearch' if serving MeiliSearch with docker-compose.
MEILI_HOST=http://0.0.0.0:7700


@@ -13,14 +13,7 @@ module.exports = {
'plugin:jest/recommended',
'prettier',
],
// ignorePatterns: ['packages/data-provider/types/**/*'],
ignorePatterns: [
'client/dist/**/*',
'client/public/**/*',
'e2e/playwright-report/**/*',
'packages/data-provider/types/**/*',
'packages/data-provider/dist/**/*',
],
ignorePatterns: ['client/dist/**/*', 'client/public/**/*', 'e2e/playwright-report/**/*'],
parser: '@typescript-eslint/parser',
parserOptions: {
ecmaVersion: 'latest',
@@ -29,7 +22,7 @@ module.exports = {
jsx: true,
},
},
plugins: ['react', 'react-hooks', '@typescript-eslint'],
plugins: ['react', 'react-hooks', '@typescript-eslint', 'import'],
rules: {
'react/react-in-jsx-scope': 'off',
'@typescript-eslint/ban-ts-comment': ['error', { 'ts-ignore': 'allow' }],
@@ -43,9 +36,12 @@ module.exports = {
ignoreComments: true,
},
],
'import/no-cycle': 'error',
'linebreak-style': 0,
'object-curly-spacing': ['error', 'always'],
curly: ['error', 'all'],
semi: ['error', 'always'],
'no-trailing-spaces': 'error',
'object-curly-spacing': ['error', 'always'],
'no-multiple-empty-lines': ['error', { max: 1 }],
'comma-dangle': ['error', 'always-multiline'],
// "arrow-parens": [2, "as-needed", { requireForBlockBody: true }],
@@ -109,18 +105,6 @@ module.exports = {
'plugin:@typescript-eslint/recommended',
],
},
{
files: './packages/data-provider/**/*.ts',
overrides: [
{
files: '**/*.ts',
parser: '@typescript-eslint/parser',
parserOptions: {
project: './packages/data-provider/tsconfig.json',
},
},
],
},
],
settings: {
react: {


@@ -7,7 +7,7 @@ version: 2
updates:
- package-ecosystem: "npm" # See documentation for possible values
directory: "/api" # Location of package manifests
target-branch: "develop"
target-branch: "dev"
versioning-strategy: increase-if-necessary
schedule:
interval: "weekly"
@@ -20,7 +20,7 @@ updates:
include: "scope"
- package-ecosystem: "npm" # See documentation for possible values
directory: "/client" # Location of package manifests
target-branch: "develop"
target-branch: "dev"
versioning-strategy: increase-if-necessary
schedule:
interval: "weekly"
@@ -33,7 +33,7 @@ updates:
include: "scope"
- package-ecosystem: "npm" # See documentation for possible values
directory: "/" # Location of package manifests
target-branch: "develop"
target-branch: "dev"
versioning-strategy: increase-if-necessary
schedule:
interval: "weekly"

.github/workflows/build.yml (new file, +38 lines)

@@ -0,0 +1,38 @@
name: Linux_Container_Workflow
on:
workflow_dispatch:
env:
RUNNER_VERSION: 2.293.0
jobs:
build-and-push:
runs-on: ubuntu-latest
steps:
# checkout the repo
- name: 'Checkout GitHub Action'
uses: actions/checkout@main
- name: 'Login via Azure CLI'
uses: azure/login@v1
with:
creds: ${{ secrets.AZURE_CREDENTIALS }}
- name: 'Build GitHub Runner container image'
uses: azure/docker-login@v1
with:
login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }}
username: ${{ secrets.REGISTRY_USERNAME }}
password: ${{ secrets.REGISTRY_PASSWORD }}
- run: |
docker build --build-arg RUNNER_VERSION=${{ env.RUNNER_VERSION }} -t ${{ secrets.REGISTRY_LOGIN_SERVER }}/pwd9000-github-runner-lin:${{ env.RUNNER_VERSION }} .
- name: 'Push container image to ACR'
uses: azure/docker-login@v1
with:
login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }}
username: ${{ secrets.REGISTRY_USERNAME }}
password: ${{ secrets.REGISTRY_PASSWORD }}
- run: |
docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/pwd9000-github-runner-lin:${{ env.RUNNER_VERSION }}

.github/workflows/deploy.yml (new file, +38 lines)

@@ -0,0 +1,38 @@
name: Deploy_GHRunner_Linux_ACI
on:
workflow_dispatch:
env:
RUNNER_VERSION: 2.293.0
ACI_RESOURCE_GROUP: 'Demo-ACI-GitHub-Runners-RG'
ACI_NAME: 'gh-runner-linux-01'
DNS_NAME_LABEL: 'gh-lin-01'
GH_OWNER: ${{ github.repository_owner }}
GH_REPOSITORY: 'LibreChat' #Change here to deploy self hosted runner ACI to another repo.
jobs:
deploy-gh-runner-aci:
runs-on: ubuntu-latest
steps:
# checkout the repo
- name: 'Checkout GitHub Action'
uses: actions/checkout@main
- name: 'Login via Azure CLI'
uses: azure/login@v1
with:
creds: ${{ secrets.AZURE_CREDENTIALS }}
- name: 'Deploy to Azure Container Instances'
uses: 'azure/aci-deploy@v1'
with:
resource-group: ${{ env.ACI_RESOURCE_GROUP }}
image: ${{ secrets.REGISTRY_LOGIN_SERVER }}/pwd9000-github-runner-lin:${{ env.RUNNER_VERSION }}
registry-login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }}
registry-username: ${{ secrets.REGISTRY_USERNAME }}
registry-password: ${{ secrets.REGISTRY_PASSWORD }}
name: ${{ env.ACI_NAME }}
dns-name-label: ${{ env.DNS_NAME_LABEL }}
environment-variables: GH_TOKEN=${{ secrets.PAT_TOKEN }} GH_OWNER=${{ env.GH_OWNER }} GH_REPOSITORY=${{ env.GH_REPOSITORY }}
location: 'eastus'

.github/workflows/dev-images.yml (new file, +51 lines)

@@ -0,0 +1,51 @@
name: Docker Dev Images Build
on:
workflow_dispatch:
push:
branches:
- main
paths:
- 'api/**'
- 'client/**'
jobs:
build:
runs-on: ubuntu-latest
steps:
# Check out the repository
- name: Checkout
uses: actions/checkout@v2
# Set up Docker
- name: Set up Docker
uses: docker/setup-buildx-action@v1
# Log in to GitHub Container Registry
- name: Log in to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# Build Docker images
- name: Build Docker images
run: |
cp .env.example .env
docker build -f Dockerfile.multi --target api-build -t librechat-dev-api .
docker build -f Dockerfile -t librechat-dev .
# Tag and push the images to GitHub Container Registry
- name: Tag and push images
run: |
docker tag librechat-dev-api:latest ghcr.io/${{ github.repository_owner }}/librechat-dev-api:${{ github.sha }}
docker push ghcr.io/${{ github.repository_owner }}/librechat-dev-api:${{ github.sha }}
docker tag librechat-dev-api:latest ghcr.io/${{ github.repository_owner }}/librechat-dev-api:latest
docker push ghcr.io/${{ github.repository_owner }}/librechat-dev-api:latest
docker tag librechat-dev:latest ghcr.io/${{ github.repository_owner }}/librechat-dev:${{ github.sha }}
docker push ghcr.io/${{ github.repository_owner }}/librechat-dev:${{ github.sha }}
docker tag librechat-dev:latest ghcr.io/${{ github.repository_owner }}/librechat-dev:latest
docker push ghcr.io/${{ github.repository_owner }}/librechat-dev:latest


@@ -1,7 +1,8 @@
module.exports = {
plugins: ['prettier-plugin-tailwindcss'],
printWidth: 100,
useTabs: false,
tabWidth: 2,
useTabs: false,
semi: true,
singleQuote: true,
// bracketSpacing: false,

Dockerfile.multi (new file, +32 lines)

@@ -0,0 +1,32 @@
# Build API, Client and Data Provider
FROM node:19-alpine AS base
WORKDIR /app
COPY config/loader.js ./config/
RUN npm install dotenv
WORKDIR /app/api
COPY api/package*.json ./
COPY api/ ./
RUN npm install
# React client build
FROM base AS client-build
WORKDIR /app/client
COPY ./client/ ./
RUN npm install
ENV NODE_OPTIONS="--max-old-space-size=2048"
RUN npm run build
# Node API setup
FROM base AS api-build
COPY --from=client-build /app/client/dist /app/client/dist
EXPOSE 3080
ENV HOST=0.0.0.0
CMD ["node", "server/index.js"]
# Nginx setup
FROM nginx:1.21.1-alpine AS prod-stage
COPY ./client/nginx.conf /etc/nginx/conf.d/default.conf
CMD ["nginx", "-g", "daemon off;"]


@@ -31,12 +31,15 @@ LibreChat brings together the future of assistant AIs with the revolutionary tec
With LibreChat, you no longer need to opt for ChatGPT Plus and can instead use free or pay-per-call APIs. We welcome contributions, cloning, and forking to enhance the capabilities of this advanced chatbot platform.
https://github.com/danny-avila/LibreChat/assets/110412045/c1eb0c0f-41f6-4335-b982-84b278b53d59
<!-- https://github.com/danny-avila/LibreChat/assets/110412045/c1eb0c0f-41f6-4335-b982-84b278b53d59 -->
[![Watch the video](https://img.youtube.com/vi/pNIOs1ovsXw/maxresdefault.jpg)](https://youtu.be/pNIOs1ovsXw)
Click on the thumbnail to open the video☝
# Features
- Response streaming identical to ChatGPT through server-sent events
- UI from original ChatGPT, including Dark mode
- AI model selection (through 6 endpoints: OpenAI API, BingAI, ChatGPT Browser, PaLM2, Claude, Plugins)
- AI model selection: OpenAI API, BingAI, ChatGPT Browser, PaLM2, Anthropic (Claude), Plugins
- Create, Save, & Share custom presets - [More info on prompt presets here](https://github.com/danny-avila/LibreChat/releases/tag/v0.3.0)
- Edit and Resubmit messages with conversation branching
- Search all messages/conversations - [More info here](https://github.com/danny-avila/LibreChat/releases/tag/v0.1.0)
@@ -44,7 +47,9 @@ https://github.com/danny-avila/LibreChat/assets/110412045/c1eb0c0f-41f6-4335-b98
---
## ⚠️ [Breaking Changes as of v0.5.0](docs/general_info/breaking_changes.md#v050) ⚠️
## ⚠️ [Breaking Changes](docs/general_info/breaking_changes.md) ⚠️
**Applies to [v0.5.4](docs/general_info/breaking_changes.md#v054) & [v0.5.5](docs/general_info/breaking_changes.md#v055)**
**Please read this before updating from a previous version**
---
@@ -59,12 +64,15 @@ Keep up with the latest updates by visiting the releases page - [Releases](https
<details open>
<summary><strong>Getting Started</strong></summary>
* [Docker Install](docs/install/docker_install.md)
* [Linux Install](docs/install/linux_install.md)
* [Mac Install](docs/install/mac_install.md)
* [Windows Install](docs/install/windows_install.md)
* [APIs and Tokens](docs/install/apis_and_tokens.md)
* [User Auth System](docs/install/user_auth_system.md)
* Installation
* [Docker Install🐳](docs/install/docker_install.md)
* [Linux Install🐧](docs/install/linux_install.md)
* [Mac Install🍎](docs/install/mac_install.md)
* [Windows Install💙](docs/install/windows_install.md)
* Configuration
* [APIs and Tokens](docs/install/apis_and_tokens.md)
* [User Auth System](docs/install/user_auth_system.md)
* [Online MongoDB Database](docs/install/mongodb.md)
</details>
<details>
@@ -85,6 +93,7 @@ Keep up with the latest updates by visiting the releases page - [Releases](https
* [Stable Diffusion](docs/features/plugins/stable_diffusion.md)
* [Wolfram](docs/features/plugins/wolfram.md)
* [Make Your Own Plugin](docs/features/plugins/make_your_own.md)
* [Using official ChatGPT Plugins](docs/features/plugins/chatgpt_plugins_openapi.md)
* [Proxy](docs/features/proxy.md)
* [Bing Jailbreak](docs/features/bing_jailbreak.md)


@@ -39,7 +39,7 @@ const askBing = async ({
jailbreakConversationId = false;
}
if (jailbreak)
if (jailbreak) {
options = {
jailbreakConversationId: jailbreakConversationId || jailbreak,
context,
@@ -47,8 +47,18 @@ const askBing = async ({
parentMessageId,
toneStyle,
onProgress,
clientOptions: {
features: {
genImage: {
server: {
enable: true,
type: 'markdown_list',
},
},
},
},
};
else {
} else {
options = {
conversationId,
context,
@@ -56,6 +66,16 @@ const askBing = async ({
parentMessageId,
toneStyle,
onProgress,
clientOptions: {
features: {
genImage: {
server: {
enable: true,
type: 'markdown_list',
},
},
},
},
};
// don't give those parameters for new conversation


@@ -6,7 +6,10 @@ const {
} = require('@dqbd/tiktoken');
const { maxTokensMap, genAzureChatCompletion } = require('../../utils');
// Cache to store Tiktoken instances
const tokenizersCache = {};
// Counter for keeping track of the number of tokenizer calls
let tokenizerCallsCount = 0;
class OpenAIClient extends BaseClient {
constructor(apiKey, options = {}) {
@@ -89,7 +92,6 @@ class OpenAIClient extends BaseClient {
this.chatGptLabel = this.options.chatGptLabel || 'Assistant';
this.setupTokens();
this.setupTokenizer();
if (!this.modelOptions.stop) {
const stopTokens = [this.startToken];
@@ -133,68 +135,87 @@ class OpenAIClient extends BaseClient {
}
}
setupTokenizer() {
// Selects an appropriate tokenizer based on the current configuration of the client instance.
// It takes into account factors such as whether it's a chat completion, an unofficial chat GPT model, etc.
selectTokenizer() {
let tokenizer;
this.encoding = 'text-davinci-003';
if (this.isChatCompletion) {
this.encoding = 'cl100k_base';
this.gptEncoder = this.constructor.getTokenizer(this.encoding);
tokenizer = this.constructor.getTokenizer(this.encoding);
} else if (this.isUnofficialChatGptModel) {
this.gptEncoder = this.constructor.getTokenizer(this.encoding, true, {
const extendSpecialTokens = {
'<|im_start|>': 100264,
'<|im_end|>': 100265,
});
};
tokenizer = this.constructor.getTokenizer(this.encoding, true, extendSpecialTokens);
} else {
try {
this.encoding = this.modelOptions.model;
this.gptEncoder = this.constructor.getTokenizer(this.modelOptions.model, true);
tokenizer = this.constructor.getTokenizer(this.modelOptions.model, true);
} catch {
this.gptEncoder = this.constructor.getTokenizer(this.encoding, true);
tokenizer = this.constructor.getTokenizer(this.encoding, true);
}
}
}
static getTokenizer(encoding, isModelName = false, extendSpecialTokens = {}) {
if (tokenizersCache[encoding]) {
return tokenizersCache[encoding];
}
let tokenizer;
if (isModelName) {
tokenizer = encodingForModel(encoding, extendSpecialTokens);
} else {
tokenizer = getEncoding(encoding, extendSpecialTokens);
}
tokenizersCache[encoding] = tokenizer;
return tokenizer;
}
freeAndResetEncoder() {
try {
if (!this.gptEncoder) {
return;
// Retrieves a tokenizer either from the cache or creates a new one if one doesn't exist in the cache.
// If a tokenizer is being created, it's also added to the cache.
static getTokenizer(encoding, isModelName = false, extendSpecialTokens = {}) {
let tokenizer;
if (tokenizersCache[encoding]) {
tokenizer = tokenizersCache[encoding];
} else {
if (isModelName) {
tokenizer = encodingForModel(encoding, extendSpecialTokens);
} else {
tokenizer = getEncoding(encoding, extendSpecialTokens);
}
this.gptEncoder.free();
delete tokenizersCache[this.encoding];
delete tokenizersCache.count;
this.setupTokenizer();
tokenizersCache[encoding] = tokenizer;
}
return tokenizer;
}
// Frees all encoders in the cache and resets the count.
static freeAndResetAllEncoders() {
try {
Object.keys(tokenizersCache).forEach((key) => {
if (tokenizersCache[key]) {
tokenizersCache[key].free();
delete tokenizersCache[key];
}
});
// Reset count
tokenizerCallsCount = 1;
} catch (error) {
console.log('freeAndResetEncoder error');
console.log('Free and reset encoders error');
console.error(error);
}
}
getTokenCount(text) {
try {
if (tokenizersCache.count >= 25) {
if (this.options.debug) {
console.debug('freeAndResetEncoder: reached 25 encodings, reseting...');
}
this.freeAndResetEncoder();
// Checks if the cache of tokenizers has reached a certain size. If it has, it frees and resets all tokenizers.
resetTokenizersIfNecessary() {
if (tokenizerCallsCount >= 25) {
if (this.options.debug) {
console.debug('freeAndResetAllEncoders: reached 25 encodings, resetting...');
}
tokenizersCache.count = (tokenizersCache.count || 0) + 1;
return this.gptEncoder.encode(text, 'all').length;
this.constructor.freeAndResetAllEncoders();
}
tokenizerCallsCount++;
}
// Returns the token count of a given text. It also checks and resets the tokenizers if necessary.
getTokenCount(text) {
this.resetTokenizersIfNecessary();
try {
const tokenizer = this.selectTokenizer();
return tokenizer.encode(text, 'all').length;
} catch (error) {
this.freeAndResetEncoder();
return this.gptEncoder.encode(text, 'all').length;
this.constructor.freeAndResetAllEncoders();
const tokenizer = this.selectTokenizer();
return tokenizer.encode(text, 'all').length;
}
}


@@ -2,6 +2,7 @@ const OpenAIClient = require('./OpenAIClient');
const { ChatOpenAI } = require('langchain/chat_models/openai');
const { CallbackManager } = require('langchain/callbacks');
const { initializeCustomAgent, initializeFunctionsAgent } = require('./agents/');
const { findMessageContent } = require('../../utils');
const { loadTools } = require('./tools/util');
const { SelfReflectionTool } = require('./tools/');
const { HumanChatMessage, AIChatMessage } = require('langchain/schema');
@@ -112,8 +113,8 @@ Only respond with your conversational reply to the following User Message:
super.setOptions(options);
this.isGpt3 = this.modelOptions.model.startsWith('gpt-3');
if (this.reverseProxyUrl) {
this.langchainProxy = this.reverseProxyUrl.match(/.*v1/)[0];
if (this.options.reverseProxyUrl) {
this.langchainProxy = this.options.reverseProxyUrl.match(/.*v1/)[0];
}
}
@@ -131,14 +132,13 @@ Only respond with your conversational reply to the following User Message:
}
getFunctionModelName(input) {
const prefixMap = {
'gpt-4': 'gpt-4-0613',
'gpt-4-32k': 'gpt-4-32k-0613',
'gpt-3.5-turbo': 'gpt-3.5-turbo-0613',
};
const prefix = Object.keys(prefixMap).find((key) => input.startsWith(key));
return prefix ? prefixMap[prefix] : 'gpt-3.5-turbo-0613';
if (input.startsWith('gpt-3.5-turbo')) {
return 'gpt-3.5-turbo';
} else if (input.startsWith('gpt-4')) {
return 'gpt-4';
} else {
return 'gpt-3.5-turbo';
}
}
getBuildMessagesOptions(opts) {
@@ -183,7 +183,9 @@ Only respond with your conversational reply to the following User Message:
const model = this.createLLM(modelOptions, configOptions);
if (this.options.debug) {
console.debug(`<-----Agent Model: ${model.modelName} | Temp: ${model.temperature}----->`);
console.debug(
`<-----Agent Model: ${model.modelName} | Temp: ${model.temperature} | Functions: ${this.functionsAgent}----->`,
);
}
this.availableTools = await loadTools({
@@ -193,6 +195,8 @@ Only respond with your conversational reply to the following User Message:
functions: this.functionsAgent,
options: {
openAIApiKey: this.openAIApiKey,
debug: this.options?.debug,
message,
},
});
// load tools
@@ -266,6 +270,15 @@ Only respond with your conversational reply to the following User Message:
if (this.options.debug) {
console.debug('Loaded agent.');
}
onAgentAction(
{
tool: 'self-reflection',
toolInput: `Processing the User's message:\n"${message}"`,
log: '',
},
true,
);
}
async executorCall(message, signal) {
@@ -290,6 +303,11 @@ Only respond with your conversational reply to the following User Message:
} catch (err) {
console.error(err);
errorMessage = err.message;
const content = findMessageContent(message);
if (content) {
errorMessage = content;
break;
}
if (attempts === maxAttempts) {
this.result.output = `Encountered an error while attempting to respond. Error: ${err.message}`;
this.result.intermediateSteps = this.actions;
@@ -311,7 +329,12 @@ Only respond with your conversational reply to the following User Message:
return;
}
if (!responseMessage.text.includes(observation)) {
// Extract the image file path from the observation
const observedImagePath = observation.match(/\(\/images\/.*\.\w*\)/g)[0];
// Check if the responseMessage already includes the image file path
if (!responseMessage.text.includes(observedImagePath)) {
// If the image file path is not found, append the whole observation
responseMessage.text += '\n' + observation;
if (this.options.debug) {
console.debug('added image from intermediateSteps');
@@ -408,7 +431,7 @@ Only respond with your conversational reply to the following User Message:
if (this.agentOptions.skipCompletion && this.result.output) {
responseMessage.text = this.result.output;
this.addImages(this.result.intermediateSteps, responseMessage);
await this.generateTextStream(this.result.output, opts.onProgress);
await this.generateTextStream(this.result.output, opts.onProgress, { delay: 8 });
return await this.handleResponseMessage(responseMessage, saveOptions, user);
}
@@ -469,7 +492,7 @@ Only respond with your conversational reply to the following User Message:
}
// testing if this works with browser endpoint
if (!this.isGpt3 && this.reverseProxyUrl) {
if (!this.isGpt3 && this.options.reverseProxyUrl) {
instructionsPayload.role = 'user';
}


@@ -1,7 +1,7 @@
const OpenAIClient = require('../OpenAIClient');
describe('OpenAIClient', () => {
let client;
let client, client2;
const model = 'gpt-4';
const parentMessageId = '1';
const messages = [
@@ -19,11 +19,13 @@ describe('OpenAIClient', () => {
},
};
client = new OpenAIClient('test-api-key', options);
client2 = new OpenAIClient('test-api-key', options);
client.refineMessages = jest.fn().mockResolvedValue({
role: 'assistant',
content: 'Refined answer',
tokenCount: 30,
});
client.constructor.freeAndResetAllEncoders();
});
describe('setOptions', () => {
@@ -34,10 +36,25 @@ describe('OpenAIClient', () => {
});
});
describe('freeAndResetEncoder', () => {
it('should reset the encoder', () => {
client.freeAndResetEncoder();
expect(client.gptEncoder).toBeDefined();
describe('selectTokenizer', () => {
it('should get the correct tokenizer based on the instance state', () => {
const tokenizer = client.selectTokenizer();
expect(tokenizer).toBeDefined();
});
});
describe('freeAllTokenizers', () => {
it('should free all tokenizers', () => {
// Create a tokenizer
const tokenizer = client.selectTokenizer();
// Mock 'free' method on the tokenizer
tokenizer.free = jest.fn();
client.constructor.freeAndResetAllEncoders();
// Check if 'free' method has been called on the tokenizer
expect(tokenizer.free).toHaveBeenCalled();
});
});
@@ -48,7 +65,7 @@ describe('OpenAIClient', () => {
});
it('should reset the encoder and count when count reaches 25', () => {
const freeAndResetEncoderSpy = jest.spyOn(client, 'freeAndResetEncoder');
const freeAndResetEncoderSpy = jest.spyOn(client.constructor, 'freeAndResetAllEncoders');
// Call getTokenCount 25 times
for (let i = 0; i < 25; i++) {
@@ -59,7 +76,8 @@ describe('OpenAIClient', () => {
});
it('should not reset the encoder and count when count is less than 25', () => {
const freeAndResetEncoderSpy = jest.spyOn(client, 'freeAndResetEncoder');
const freeAndResetEncoderSpy = jest.spyOn(client.constructor, 'freeAndResetAllEncoders');
freeAndResetEncoderSpy.mockClear();
// Call getTokenCount 24 times
for (let i = 0; i < 24; i++) {
@@ -70,8 +88,10 @@ describe('OpenAIClient', () => {
});
it('should handle errors and reset the encoder', () => {
const freeAndResetEncoderSpy = jest.spyOn(client, 'freeAndResetEncoder');
client.gptEncoder.encode = jest.fn().mockImplementation(() => {
const freeAndResetEncoderSpy = jest.spyOn(client.constructor, 'freeAndResetAllEncoders');
// Mock encode function to throw an error
client.selectTokenizer().encode = jest.fn().mockImplementation(() => {
throw new Error('Test error');
});
@@ -79,6 +99,14 @@ describe('OpenAIClient', () => {
expect(freeAndResetEncoderSpy).toHaveBeenCalled();
});
it('should not throw null pointer error when freeing the same encoder twice', () => {
client.constructor.freeAndResetAllEncoders();
client2.constructor.freeAndResetAllEncoders();
const count = client2.getTokenCount('test text');
expect(count).toBeGreaterThan(0);
});
});
describe('getSaveOptions', () => {


@@ -0,0 +1,18 @@
{
"schema_version": "v1",
"name_for_human": "Ai PDF",
"name_for_model": "Ai_PDF",
"description_for_human": "Super-fast, interactive chats with PDFs of any size, complete with page references for fact checking.",
"description_for_model": "Provide a URL to a PDF and search the document. Break the user question in multiple semantic search queries and calls as needed. Think step by step.",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "https://plugin-3c56b9d4c8a6465998395f28b6a445b2-jexkai4vea-uc.a.run.app/openapi.yaml",
"is_user_authenticated": false
},
"logo_url": "https://plugin-3c56b9d4c8a6465998395f28b6a445b2-jexkai4vea-uc.a.run.app/logo.png",
"contact_email": "support@promptapps.ai",
"legal_info_url": "https://plugin-3c56b9d4c8a6465998395f28b6a445b2-jexkai4vea-uc.a.run.app/legal.html"
}

File diff suppressed because one or more lines are too long


@@ -0,0 +1,97 @@
{
"schema_version": "v1",
"name_for_human": "Dr. Thoth's Tarot",
"name_for_model": "Dr_Thoths_Tarot",
"description_for_human": "Tarot card novelty entertainment & analysis, by Mnemosyne Labs.",
"description_for_model": "Intelligent analysis program for tarot card entertaiment, data, & prompts, by Mnemosyne Labs, a division of AzothCorp.",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "https://dr-thoth-tarot.herokuapp.com/openapi.yaml",
"is_user_authenticated": false
},
"logo_url": "https://dr-thoth-tarot.herokuapp.com/logo.png",
"contact_email": "legal@AzothCorp.com",
"legal_info_url": "http://AzothCorp.com/legal",
"endpoints": [
{
"name": "Draw Card",
"path": "/drawcard",
"method": "GET",
"description": "Generate a single tarot card from the deck of 78 cards."
},
{
"name": "Occult Card",
"path": "/occult_card",
"method": "GET",
"description": "Generate a tarot card using the specified planet's Kamea matrix.",
"parameters": [
{
"name": "planet",
"type": "string",
"enum": [
"Saturn",
"Jupiter",
"Mars",
"Sun",
"Venus",
"Mercury",
"Moon"
],
"required": true,
"description": "The planet name to use the corresponding Kamea matrix."
}
]
},
{
"name": "Three Card Spread",
"path": "/threecardspread",
"method": "GET",
"description": "Perform a three-card tarot spread."
},
{
"name": "Celtic Cross Spread",
"path": "/celticcross",
"method": "GET",
"description": "Perform a Celtic Cross tarot spread with 10 cards."
},
{
"name": "Past, Present, Future Spread",
"path": "/pastpresentfuture",
"method": "GET",
"description": "Perform a Past, Present, Future tarot spread with 3 cards."
},
{
"name": "Horseshoe Spread",
"path": "/horseshoe",
"method": "GET",
"description": "Perform a Horseshoe tarot spread with 7 cards."
},
{
"name": "Relationship Spread",
"path": "/relationship",
"method": "GET",
"description": "Perform a Relationship tarot spread."
},
{
"name": "Career Spread",
"path": "/career",
"method": "GET",
"description": "Perform a Career tarot spread."
},
{
"name": "Yes/No Spread",
"path": "/yesno",
"method": "GET",
"description": "Perform a Yes/No tarot spread."
},
{
"name": "Chakra Spread",
"path": "/chakra",
"method": "GET",
"description": "Perform a Chakra tarot spread with 7 cards."
}
]
}


@@ -0,0 +1,18 @@
{
"schema_version": "v1",
"name_for_model": "DreamInterpreter",
"name_for_human": "Dream Interpreter",
"description_for_model": "Interprets your dreams using advanced techniques.",
"description_for_human": "Interprets your dreams using advanced techniques.",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "https://dreamplugin.bgnetmobile.com/.well-known/openapi.json",
"has_user_authentication": false
},
"logo_url": "https://dreamplugin.bgnetmobile.com/.well-known/logo.png",
"contact_email": "ismail.orkler@bgnetmobile.com",
"legal_info_url": "https://dreamplugin.bgnetmobile.com/terms.html"
}


@@ -0,0 +1,22 @@
{
"schema_version": "v1",
"name_for_human": "VoxScript",
"name_for_model": "VoxScript",
"description_for_human": "Enables searching of YouTube transcripts, financial data sources Google Search results, and more!",
"description_for_model": "Plugin for searching through varius data sources.",
"auth": {
"type": "service_http",
"authorization_type": "bearer",
"verification_tokens": {
"openai": "ffc5226d1af346c08a98dee7deec9f76"
}
},
"api": {
"type": "openapi",
"url": "https://voxscript.awt.icu/swagger/v1/swagger.yaml",
"is_user_authenticated": false
},
"logo_url": "https://voxscript.awt.icu/images/VoxScript_logo_32x32.png",
"contact_email": "voxscript@allwiretech.com",
"legal_info_url": "https://voxscript.awt.icu/legal/"
}


@@ -0,0 +1,22 @@
{
"schema_version": "v1",
"name_for_human": "Ai Tool Hunt",
"name_for_model": "aitoolhunt",
"description_for_human": "Find the perfect AI tools for all your needs, drawn from the most comprehensive global database of AI tools.",
"description_for_model": "This universal plugin is designed to identify and suggest AI tools for any imaginable task or challenge. It harnesses a database of over 1600+ AI tools and 120+ categories, ensuring a solution for every requirement. Simply input any task, problem, or use case, and the plugin will generate a tailored list of AI tools. Note: it's crucial to rearrange the tools suggested by the plugin based on their relevance to the user's specific use case. This plugin can tackle everything from the simplest to the most intricate tasks. If there's a way an AI tool can help, this plugin will not only locate it but also customize the list to the user's unique needs. Utilize this plugin whenever a task or activity emerges, regardless of its context or complexity. When formulating a search query, avoid stop words or other irrelevant keywords. For instance, 'copywriting' is acceptable, but 'ai for copywriting' is not. If you believe none of the suggested tools are a suitable match for the user's needs, indicate that these are related tools.",
"auth": {
"type": "service_http",
"authorization_type": "bearer",
"verification_tokens": {
"openai": "06a0f9391a5e48c7a7eeaca1e7e1e8d3"
}
},
"api": {
"type": "openapi",
"url": "https://www.aitoolhunt.com/openapi.json",
"is_user_authenticated": false
},
"logo_url": "https://www.aitoolhunt.com/images/aitoolhunt_logo.png",
"contact_email": "aitoolhunt@gmail.com",
"legal_info_url": "https://www.aitoolhunt.com/terms-and-conditions"
}


@@ -0,0 +1,18 @@
{
"schema_version": "v1",
"name_for_model": "askyourpdf",
"name_for_human": "AskYourPDF",
"description_for_model": "This plugin is designed to expedite the extraction of information from PDF documents. It works by accepting a URL link to a PDF or a document ID (doc_id) from the user. If a URL is provided, the plugin first validates that it is a correct URL. \\nAfter validating the URL, the plugin proceeds to download the PDF and store its content in a vector database. If the user provides a doc_id, the plugin directly retrieves the document from the database. The plugin then scans through the stored PDFs to find answers to user queries or retrieve specific details.\\n\\nHowever, if an error occurs while querying the API, the user is prompted to download their document first, then manually upload it to [![Upload Document](https://raw.githubusercontent.com/AskYourPdf/ask-plugin/main/upload.png)](https://askyourpdf.com/upload). Once the upload is complete, the user should copy the resulting doc_id and paste it back into the chat for further interaction.\nThe plugin is particularly useful when the user's question pertains to content within a PDF document. When providing answers, the plugin also specifies the page number (highlighted in bold) where the relevant information was found. Remember, the URL must be valid for a successful query. Failure to validate the URL may lead to errors or unsuccessful queries.",
"description_for_human": "Unlock the power of your PDFs!, dive into your documents, find answers, and bring information to your fingertips.",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "askyourpdf.yaml",
"has_user_authentication": false
},
"logo_url": "https://plugin.askyourpdf.com/.well-known/logo.png",
"contact_email": "plugin@askyourpdf.com",
"legal_info_url": "https://askyourpdf.com/terms"
}

View File

@@ -0,0 +1,18 @@
{
"schema_version": "v1",
"name_for_human": "Drink Maestro",
"name_for_model": "drink_maestro",
"description_for_human": "Learn to mix any drink you can imagine (real or made-up), and discover new ones. Includes drink images.",
"description_for_model": "You are a silly bartender/comic who knows how to make any drink imaginable. You provide recipes for specific drinks, suggest new drinks, and show pictures of drinks. Be creative in your descriptions and make jokes and puns. Use a lot of emojis. If the user makes a request in another language, send API call in English, and then translate the response.",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "https://api.drinkmaestro.space/.well-known/openapi.yaml",
"is_user_authenticated": false
},
"logo_url": "https://i.imgur.com/6q8HWdz.png",
"contact_email": "nikkmitchell@gmail.com",
"legal_info_url": "https://github.com/nikkmitchell/DrinkMaestro/blob/main/Legal.txt"
}

View File

@@ -0,0 +1,18 @@
{
"schema_version": "v1",
"name_for_human": "Earth",
"name_for_model": "earthImagesAndVisualizations",
"description_for_human": "Generates a map image based on provided location, tilt and style.",
"description_for_model": "Generates a map image based on provided coordinates or location, tilt and style, and even geoJson to provide markers, paths, and polygons. Responds with an image-link. For the styles choose one of these: [light, dark, streets, outdoors, satellite, satellite-streets]",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "https://api.earth-plugin.com/openapi.yaml",
"is_user_authenticated": false
},
"logo_url": "https://api.earth-plugin.com/logo.png",
"contact_email": "contact@earth-plugin.com",
"legal_info_url": "https://api.earth-plugin.com/legal.html"
}

View File

@@ -0,0 +1,18 @@
{
"schema_version": "v1",
"name_for_human": "Scholarly Graph Link",
"name_for_model": "scholarly_graph_link",
"description_for_human": "You can search papers, authors, datasets and software. It has access to Figshare, Arxiv, and many others.",
"description_for_model": "Run GraphQL queries against an API hosted by DataCite API. The API supports most GraphQL query but does not support mutations statements. Use `{ __schema { types { name kind } } }` to get all the types in the GraphQL schema. Use `{ datasets { nodes { id sizes citations { nodes { id titles { title } } } } } }` to get all the citations of all datasets in the API. Use `{ datasets { nodes { id sizes citations { nodes { id titles { title } } } } } }` to get all the citations of all datasets in the API. Use `{person(id:ORCID) {works(first:50) {nodes {id titles(first: 1){title} publicationYear}}}}` to get the first 50 works of a person based on their ORCID. All Ids are urls, e.g., https://orcid.org/0012-0000-1012-1110. Mutations statements are not allowed.",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "https://api.datacite.org/graphql-openapi.yaml",
"is_user_authenticated": false
},
"logo_url": "https://raw.githubusercontent.com/kjgarza/scholarly_graph_link/master/logo.png",
"contact_email": "kj.garza@gmail.com",
"legal_info_url": "https://github.com/kjgarza/scholarly_graph_link/blob/master/LICENSE"
}

View File
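The Scholarly Graph Link description above embeds the exact GraphQL queries the model is expected to run. As a point of reference, here is a minimal sketch of issuing one of them directly; the endpoint URL (https://api.datacite.org/graphql) is an assumption based on DataCite's public API, and the ORCID is the placeholder from the description. The plugin itself routes through the OpenAPI wrapper referenced in the manifest.

```js
// Sketch: run the "first 50 works of a person" query from the description above
// against DataCite's GraphQL API (endpoint URL assumed).
async function firstWorksByOrcid(orcid) {
  const query = `{ person(id: "${orcid}") { works(first: 50) { nodes { id titles(first: 1) { title } publicationYear } } } }`;
  const res = await fetch('https://api.datacite.org/graphql', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ query }),
  });
  const { data } = await res.json();
  return data?.person?.works?.nodes ?? [];
}

// Example (placeholder ORCID taken from the description above):
// firstWorksByOrcid('https://orcid.org/0012-0000-1012-1110').then(console.log);
```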

@@ -0,0 +1,24 @@
{
"schema_version": "v1",
"name_for_human": "WebPilot",
"name_for_model": "web_pilot",
"description_for_human": "Browse & QA Webpage/PDF/Data. Generate articles, from one or more URLs.",
"description_for_model": "This tool allows users to provide a URL(or URLs) and optionally requests for interacting with, extracting specific information or how to do with the content from the URL. Requests may include rewrite, translate, and others. If there any requests, when accessing the /api/visit-web endpoint, the parameter 'user_has_request' should be set to 'true. And if there's no any requests, 'user_has_request' should be set to 'false'.",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "https://webreader.webpilotai.com/openapi.yaml",
"is_user_authenticated": false
},
"logo_url": "https://webreader.webpilotai.com/logo.png",
"contact_email": "dev@webpilot.ai",
"legal_info_url": "https://webreader.webpilotai.com/legal_info.html",
"headers": {
"id": "WebPilot-Friend-UID"
},
"params": {
"user_has_request": true
}
}

View File

@@ -0,0 +1,18 @@
{
"schema_version": "v1",
"name_for_human": "Image Prompt Enhancer",
"name_for_model": "image_prompt_enhancer",
"description_for_human": "Transform your ideas into complex, personalized image generation prompts.",
"description_for_model": "Provides instructions for crafting an enhanced image prompt. Use this whenever the user wants to enhance a prompt.",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "https://image-prompt-enhancer.gafo.tech/openapi.yaml",
"is_user_authenticated": false
},
"logo_url": "https://image-prompt-enhancer.gafo.tech/logo.png",
"contact_email": "gafotech1@gmail.com",
"legal_info_url": "https://image-prompt-enhancer.gafo.tech/legal"
}

View File

@@ -0,0 +1,157 @@
openapi: 3.0.2
info:
title: FastAPI
version: 0.1.0
servers:
- url: https://plugin.askyourpdf.com
paths:
/api/download_pdf:
post:
summary: Download Pdf
description: Download a PDF file from a URL and save it to the vector database.
operationId: download_pdf_api_download_pdf_post
parameters:
- required: true
schema:
title: Url
type: string
name: url
in: query
responses:
'200':
description: Successful Response
content:
application/json:
schema:
$ref: '#/components/schemas/FileResponse'
'422':
description: Validation Error
content:
application/json:
schema:
$ref: '#/components/schemas/HTTPValidationError'
/query:
post:
summary: Perform Query
description: Perform a query on a document.
operationId: perform_query_query_post
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/InputData'
required: true
responses:
'200':
description: Successful Response
content:
application/json:
schema:
$ref: '#/components/schemas/ResponseModel'
'422':
description: Validation Error
content:
application/json:
schema:
$ref: '#/components/schemas/HTTPValidationError'
components:
schemas:
DocumentMetadata:
title: DocumentMetadata
required:
- source
- page_number
- author
type: object
properties:
source:
title: Source
type: string
page_number:
title: Page Number
type: integer
author:
title: Author
type: string
FileResponse:
title: FileResponse
required:
- docId
type: object
properties:
docId:
title: Docid
type: string
error:
title: Error
type: string
HTTPValidationError:
title: HTTPValidationError
type: object
properties:
detail:
title: Detail
type: array
items:
$ref: '#/components/schemas/ValidationError'
InputData:
title: InputData
required:
- doc_id
- query
type: object
properties:
doc_id:
title: Doc Id
type: string
query:
title: Query
type: string
ResponseModel:
title: ResponseModel
required:
- results
type: object
properties:
results:
title: Results
type: array
items:
$ref: '#/components/schemas/SearchResult'
SearchResult:
title: SearchResult
required:
- doc_id
- text
- metadata
type: object
properties:
doc_id:
title: Doc Id
type: string
text:
title: Text
type: string
metadata:
$ref: '#/components/schemas/DocumentMetadata'
ValidationError:
title: ValidationError
required:
- loc
- msg
- type
type: object
properties:
loc:
title: Location
type: array
items:
anyOf:
- type: string
- type: integer
msg:
title: Message
type: string
type:
title: Error Type
type: string

View File
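The askyourpdf.yaml spec above defines the two-step flow described in the manifest: ingest a PDF via /api/download_pdf, then search it via /query. Below is a minimal sketch against that spec; it assumes Node 18+ (global fetch) and that the hosted endpoint accepts calls outside the plugin sandbox.

```js
// Two-step AskYourPDF flow per the spec above: POST /api/download_pdf?url=...
// returns a FileResponse with docId; POST /query with InputData { doc_id, query }
// returns ResponseModel { results: SearchResult[] }.
const BASE = 'https://plugin.askyourpdf.com';

async function askYourPdf(pdfUrl, question) {
  // 1) Ingest the PDF into the vector store.
  const ingestRes = await fetch(`${BASE}/api/download_pdf?url=${encodeURIComponent(pdfUrl)}`, {
    method: 'POST',
  });
  const { docId, error } = await ingestRes.json();
  if (error) {
    throw new Error(error);
  }

  // 2) Query the stored document.
  const queryRes = await fetch(`${BASE}/query`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ doc_id: docId, query: question }),
  });
  const { results } = await queryRes.json();

  // Each SearchResult carries the matched text plus metadata (source, page_number, author).
  return results.map((r) => `p. ${r.metadata.page_number}: ${r.text}`);
}
```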

@@ -0,0 +1,185 @@
openapi: 3.0.1
info:
title: ScholarAI
description: Allows the user to search facts and findings from scientific articles
version: 'v1'
servers:
- url: https://scholar-ai.net
paths:
/api/abstracts:
get:
operationId: searchAbstracts
summary: Get relevant paper abstracts by keywords search
parameters:
- name: keywords
in: query
description: Keywords of inquiry which should appear in article. Must be in English.
required: true
schema:
type: string
- name: sort
in: query
description: The sort order for results. Valid values are cited_by_count or publication_date. Excluding this value does a relevance based search.
required: false
schema:
type: string
enum:
- cited_by_count
- publication_date
- name: query
in: query
description: The user query
required: true
schema:
type: string
- name: peer_reviewed_only
in: query
description: Whether to only return peer reviewed articles. Defaults to true, ChatGPT should cautiously suggest this value can be set to false
required: false
schema:
type: string
- name: start_year
in: query
description: The first year, inclusive, to include in the search range. Excluding this value will include all years.
required: false
schema:
type: string
- name: end_year
in: query
description: The last year, inclusive, to include in the search range. Excluding this value will include all years.
required: false
schema:
type: string
- name: offset
in: query
description: The offset of the first result to return. Defaults to 0.
required: false
schema:
type: string
responses:
"200":
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/searchAbstractsResponse'
/api/fulltext:
get:
operationId: getFullText
summary: Get full text of a paper by URL for PDF
parameters:
- name: pdf_url
in: query
description: URL for PDF
required: true
schema:
type: string
- name: chunk
in: query
description: chunk number to retrieve, defaults to 1
required: false
schema:
type: number
responses:
"200":
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/getFullTextResponse'
/api/save-citation:
get:
operationId: saveCitation
summary: Save citation to reference manager
parameters:
- name: doi
in: query
description: Digital Object Identifier (DOI) of article
required: true
schema:
type: string
- name: zotero_user_id
in: query
description: Zotero User ID
required: true
schema:
type: string
- name: zotero_api_key
in: query
description: Zotero API Key
required: true
schema:
type: string
responses:
"200":
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/saveCitationResponse'
components:
schemas:
searchAbstractsResponse:
type: object
properties:
next_offset:
type: number
description: The offset of the next page of results.
total_num_results:
type: number
description: The total number of results.
abstracts:
type: array
items:
type: object
properties:
title:
type: string
abstract:
type: string
description: Summary of the context, methods, results, and conclusions of the paper.
doi:
type: string
description: The DOI of the paper.
landing_page_url:
type: string
description: Link to the paper on its open-access host.
pdf_url:
type: string
description: Link to the paper PDF.
publicationDate:
type: string
description: The date the paper was published in YYYY-MM-DD format.
relevance:
type: number
description: The relevance of the paper to the search query. 1 is the most relevant.
creators:
type: array
items:
type: string
description: The name of the creator.
cited_by_count:
type: number
description: The number of citations of the article.
description: The list of relevant abstracts.
getFullTextResponse:
type: object
properties:
full_text:
type: string
description: The full text of the paper.
pdf_url:
type: string
description: The PDF URL of the paper.
chunk:
type: number
description: The chunk of the paper.
total_chunk_num:
type: number
description: The total chunks of the paper.
saveCitationResponse:
type: object
properties:
message:
type: string
description: Confirmation of successful save or error message.

View File
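A minimal sketch of the searchAbstracts call defined in the ScholarAI spec above; the parameter names come straight from the spec, and the `sort` value mirrors the `params` override in the scholarai manifest further down in this diff. Whether the hosted endpoint is callable outside the plugin context is an assumption.

```js
// GET /api/abstracts per the ScholarAI spec above; keywords and query are required,
// the rest are optional filters.
async function searchAbstracts(keywords, userQuery) {
  const params = new URLSearchParams({
    keywords,                // required, must be in English
    query: userQuery,        // required, the user's question
    sort: 'cited_by_count',  // optional; the manifest below pins this via its `params` field
    peer_reviewed_only: 'true',
  });
  const res = await fetch(`https://scholar-ai.net/api/abstracts?${params}`);
  const { total_num_results, abstracts, next_offset } = await res.json();

  // Each abstract entry exposes title, abstract, doi, pdf_url, cited_by_count, etc.
  return { total_num_results, next_offset, abstracts };
}
```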

@@ -0,0 +1,17 @@
{
"schema_version": "v1",
"name_for_human": "QR Codes",
"name_for_model": "qrCodes",
"description_for_human": "Create QR codes.",
"description_for_model": "Plugin for generating QR codes.",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "https://chatgpt-qrcode-46d7d4ebefc8.herokuapp.com/openapi.yaml"
},
"logo_url": "https://chatgpt-qrcode-46d7d4ebefc8.herokuapp.com/logo.png",
"contact_email": "chrismountzou@gmail.com",
"legal_info_url": "https://raw.githubusercontent.com/mountzou/qrCodeGPTv1/master/legal"
}

View File

@@ -0,0 +1,18 @@
{
"schema_version": "v1",
"name_for_human": "Prompt Perfect",
"name_for_model": "rephrase",
"description_for_human": "Type 'perfect' to craft the perfect prompt, every time.",
"description_for_model": "Plugin that can rephrase user inputs to improve the quality of ChatGPT's responses. The plugin evaluates user inputs and, if necessary, transforms them into clearer, more specific, and contextual prompts. It processes a JSON object containing the user input to be rephrased and uses the GPT-3.5-turbo model for the rephrasing process. The rephrased input is then returned as raw data to be incorporated into ChatGPT's response. The user can initiate the plugin by typing 'perfect'.",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "https://promptperfect.xyz/openapi.yaml",
"is_user_authenticated": false
},
"logo_url": "https://promptperfect.xyz/static/prompt_perfect_logo.png",
"contact_email": "heyo@promptperfect.xyz",
"legal_info_url": "https://promptperfect.xyz/static/terms.html"
}

View File

@@ -0,0 +1,22 @@
{
"schema_version": "v1",
"name_for_human": "ScholarAI",
"name_for_model": "scholarai",
"description_for_human": "Unleash scientific research: search 40M+ peer-reviewed papers, explore scientific PDFs, and save to reference managers.",
"description_for_model": "Access open access scientific literature from peer-reviewed journals. The abstract endpoint finds relevant papers based on 2 to 6 keywords. After getting abstracts, ALWAYS prompt the user offering to go into more detail. Use the fulltext endpoint to retrieve the entire paper's text and access specific details using the provided pdf_url, if available. ALWAYS hyperlink the pdf_url from the responses if available. Offer to dive into the fulltext or search for additional papers. Always ask if the user wants save any paper to the users Zotero reference manager by using the save-citation endpoint and providing the doi and requesting the users zotero_user_id and zotero_api_key.",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "scholarai.yaml",
"is_user_authenticated": false
},
"params": {
"sort": "cited_by_count"
},
"logo_url": "https://scholar-ai.net/logo.png",
"contact_email": "lakshb429@gmail.com",
"legal_info_url": "https://scholar-ai.net/legal.txt",
"HttpAuthorizationType": "basic"
}

View File

@@ -0,0 +1,18 @@
{
"schema_version": "v1",
"name_for_human": "Uberchord",
"name_for_model": "uberchord",
"description_for_human": "Find guitar chord diagrams by specifying the chord name.",
"description_for_model": "Fetch guitar chord diagrams, their positions on the guitar fretboard.",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "https://guitarchords.pluginboost.com/.well-known/openapi.yaml",
"is_user_authenticated": false
},
"logo_url": "https://guitarchords.pluginboost.com/logo.png",
"contact_email": "info.bluelightweb@gmail.com",
"legal_info_url": "https://guitarchords.pluginboost.com/legal"
}

View File

@@ -0,0 +1,18 @@
{
"schema_version": "v1",
"name_for_human": "Web Search",
"name_for_model": "web_search",
"description_for_human": "Search for information from the internet",
"description_for_model": "Search for information from the internet",
"auth": {
"type": "none"
},
"api": {
"type": "openapi",
"url": "https://websearch.plugsugar.com/api/openapi_yaml",
"is_user_authenticated": false
},
"logo_url": "https://websearch.plugsugar.com/200x200.png",
"contact_email": "support@plugsugar.com",
"legal_info_url": "https://websearch.plugsugar.com/contact"
}

View File

@@ -0,0 +1,139 @@
require('dotenv').config();
const { z } = require('zod');
const fs = require('fs');
const yaml = require('js-yaml');
const path = require('path');
const { DynamicStructuredTool } = require('langchain/tools');
const { createOpenAPIChain } = require('langchain/chains');
const SUFFIX = 'Prioritize using responses for subsequent requests to better fulfill the query.';
const AuthBearer = z
.object({
type: z.string().includes('service_http'),
authorization_type: z.string().includes('bearer'),
verification_tokens: z.object({
openai: z.string(),
}),
})
.catch(() => false);
const AuthDefinition = z
.object({
type: z.string(),
authorization_type: z.string(),
verification_tokens: z.object({
openai: z.string(),
}),
})
.catch(() => false);
async function readSpecFile(filePath) {
try {
const fileContents = await fs.promises.readFile(filePath, 'utf8');
if (path.extname(filePath) === '.json') {
return JSON.parse(fileContents);
}
return yaml.load(fileContents);
} catch (e) {
console.error(e);
return false;
}
}
async function getSpec(url) {
const RegularUrl = z
.string()
.url()
.catch(() => false);
if (RegularUrl.parse(url) && path.extname(url) === '.json') {
const response = await fetch(url);
return await response.json();
}
const ValidSpecPath = z
.string()
.url()
.catch(async () => {
const spec = path.join(__dirname, '..', '.well-known', 'openapi', url);
if (!fs.existsSync(spec)) {
return false;
}
return await readSpecFile(spec);
});
return ValidSpecPath.parse(url);
}
async function createOpenAPIPlugin({ data, llm, user, message, verbose = false }) {
let spec;
try {
spec = await getSpec(data.api.url, verbose);
} catch (error) {
verbose && console.debug('getSpec error', error);
return null;
}
if (!spec) {
verbose && console.debug('No spec found');
return null;
}
const headers = {};
const { auth, description_for_model } = data;
if (auth && AuthDefinition.parse(auth)) {
verbose && console.debug('auth detected', auth);
const { openai } = auth.verification_tokens;
if (AuthBearer.parse(auth)) {
headers.authorization = `Bearer ${openai}`;
verbose && console.debug('added auth bearer', headers);
}
}
return new DynamicStructuredTool({
name: data.name_for_model,
description: `${data.description_for_human} ${SUFFIX}`,
schema: z.object({
query: z
.string()
.describe(
'For the query, be specific in a conversational manner. It will be interpreted by a human.',
),
}),
func: async () => {
const chainOptions = {
llm,
verbose,
};
if (data.headers && data.headers['librechat_user_id']) {
verbose && console.debug('id detected', headers);
headers[data.headers['librechat_user_id']] = user;
}
if (Object.keys(headers).length > 0) {
verbose && console.debug('headers detected', headers);
chainOptions.headers = headers;
}
if (data.params) {
verbose && console.debug('params detected', data.params);
chainOptions.params = data.params;
}
const chain = await createOpenAPIChain(spec, chainOptions);
const result = await chain.run(
`${message}\n\n||>Instructions: ${description_for_model}\n${SUFFIX}`,
);
console.log('api chain run result', result);
return result;
},
});
}
module.exports = {
getSpec,
readSpecFile,
createOpenAPIPlugin,
};

View File
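createOpenAPIPlugin above wraps a manifest plus its OpenAPI spec in a DynamicStructuredTool whose func runs a createOpenAPIChain over the user's message. The following is a hypothetical usage sketch: the manifest filename, the ChatOpenAI options, and the import path for the pinned langchain 0.0.x API are assumptions; only the shapes shown in the module itself are relied on.

```js
// Hypothetical wiring of the module above: load a .well-known manifest, build the
// tool, and invoke its func directly. The manifest filename is an assumption.
const fs = require('fs');
const path = require('path');
const { ChatOpenAI } = require('langchain/chat_models/openai');
const { createOpenAPIPlugin } = require('./OpenAPIPlugin');

(async () => {
  const manifestPath = path.join(__dirname, '..', '.well-known', 'askyourpdf.json'); // assumed filename
  const data = JSON.parse(fs.readFileSync(manifestPath, 'utf8'));

  const tool = await createOpenAPIPlugin({
    data,
    llm: new ChatOpenAI({ temperature: 0 }),
    user: 'user-id',
    message: 'Summarize https://example.com/paper.pdf',
    verbose: true,
  });

  // createOpenAPIPlugin returns null when the spec cannot be resolved.
  if (tool) {
    // The schema declares a `query` string, but func ultimately runs the chain over
    // the original message plus description_for_model, as defined above.
    const result = await tool.func({ query: 'Summarize the linked PDF' });
    console.log(result);
  }
})();
```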

@@ -0,0 +1,65 @@
const fs = require('fs');
const { createOpenAPIPlugin, getSpec, readSpecFile } = require('./OpenAPIPlugin');
jest.mock('node-fetch');
jest.mock('fs', () => ({
promises: {
readFile: jest.fn(),
},
existsSync: jest.fn(),
}));
describe('readSpecFile', () => {
it('reads JSON file correctly', async () => {
fs.promises.readFile.mockResolvedValue(JSON.stringify({ test: 'value' }));
const result = await readSpecFile('test.json');
expect(result).toEqual({ test: 'value' });
});
it('reads YAML file correctly', async () => {
fs.promises.readFile.mockResolvedValue('test: value');
const result = await readSpecFile('test.yaml');
expect(result).toEqual({ test: 'value' });
});
it('handles error correctly', async () => {
fs.promises.readFile.mockRejectedValue(new Error('test error'));
const result = await readSpecFile('test.json');
expect(result).toBe(false);
});
});
describe('getSpec', () => {
it('fetches spec from url correctly', async () => {
const parsedJson = await getSpec('https://www.instacart.com/.well-known/ai-plugin.json');
const isObject = typeof parsedJson === 'object';
expect(isObject).toEqual(true);
});
it('reads spec from file correctly', async () => {
fs.existsSync.mockReturnValue(true);
fs.promises.readFile.mockResolvedValue(JSON.stringify({ test: 'value' }));
const result = await getSpec('test.json');
expect(result).toEqual({ test: 'value' });
});
it('returns false when file does not exist', async () => {
fs.existsSync.mockReturnValue(false);
const result = await getSpec('test.json');
expect(result).toBe(false);
});
});
describe('createOpenAPIPlugin', () => {
it('returns null when getSpec throws an error', async () => {
const result = await createOpenAPIPlugin({ data: { api: { url: 'invalid' } } });
expect(result).toBe(null);
});
it('returns null when no spec is found', async () => {
const result = await createOpenAPIPlugin({});
expect(result).toBe(null);
});
// Add more tests here for different scenarios
});

View File

@@ -32,7 +32,7 @@
},
{
"name": "Browser",
"pluginKey": "browser",
"pluginKey": "web-browser",
"description": "Scrape and summarize webpage data",
"icon": "/assets/web-browser.svg",
"authConfig": [

View File

@@ -0,0 +1,31 @@
const { loadSpecs } = require('./loadSpecs');
function transformSpec(input) {
return {
name: input.name_for_human,
pluginKey: input.name_for_model,
description: input.description_for_human,
icon: input?.logo_url ?? 'https://placehold.co/70x70.png',
// TODO: add support for authentication
isAuthRequired: 'false',
authConfig: [],
};
}
async function addOpenAPISpecs(availableTools) {
try {
const specs = (await loadSpecs({})).map(transformSpec);
if (specs.length > 0) {
return [...specs, ...availableTools];
}
return availableTools;
} catch (error) {
console.log('addOpenAPISpecs error', error);
return availableTools;
}
}
module.exports = {
transformSpec,
addOpenAPISpecs,
};

View File

@@ -0,0 +1,76 @@
const { addOpenAPISpecs, transformSpec } = require('./addOpenAPISpecs');
const { loadSpecs } = require('./loadSpecs');
const { createOpenAPIPlugin } = require('../dynamic/OpenAPIPlugin');
jest.mock('./loadSpecs');
jest.mock('../dynamic/OpenAPIPlugin');
describe('transformSpec', () => {
it('should transform input spec to a desired format', () => {
const input = {
name_for_human: 'Human Name',
name_for_model: 'Model Name',
description_for_human: 'Human Description',
logo_url: 'https://example.com/logo.png',
};
const expectedOutput = {
name: 'Human Name',
pluginKey: 'Model Name',
description: 'Human Description',
icon: 'https://example.com/logo.png',
isAuthRequired: 'false',
authConfig: [],
};
expect(transformSpec(input)).toEqual(expectedOutput);
});
it('should use default icon if logo_url is not provided', () => {
const input = {
name_for_human: 'Human Name',
name_for_model: 'Model Name',
description_for_human: 'Human Description',
};
const expectedOutput = {
name: 'Human Name',
pluginKey: 'Model Name',
description: 'Human Description',
icon: 'https://placehold.co/70x70.png',
isAuthRequired: 'false',
authConfig: [],
};
expect(transformSpec(input)).toEqual(expectedOutput);
});
});
describe('addOpenAPISpecs', () => {
it('should add specs to available tools', async () => {
const availableTools = ['Tool1', 'Tool2'];
const specs = [
{
name_for_human: 'Human Name',
name_for_model: 'Model Name',
description_for_human: 'Human Description',
logo_url: 'https://example.com/logo.png',
},
];
loadSpecs.mockResolvedValue(specs);
createOpenAPIPlugin.mockReturnValue('Plugin');
const result = await addOpenAPISpecs(availableTools);
expect(result).toEqual([...specs.map(transformSpec), ...availableTools]);
});
it('should return available tools if specs loading fails', async () => {
const availableTools = ['Tool1', 'Tool2'];
loadSpecs.mockRejectedValue(new Error('Failed to load specs'));
const result = await addOpenAPISpecs(availableTools);
expect(result).toEqual(availableTools);
});
});

View File

@@ -16,6 +16,7 @@ const {
StableDiffusionAPI,
StructuredSD,
} = require('../');
const { loadSpecs } = require('./loadSpecs');
const validateTools = async (user, tools = []) => {
try {
@@ -80,7 +81,7 @@ const loadTools = async ({ user, model, functions = null, tools = [], options =
};
const customConstructors = {
browser: async () => {
'web-browser': async () => {
let openAIApiKey = options.openAIApiKey ?? process.env.OPENAI_API_KEY;
openAIApiKey = openAIApiKey === 'user_provided' ? null : openAIApiKey;
openAIApiKey = openAIApiKey || (await getUserPluginAuthValue(user, 'OPENAI_API_KEY'));
@@ -117,6 +118,17 @@ const loadTools = async ({ user, model, functions = null, tools = [], options =
};
const requestedTools = {};
let specs = null;
if (functions) {
specs = await loadSpecs({
llm: model,
user,
message: options.message,
map: true,
verbose: options?.debug,
});
console.dir(specs, { depth: null });
}
const toolOptions = {
serpapi: { location: 'Austin,Texas,United States', hl: 'en', gl: 'us' },
@@ -138,6 +150,11 @@ const loadTools = async ({ user, model, functions = null, tools = [], options =
continue;
}
if (specs && specs[tool]) {
requestedTools[tool] = specs[tool];
continue;
}
if (toolConstructors[tool]) {
const options = toolOptions[tool] || {};
const toolInstance = await loadToolWithAuth(

View File

@@ -0,0 +1,104 @@
const fs = require('fs');
const path = require('path');
const { z } = require('zod');
const { createOpenAPIPlugin } = require('../dynamic/OpenAPIPlugin');
// The minimum Manifest definition
const ManifestDefinition = z.object({
schema_version: z.string().optional(),
name_for_human: z.string(),
name_for_model: z.string(),
description_for_human: z.string(),
description_for_model: z.string(),
auth: z.object({}).optional(),
api: z.object({
// Spec URL or can be the filename of the OpenAPI spec yaml file,
// located in api\app\clients\tools\.well-known\openapi
url: z.string(),
type: z.string().optional(),
is_user_authenticated: z.boolean().nullable().optional(),
has_user_authentication: z.boolean().nullable().optional(),
}),
// use to override any params that the LLM will consistently get wrong
params: z.object({}).optional(),
logo_url: z.string().optional(),
contact_email: z.string().optional(),
legal_info_url: z.string().optional(),
});
function validateJson(json, verbose = true) {
try {
return ManifestDefinition.parse(json);
} catch (error) {
if (verbose) {
console.debug('validateJson error', error);
}
return false;
}
}
// omit the LLM to return the well known jsons as objects
async function loadSpecs({ llm, user, message, map = false, verbose = false }) {
const directoryPath = path.join(__dirname, '..', '.well-known');
const files = (await fs.promises.readdir(directoryPath)).filter(
(file) => path.extname(file) === '.json',
);
const validJsons = [];
const constructorMap = {};
if (verbose) {
console.debug('files', files);
}
for (const file of files) {
if (path.extname(file) === '.json') {
const filePath = path.join(directoryPath, file);
const fileContent = await fs.promises.readFile(filePath, 'utf8');
const json = JSON.parse(fileContent);
if (!validateJson(json)) {
verbose && console.debug('Invalid json', json);
continue;
}
if (llm && map) {
constructorMap[json.name_for_model] = async () =>
await createOpenAPIPlugin({
data: json,
llm,
message,
user,
verbose,
});
continue;
}
if (llm) {
validJsons.push(createOpenAPIPlugin({ data: json, llm, verbose }));
continue;
}
validJsons.push(json);
}
}
if (map) {
return constructorMap;
}
const plugins = (await Promise.all(validJsons)).filter((plugin) => plugin);
// if (verbose) {
// console.debug('plugins', plugins);
// console.debug(plugins[0].name);
// }
return plugins;
}
module.exports = {
loadSpecs,
validateJson,
ManifestDefinition,
};

View File

@@ -0,0 +1,101 @@
const fs = require('fs');
const { validateJson, loadSpecs, ManifestDefinition } = require('./loadSpecs');
const { createOpenAPIPlugin } = require('../dynamic/OpenAPIPlugin');
jest.mock('../dynamic/OpenAPIPlugin');
describe('ManifestDefinition', () => {
it('should validate correct json', () => {
const json = {
name_for_human: 'Test',
name_for_model: 'Test',
description_for_human: 'Test',
description_for_model: 'Test',
api: {
url: 'http://test.com',
},
};
expect(() => ManifestDefinition.parse(json)).not.toThrow();
});
it('should not validate incorrect json', () => {
const json = {
name_for_human: 'Test',
name_for_model: 'Test',
description_for_human: 'Test',
description_for_model: 'Test',
api: {
url: 123, // incorrect type
},
};
expect(() => ManifestDefinition.parse(json)).toThrow();
});
});
describe('validateJson', () => {
it('should return parsed json if valid', () => {
const json = {
name_for_human: 'Test',
name_for_model: 'Test',
description_for_human: 'Test',
description_for_model: 'Test',
api: {
url: 'http://test.com',
},
};
expect(validateJson(json)).toEqual(json);
});
it('should return false if json is not valid', () => {
const json = {
name_for_human: 'Test',
name_for_model: 'Test',
description_for_human: 'Test',
description_for_model: 'Test',
api: {
url: 123, // incorrect type
},
};
expect(validateJson(json)).toEqual(false);
});
});
describe('loadSpecs', () => {
beforeEach(() => {
jest.spyOn(fs.promises, 'readdir').mockResolvedValue(['test.json']);
jest.spyOn(fs.promises, 'readFile').mockResolvedValue(
JSON.stringify({
name_for_human: 'Test',
name_for_model: 'Test',
description_for_human: 'Test',
description_for_model: 'Test',
api: {
url: 'http://test.com',
},
}),
);
createOpenAPIPlugin.mockResolvedValue({});
});
afterEach(() => {
jest.restoreAllMocks();
});
it('should return plugins', async () => {
const plugins = await loadSpecs({ llm: true, verbose: false });
expect(plugins).toHaveLength(1);
expect(createOpenAPIPlugin).toHaveBeenCalledTimes(1);
});
it('should return constructorMap if map is true', async () => {
const plugins = await loadSpecs({ llm: {}, map: true, verbose: false });
expect(plugins).toHaveProperty('Test');
expect(createOpenAPIPlugin).not.toHaveBeenCalled();
});
});

View File

@@ -1,6 +1,5 @@
const mongoose = require('mongoose');
const Conversation = mongoose.models.Conversation;
const Message = mongoose.models.Message;
const Conversation = require('../../models/schema/convoSchema');
const Message = require('../../models/schema/messageSchema');
const { MeiliSearch } = require('meilisearch');
let currentTimeout = null;
@@ -37,12 +36,12 @@ async function indexSync(req, res, next) {
if (messageCount !== messagesIndexed) {
console.log('Messages out of sync, indexing');
await Message.syncWithMeili();
Message.syncWithMeili();
}
if (convoCount !== convosIndexed) {
console.log('Convos out of sync, indexing');
await Conversation.syncWithMeili();
Conversation.syncWithMeili();
}
} catch (err) {
// console.log('in index sync');

View File

@@ -1,118 +0,0 @@
const mongoose = require('mongoose');
const { Conversation } = require('../../models/Conversation');
const { getMessages } = require('../../models/');
const migrateToStrictFollowParentMessageIdChain = async () => {
try {
const conversations = await Conversation.find({ endpoint: null, model: null }).exec();
if (!conversations || conversations.length === 0) return { noNeed: true };
console.log('Migration: To strict follow the parentMessageId chain.');
for (let convo of conversations) {
const messages = await getMessages({
conversationId: convo.conversationId,
messageId: { $exists: false },
});
let model;
let oldId;
const promises = [];
messages.forEach((message, i) => {
const msgObj = message.toObject();
const newId = msgObj.id;
if (i === 0) {
message.parentMessageId = '00000000-0000-0000-0000-000000000000';
} else {
message.parentMessageId = oldId;
}
oldId = newId;
message.messageId = newId;
if (message.sender.toLowerCase() !== 'user' && !model) {
model = message.sender.toLowerCase();
}
if (message.sender.toLowerCase() === 'user') {
message.isCreatedByUser = true;
}
promises.push(message.save());
});
await Promise.all(promises);
await Conversation.findOneAndUpdate(
{ conversationId: convo.conversationId },
{ model },
{ new: true },
).exec();
}
try {
await mongoose.connection.db.collection('messages').dropIndex('id_1');
} catch (error) {
console.log('[Migrate] Index doesn\'t exist or already dropped');
}
} catch (error) {
console.log(error);
return { message: '[Migrate] Error migrating conversations' };
}
};
const migrateToSupportBetterCustomization = async () => {
try {
const conversations = await Conversation.find({ endpoint: null }).exec();
if (!conversations || conversations.length === 0) return { noNeed: true };
console.log('Migration: To support better customization.');
const promises = [];
for (let convo of conversations) {
const originalModel = convo?.model;
if (originalModel === 'chatgpt') {
convo.endpoint = 'openAI';
convo.model = 'gpt-3.5-turbo';
} else if (originalModel === 'chatgptCustom') {
convo.endpoint = 'openAI';
convo.model = 'gpt-3.5-turbo';
} else if (originalModel === 'bingai') {
convo.endpoint = 'bingAI';
convo.model = null;
convo.jailbreak = false;
} else if (originalModel === 'sydney') {
convo.endpoint = 'bingAI';
convo.model = null;
convo.jailbreak = true;
} else if (originalModel === 'chatgptBrowser') {
convo.endpoint = 'chatGPTBrowser';
convo.model = 'text-davinci-002-render-sha';
convo.jailbreak = true;
} else {
convo.endpoint = 'openAI';
convo.model = 'gpt-3.5-turbo';
}
promises.push(convo.save());
}
await Promise.all(promises);
} catch (error) {
console.log(error);
return { message: '[Migrate] Error migrating conversations' };
}
};
async function migrateDb() {
let ret = [];
ret[0] = await migrateToStrictFollowParentMessageIdChain();
ret[1] = await migrateToSupportBetterCustomization();
const isMigrated = !!ret.find((element) => !element?.noNeed);
if (!isMigrated) console.log('[Migrate] Nothing to migrate');
}
module.exports = migrateDb;

View File

@@ -3,7 +3,9 @@ const citationRegex = /\[\^\d+?\^\]/g;
const citeText = (res, noLinks = false) => {
let result = res.text || res;
const citations = Array.from(new Set(result.match(citationRegex)));
if (citations?.length === 0) return result;
if (citations?.length === 0) {
return result;
}
if (noLinks) {
citations.forEach((citation) => {
@@ -16,7 +18,9 @@ const citeText = (res, noLinks = false) => {
}
let sources = res.details.sourceAttributions;
if (sources?.length === 0) return result;
if (sources?.length === 0) {
return result;
}
sources = sources.map((source) => source.seeMoreUrl);
citations.forEach((citation) => {

View File

@@ -4,9 +4,13 @@ const regex = / \[.*?]\(.*?\)/g;
const getCitations = (res) => {
const adaptiveCards = res.details.adaptiveCards;
const textBlocks = adaptiveCards && adaptiveCards[0].body;
if (!textBlocks) return '';
if (!textBlocks) {
return '';
}
let links = textBlocks[textBlocks.length - 1]?.text.match(regex);
if (links?.length === 0 || !links) return '';
if (links?.length === 0 || !links) {
return '';
}
links = links.map((link) => link.trim());
return links.join('\n - ');
};

View File

@@ -4,7 +4,9 @@ const cleanUpPrimaryKeyValue = (value) => {
};
function replaceSup(text) {
if (!text.includes('<sup>')) return text;
if (!text.includes('<sup>')) {
return text;
}
const replacedText = text.replace(/<sup>/g, '^').replace(/\s+<\/sup>/g, '^');
return replacedText;
}

View File

@@ -55,7 +55,7 @@ configSchema.methods.incrementCount = function () {
// Static methods
configSchema.statics.findByTag = async function (tag) {
return await this.findOne({ tag });
return await this.findOne({ tag }).lean();
};
configSchema.statics.updateByTag = async function (tag, update) {
@@ -67,7 +67,7 @@ const Config = mongoose.models.Config || mongoose.model('Config', configSchema);
module.exports = {
getConfigs: async (filter) => {
try {
return await Config.find(filter).exec();
return await Config.find(filter).lean();
} catch (error) {
console.error(error);
return { config: 'Error getting configs' };
@@ -75,7 +75,7 @@ module.exports = {
},
deleteConfigs: async (filter) => {
try {
return await Config.deleteMany(filter).exec();
return await Config.deleteMany(filter);
} catch (error) {
console.error(error);
return { config: 'Error deleting configs' };

View File
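The model hunks in this stretch (Config here, and Conversation, Message, Preset, and Prompt below) consistently swap `.exec()` for `.lean()` on read queries, so Mongoose returns plain JavaScript objects instead of hydrated documents. A standalone illustration of the difference follows; the schema and model names are placeholders, not taken from the PR.

```js
// Placeholder model to illustrate the .exec() -> .lean() change in these hunks.
const mongoose = require('mongoose');
const Demo = mongoose.model('Demo', new mongoose.Schema({ tag: String, value: String }));

async function compare() {
  // Hydrated document: carries instance methods (e.g. .save()), getters, and virtuals.
  const hydrated = await Demo.findOne({ tag: 'example' }).exec();

  // Lean result: a plain object -- cheaper to construct and safe to spread or
  // serialize, but without document methods.
  const plain = await Demo.findOne({ tag: 'example' }).lean();

  console.log(typeof hydrated?.save); // 'function' (when a document is found)
  console.log(typeof plain?.save);    // 'undefined'
}
```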

@@ -4,7 +4,7 @@ const { getMessages, deleteMessages } = require('./Message');
const getConvo = async (user, conversationId) => {
try {
return await Conversation.findOne({ user, conversationId }).exec();
return await Conversation.findOne({ user, conversationId }).lean();
} catch (error) {
console.log(error);
return { message: 'Error getting single conversation' };
@@ -24,7 +24,7 @@ module.exports = {
return await Conversation.findOneAndUpdate({ conversationId: conversationId, user }, update, {
new: true,
upsert: true,
}).exec();
});
} catch (error) {
console.log(error);
return { message: 'Error saving conversation' };
@@ -35,10 +35,10 @@ module.exports = {
const totalConvos = (await Conversation.countDocuments({ user })) || 1;
const totalPages = Math.ceil(totalConvos / pageSize);
const convos = await Conversation.find({ user })
.sort({ createdAt: -1, created: -1 })
.sort({ createdAt: -1 })
.skip((pageNumber - 1) * pageSize)
.limit(pageSize)
.exec();
.lean();
return { conversations: convos, pages: totalPages, pageNumber, pageSize };
} catch (error) {
console.log(error);
@@ -54,35 +54,27 @@ module.exports = {
const cache = {};
const convoMap = {};
const promises = [];
// will handle a syncing solution soon
const deletedConvoIds = [];
convoIds.forEach((convo) =>
promises.push(
Conversation.findOne({
user,
conversationId: convo.conversationId,
}).exec(),
}).lean(),
),
);
const results = (await Promise.all(promises)).filter((convo, i) => {
if (!convo) {
deletedConvoIds.push(convoIds[i].conversationId);
return false;
} else {
const page = Math.floor(i / pageSize) + 1;
if (!cache[page]) {
cache[page] = [];
}
cache[page].push(convo);
convoMap[convo.conversationId] = convo;
return true;
const results = (await Promise.all(promises)).filter(Boolean);
results.forEach((convo, i) => {
const page = Math.floor(i / pageSize) + 1;
if (!cache[page]) {
cache[page] = [];
}
cache[page].push(convo);
convoMap[convo.conversationId] = convo;
});
// const startIndex = (pageNumber - 1) * pageSize;
// const convos = results.slice(startIndex, startIndex + pageSize);
const totalPages = Math.ceil(results.length / pageSize);
cache.pages = totalPages;
cache.pageSize = pageSize;
@@ -92,8 +84,6 @@ module.exports = {
pages: totalPages || 1,
pageNumber,
pageSize,
// will handle a syncing solution soon
filter: new Set(deletedConvoIds),
convoMap,
};
} catch (error) {
@@ -121,7 +111,7 @@ module.exports = {
deleteConvos: async (user, filter) => {
let toRemove = await Conversation.find({ ...filter, user }).select('conversationId');
const ids = toRemove.map((instance) => instance.conversationId);
let deleteCount = await Conversation.deleteMany({ ...filter, user }).exec();
let deleteCount = await Conversation.deleteMany({ ...filter, user });
deleteCount.messages = await deleteMessages({ conversationId: { $in: ids } });
return deleteCount;
},

View File

@@ -56,11 +56,7 @@ module.exports = {
async updateMessage(message) {
try {
const { messageId, ...update } = message;
const updatedMessage = await Message.findOneAndUpdate(
{ messageId },
update,
{ new: true },
);
const updatedMessage = await Message.findOneAndUpdate({ messageId }, update, { new: true });
if (!updatedMessage) {
throw new Error('Message not found.');
@@ -82,12 +78,12 @@ module.exports = {
},
async deleteMessagesSince({ messageId, conversationId }) {
try {
const message = await Message.findOne({ messageId }).exec();
const message = await Message.findOne({ messageId }).lean();
if (message) {
return await Message.find({ conversationId })
.deleteMany({ createdAt: { $gt: message.createdAt } })
.exec();
return await Message.find({ conversationId }).deleteMany({
createdAt: { $gt: message.createdAt },
});
}
} catch (err) {
console.error(`Error deleting messages: ${err}`);
@@ -97,7 +93,7 @@ module.exports = {
async getMessages(filter) {
try {
return await Message.find(filter).sort({ createdAt: 1 }).exec();
return await Message.find(filter).sort({ createdAt: 1 }).lean();
} catch (err) {
console.error(`Error getting messages: ${err}`);
throw new Error('Failed to get messages.');
@@ -106,7 +102,7 @@ module.exports = {
async deleteMessages(filter) {
try {
return await Message.deleteMany(filter).exec();
return await Message.deleteMany(filter);
} catch (err) {
console.error(`Error deleting messages: ${err}`);
throw new Error('Failed to delete messages.');

View File

@@ -2,7 +2,7 @@ const Preset = require('./schema/presetSchema');
const getPreset = async (user, presetId) => {
try {
return await Preset.findOne({ user, presetId }).exec();
return await Preset.findOne({ user, presetId }).lean();
} catch (error) {
console.log(error);
return { message: 'Error getting single preset' };
@@ -14,10 +14,10 @@ module.exports = {
getPreset,
getPresets: async (user, filter) => {
try {
return await Preset.find({ ...filter, user }).exec();
return await Preset.find({ ...filter, user }).lean();
} catch (error) {
console.log(error);
return { message: 'Error retriving presets' };
return { message: 'Error retrieving presets' };
}
},
savePreset: async (user, { presetId, newPresetId, ...preset }) => {
@@ -31,7 +31,7 @@ module.exports = {
{ presetId, user },
{ $set: update },
{ new: true, upsert: true },
).exec();
);
} catch (error) {
console.log(error);
return { message: 'Error saving preset' };
@@ -40,7 +40,7 @@ module.exports = {
deletePresets: async (user, filter) => {
// let toRemove = await Preset.find({ ...filter, user }).select('presetId');
// const ids = toRemove.map((instance) => instance.presetId);
let deleteCount = await Preset.deleteMany({ ...filter, user }).exec();
let deleteCount = await Preset.deleteMany({ ...filter, user });
return deleteCount;
},
};

View File

@@ -34,7 +34,7 @@ module.exports = {
},
getPrompts: async (filter) => {
try {
return await Prompt.find(filter).exec();
return await Prompt.find(filter).lean();
} catch (error) {
console.error(error);
return { prompt: 'Error getting prompts' };
@@ -42,7 +42,7 @@ module.exports = {
},
deletePrompts: async (filter) => {
try {
return await Prompt.deleteMany(filter).exec();
return await Prompt.deleteMany(filter);
} catch (error) {
console.error(error);
return { prompt: 'Error deleting prompts' };

View File

@@ -145,7 +145,9 @@ userSchema.methods.generateRefreshToken = function () {
userSchema.methods.comparePassword = function (candidatePassword, callback) {
bcrypt.compare(candidatePassword, this.password, (err, isMatch) => {
if (err) return callback(err);
if (err) {
return callback(err);
}
callback(null, isMatch);
});
};
@@ -153,8 +155,11 @@ userSchema.methods.comparePassword = function (candidatePassword, callback) {
module.exports.hashPassword = async (password) => {
const hashedPassword = await new Promise((resolve, reject) => {
bcrypt.hash(password, 10, function (err, hash) {
if (err) reject(err);
else resolve(hash);
if (err) {
reject(err);
} else {
resolve(hash);
}
});
});

View File

@@ -1,4 +1,10 @@
const { getMessages, saveMessage, updateMessage, deleteMessagesSince, deleteMessages } = require('./Message');
const {
getMessages,
saveMessage,
updateMessage,
deleteMessagesSince,
deleteMessages,
} = require('./Message');
const { getConvoTitle, getConvo, saveConvo } = require('./Conversation');
const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset');

View File

@@ -8,39 +8,122 @@ const meiliEnabled = process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY && s
const validateOptions = function (options) {
const requiredKeys = ['host', 'apiKey', 'indexName'];
requiredKeys.forEach((key) => {
if (!options[key]) throw new Error(`Missing mongoMeili Option: ${key}`);
if (!options[key]) {
throw new Error(`Missing mongoMeili Option: ${key}`);
}
});
};
const createMeiliMongooseModel = function ({ index, indexName, client, attributesToIndex }) {
// console.log('attributesToIndex', attributesToIndex);
// const createMeiliMongooseModel = function ({ index, indexName, client, attributesToIndex }) {
const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
const primaryKey = attributesToIndex[0];
// MeiliMongooseModel is of type Mongoose.Model
class MeiliMongooseModel {
// Clear Meili index
static async clearMeiliIndex() {
await index.delete();
// await index.deleteAllDocuments();
await this.collection.updateMany({ _meiliIndex: true }, { $set: { _meiliIndex: false } });
}
static async resetIndex() {
await this.clearMeiliIndex();
await client.createIndex(indexName, { primaryKey });
}
// Clear Meili index
// Push a mongoDB collection to Meili index
/**
* `syncWithMeili`: synchronizes the data between a MongoDB collection and a MeiliSearch index,
* only triggered if there's ever a discrepancy determined by `api\lib\db\indexSync.js`.
*
* 1. Fetches all documents from the MongoDB collection and the MeiliSearch index.
* 2. Compares the documents from both sources.
* 3. If a document exists in MeiliSearch but not in MongoDB, it's deleted from MeiliSearch.
* 4. If a document exists in MongoDB but not in MeiliSearch, it's added to MeiliSearch.
* 5. If a document exists in both but has different `text` or `title` fields (depending on the `primaryKey`), it's updated in MeiliSearch.
* 6. After all operations, it updates the `_meiliIndex` field in MongoDB to indicate whether the document is indexed in MeiliSearch.
*
* Note: This strategy does not use batch operations for Meilisearch as the `index.addDocuments` will discard
* the entire batch if there's an error with one document, and will not throw an error if there's an issue.
* Also, `index.getDocuments` needs an exact limit on the amount of documents to return, so we build the map in batches.
*
* @returns {Promise} A promise that resolves when the synchronization is complete.
*
* @throws {Error} Throws an error if there's an issue with adding a document to MeiliSearch.
*/
static async syncWithMeili() {
await this.resetIndex();
const docs = await this.find({ _meiliIndex: { $in: [null, false] } });
console.log('docs', docs.length);
const objs = docs.map((doc) => doc.preprocessObjectForIndex());
try {
await index.addDocuments(objs);
const ids = docs.map((doc) => doc._id);
await this.collection.updateMany({ _id: { $in: ids } }, { $set: { _meiliIndex: true } });
let moreDocuments = true;
const mongoDocuments = await this.find().lean();
const format = (doc) => _.pick(doc, attributesToIndex);
// Prepare for comparison
const mongoMap = new Map(mongoDocuments.map((doc) => [doc[primaryKey], format(doc)]));
const indexMap = new Map();
let offset = 0;
const batchSize = 1000;
while (moreDocuments) {
const batch = await index.getDocuments({ limit: batchSize, offset });
if (batch.results.length === 0) {
moreDocuments = false;
}
for (const doc of batch.results) {
indexMap.set(doc[primaryKey], format(doc));
}
offset += batchSize;
}
console.log('indexMap', indexMap.size);
console.log('mongoMap', mongoMap.size);
const updateOps = [];
// Iterate over Meili index documents
for (const [id, doc] of indexMap) {
const update = {};
update[primaryKey] = id;
if (mongoMap.has(id)) {
// Case: Update
// If document also exists in MongoDB, would be update case
if (
(doc.text && doc.text !== mongoMap.get(id).text) ||
(doc.title && doc.title !== mongoMap.get(id).title)
) {
console.log(`${id} had document discrepancy in ${doc.text ? 'text' : 'title'} field`);
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
});
await index.addDocuments([doc]);
}
} else {
// Case: Delete
// If document does not exist in MongoDB, its a delete case from meili index
await index.deleteDocument(id);
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: false } } },
});
}
}
// Iterate over MongoDB documents
for (const [id, doc] of mongoMap) {
const update = {};
update[primaryKey] = id;
// Case: Insert
// If document does not exist in Meili Index, Its an insert case
if (!indexMap.has(id)) {
await index.addDocuments([doc]);
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
});
} else if (doc._meiliIndex === false) {
updateOps.push({
updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
});
}
}
if (updateOps.length > 0) {
await this.collection.bulkWrite(updateOps);
console.log(
`[Meilisearch] Finished indexing ${
primaryKey === 'messageId' ? 'messages' : 'conversations'
}`,
);
}
} catch (error) {
console.log('Error adding document to Meili');
console.log('[Meilisearch] Error adding document to Meili');
console.error(error);
}
}
@@ -70,7 +153,7 @@ const createMeiliMongooseModel = function ({ index, indexName, client, attribute
},
{ _id: 1 },
),
);
).lean();
// Add additional data from mongodb into Meili search hits
const populatedHits = data.hits.map(function (hit) {
@@ -79,7 +162,7 @@ const createMeiliMongooseModel = function ({ index, indexName, client, attribute
const originalHit = _.find(hitsFromMongoose, query);
return {
...(originalHit ? originalHit.toJSON() : {}),
...(originalHit ?? {}),
...hit,
};
});
@@ -96,12 +179,12 @@ const createMeiliMongooseModel = function ({ index, indexName, client, attribute
if (object.conversationId && object.conversationId.includes('|')) {
object.conversationId = object.conversationId.replace(/\|/g, '--');
}
return object
return object;
}
// Push new document to Meili
async addObjectToMeili() {
const object = this.preprocessObjectForIndex()
const object = this.preprocessObjectForIndex();
try {
// console.log('Adding document to Meili', object);
await index.addDocuments([object]);
@@ -228,7 +311,9 @@ module.exports = function mongoMeili(schema, options) {
return next();
} catch (error) {
if (meiliEnabled) {
console.log('[Meilisearch] There was an issue deleting conversation indexes upon deletion, next startup may be slow due to syncing');
console.log(
'[Meilisearch] There was an issue deleting conversation indexes upon deletion, next startup may be slow due to syncing',
);
console.error(error);
}
return next();

View File
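For context, mongoMeili is a Mongoose schema plugin; the required options are exactly the ones validateOptions checks at the top of the hunk above, and the env-variable guard matches the one visible in the convo and message schema hunks that follow. The sketch below shows a minimal registration; the schema fields, require path, and index name are assumptions, and how the plugin derives attributesToIndex is not shown in this diff.

```js
// Minimal sketch of registering the mongoMeili plugin on a schema. The required
// options (host, apiKey, indexName) are those checked by validateOptions above;
// the schema fields, require path, and index name here are assumptions.
const mongoose = require('mongoose');
const mongoMeili = require('../plugins/mongoMeili'); // assumed path

const demoSchema = new mongoose.Schema({ messageId: String, text: String, title: String });

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  demoSchema.plugin(mongoMeili, {
    host: process.env.MEILI_HOST,
    apiKey: process.env.MEILI_MASTER_KEY,
    indexName: 'demo', // assumed index name
  });
}

module.exports = mongoose.models.Demo || mongoose.model('Demo', demoSchema);
```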

@@ -61,6 +61,8 @@ if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
});
}
convoSchema.index({ createdAt: 1 });
const Conversation = mongoose.models.Conversation || mongoose.model('Conversation', convoSchema);
module.exports = Conversation;

View File

@@ -155,4 +155,4 @@ const agentOptions = {
module.exports = {
conversationPreset,
agentOptions,
};
};

View File

@@ -100,6 +100,8 @@ if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
});
}
messageSchema.index({ createdAt: 1 });
const Message = mongoose.models.Message || mongoose.model('Message', messageSchema);
module.exports = Message;

View File

@@ -23,4 +23,4 @@ const pluginAuthSchema = mongoose.Schema(
const PluginAuth = mongoose.models.Plugin || mongoose.model('PluginAuth', pluginAuthSchema);
module.exports = PluginAuth;
module.exports = PluginAuth;

View File

@@ -1,6 +1,6 @@
{
"name": "@librechat/backend",
"version": "0.5.4",
"version": "0.5.6",
"description": "",
"scripts": {
"start": "echo 'please run this from the root directory'",
@@ -24,7 +24,7 @@
"@dqbd/tiktoken": "^1.0.2",
"@fortaine/fetch-event-source": "^3.0.6",
"@keyv/mongo": "^2.1.8",
"@waylaidwanderer/chatgpt-api": "^1.37.0",
"@waylaidwanderer/chatgpt-api": "^1.37.2",
"axios": "^1.3.4",
"bcryptjs": "^2.4.3",
"cheerio": "^1.0.0-rc.12",
@@ -43,7 +43,7 @@
"jsonwebtoken": "^9.0.0",
"keyv": "^4.5.2",
"keyv-file": "^0.2.0",
"langchain": "^0.0.103",
"langchain": "^0.0.114",
"lodash": "^4.17.21",
"meilisearch": "^0.33.0",
"mongoose": "^7.1.1",
@@ -51,7 +51,9 @@
"openai": "^3.2.1",
"openid-client": "^5.4.2",
"passport": "^0.6.0",
"passport-discord": "^0.1.4",
"passport-facebook": "^3.0.0",
"passport-github2": "^0.1.12",
"passport-google-oauth20": "^2.0.0",
"passport-jwt": "^4.0.1",
"passport-local": "^1.0.0",

View File

@@ -1,8 +1,4 @@
const {
registerUser,
requestPasswordReset,
resetPassword,
} = require('../services/auth.service');
const { registerUser, requestPasswordReset, resetPassword } = require('../services/auth.service');
const isProduction = process.env.NODE_ENV === 'production';

View File

@@ -25,8 +25,12 @@ const handleValidationError = (err, res) => {
module.exports = (err, req, res, next) => {
try {
console.log('congrats you hit the error middleware');
if (err.name === 'ValidationError') return (err = handleValidationError(err, res));
if (err.code && err.code == 11000) return (err = handleDuplicateKeyError(err, res));
if (err.name === 'ValidationError') {
return (err = handleValidationError(err, res));
}
if (err.code && err.code == 11000) {
return (err = handleDuplicateKeyError(err, res));
}
} catch (err) {
res.status(500).send('An unknown error occurred.');
}

View File

@@ -1,6 +1,6 @@
// const { getAvailableToolsService } = require('../services/PluginService');
const fs = require('fs');
const { promises: fs } = require('fs');
const path = require('path');
const { addOpenAPISpecs } = require('../../app/clients/tools/util/addOpenAPISpecs');
const filterUniquePlugins = (plugins) => {
const seen = new Set();
@@ -27,26 +27,22 @@ const isPluginAuthenticated = (plugin) => {
const getAvailablePluginsController = async (req, res) => {
try {
fs.readFile(
const manifestFile = await fs.readFile(
path.join(__dirname, '..', '..', 'app', 'clients', 'tools', 'manifest.json'),
'utf8',
(err, data) => {
if (err) {
res.status(500).json({ message: err.message });
} else {
const jsonData = JSON.parse(data);
const uniquePlugins = filterUniquePlugins(jsonData);
const authenticatedPlugins = uniquePlugins.map((plugin) => {
if (isPluginAuthenticated(plugin)) {
return { ...plugin, authenticated: true };
} else {
return plugin;
}
});
res.status(200).json(authenticatedPlugins);
}
},
);
const jsonData = JSON.parse(manifestFile);
const uniquePlugins = filterUniquePlugins(jsonData);
const authenticatedPlugins = uniquePlugins.map((plugin) => {
if (isPluginAuthenticated(plugin)) {
return { ...plugin, authenticated: true };
} else {
return plugin;
}
});
const plugins = await addOpenAPISpecs(authenticatedPlugins);
res.status(200).json(plugins);
} catch (error) {
res.status(500).json({ message: error.message });
}

View File

@@ -2,12 +2,11 @@ const User = require('../../../models/User');
const loginController = async (req, res) => {
try {
const user = await User.findById(
req.user._id,
);
const user = await User.findById(req.user._id);
// If user doesn't exist, return error
if (!user) { // typeof user !== User) { // this doesn't seem to resolve the User type ??
if (!user) {
// typeof user !== User) { // this doesn't seem to resolve the User type ??
return res.status(400).json({ message: 'Invalid credentials' });
}
@@ -15,15 +14,11 @@ const loginController = async (req, res) => {
const expires = eval(process.env.SESSION_EXPIRY);
// Add token to cookie
res.cookie(
'token',
token,
{
expires: new Date(Date.now() + expires),
httpOnly: false,
secure: process.env.NODE_ENV === 'production',
},
);
res.cookie('token', token, {
expires: new Date(Date.now() + expires),
httpOnly: false,
secure: process.env.NODE_ENV === 'production',
});
return res.status(200).send({ token, user });
} catch (err) {
@@ -36,4 +31,4 @@ const loginController = async (req, res) => {
module.exports = {
loginController,
};
};

View File

@@ -9,7 +9,6 @@ const logoutController = async (req, res) => {
res.clearCookie('token');
res.clearCookie('refreshToken');
return res.status(status).send({ message });
} catch (err) {
console.log(err);
return res.status(500).json({ message: err.message });
@@ -18,4 +17,4 @@ const logoutController = async (req, res) => {
module.exports = {
logoutController,
};
};

View File

@@ -1,7 +1,6 @@
const express = require('express');
const session = require('express-session');
const connectDb = require('../lib/db/connectDb');
const migrateDb = require('../lib/db/migrateDb');
const indexSync = require('../lib/db/indexSync');
const path = require('path');
const cors = require('cors');
@@ -11,6 +10,15 @@ const passport = require('passport');
const port = process.env.PORT || 3080;
const host = process.env.HOST || 'localhost';
const projectPath = path.join(__dirname, '..', '..', 'client');
const {
jwtLogin,
passportLogin,
googleLogin,
githubLogin,
discordLogin,
facebookLogin,
setupOpenId,
} = require('../strategies');
// Init the config and validate it
const config = require('../../config/loader');
@@ -19,7 +27,6 @@ config.validate(); // Validate the config
(async () => {
await connectDb();
console.log('Connected to MongoDB');
await migrateDb();
await indexSync();
const app = express();
@@ -33,35 +40,43 @@ config.validate(); // Validate the config
app.use(cors());
if (!process.env.ALLOW_SOCIAL_LOGIN) {
console.warn('Social logins are disabled. Set Envrionment Variable "ALLOW_SOCIAL_LOGIN" to true to enable them.')
console.warn(
'Social logins are disabled. Set Envrionment Variable "ALLOW_SOCIAL_LOGIN" to true to enable them.',
);
}
// OAUTH
app.use(passport.initialize());
require('../strategies/jwtStrategy');
require('../strategies/localStrategy');
passport.use(await jwtLogin());
passport.use(await passportLogin());
if (process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET) {
require('../strategies/googleStrategy');
passport.use(await googleLogin());
}
if (process.env.FACEBOOK_CLIENT_ID && process.env.FACEBOOK_CLIENT_SECRET) {
require('../strategies/facebookStrategy');
passport.use(await facebookLogin());
}
if (process.env.GITHUB_CLIENT_ID && process.env.GITHUB_CLIENT_SECRET) {
require('../strategies/githubStrategy');
passport.use(await githubLogin());
}
if (process.env.DISCORD_CLIENT_ID && process.env.DISCORD_CLIENT_SECRET) {
require('../strategies/discordStrategy');
passport.use(await discordLogin());
}
if (process.env.OPENID_CLIENT_ID && process.env.OPENID_CLIENT_SECRET &&
process.env.OPENID_ISSUER && process.env.OPENID_SCOPE &&
process.env.OPENID_SESSION_SECRET) {
app.use(session({
secret: process.env.OPENID_SESSION_SECRET,
resave: false,
saveUninitialized: false,
}));
if (
process.env.OPENID_CLIENT_ID &&
process.env.OPENID_CLIENT_SECRET &&
process.env.OPENID_ISSUER &&
process.env.OPENID_SCOPE &&
process.env.OPENID_SESSION_SECRET
) {
app.use(
session({
secret: process.env.OPENID_SESSION_SECRET,
resave: false,
saveUninitialized: false,
}),
);
app.use(passport.session());
require('../strategies/openidStrategy');
await setupOpenId();
}
app.use('/oauth', routes.oauth);
// api endpoint
@@ -84,12 +99,13 @@ config.validate(); // Validate the config
});
app.listen(port, host, () => {
if (host == '0.0.0.0')
if (host == '0.0.0.0') {
console.log(
`Server listening on all interface at port ${port}. Use http://localhost:${port} to access it`,
);
else
} else {
console.log(`Server listening at http://${host == '0.0.0.0' ? 'localhost' : host}:${port}`);
}
});
})();

View File

@@ -31,16 +31,16 @@ describe.skip('GET /', () => {
process.env.APP_TITLE = 'Test Title';
process.env.GOOGLE_CLIENT_ID = 'Test Google Client Id';
process.env.GOOGLE_CLIENT_SECRET = 'Test Google Client Secret';
process.env.OPENID_CLIENT_ID= 'Test OpenID Id';
process.env.OPENID_CLIENT_SECRET= 'Test OpenID Secret';
process.env.OPENID_ISSUER= 'Test OpenID Issuer';
process.env.OPENID_SESSION_SECRET= 'Test Secret';
process.env.OPENID_BUTTON_LABEL= 'Test OpenID';
process.env.OPENID_AUTH_URL= 'http://test-server.com';
process.env.OPENID_CLIENT_ID = 'Test OpenID Id';
process.env.OPENID_CLIENT_SECRET = 'Test OpenID Secret';
process.env.OPENID_ISSUER = 'Test OpenID Issuer';
process.env.OPENID_SESSION_SECRET = 'Test Secret';
process.env.OPENID_BUTTON_LABEL = 'Test OpenID';
process.env.OPENID_AUTH_URL = 'http://test-server.com';
process.env.GITHUB_CLIENT_ID = 'Test Github client Id';
process.env.GITHUB_CLIENT_SECRET= 'Test Github client Secret';
process.env.GITHUB_CLIENT_SECRET = 'Test Github client Secret';
process.env.DISCORD_CLIENT_ID = 'Test Discord client Id';
process.env.DISCORD_CLIENT_SECRET= 'Test Discord client Secret';
process.env.DISCORD_CLIENT_SECRET = 'Test Discord client Secret';
process.env.DOMAIN_SERVER = 'http://test-server.com';
process.env.ALLOW_REGISTRATION = 'true';
process.env.ALLOW_SOCIAL_LOGIN = 'true';

View File

@@ -10,13 +10,21 @@ const { handleError, sendMessage, createOnProgress } = require('./handlers');
const abortControllers = new Map();
router.post('/abort', requireJwtAuth, async (req, res) => {
return await abortMessage(req, res, abortControllers);
try {
return await abortMessage(req, res, abortControllers);
} catch (err) {
console.error(err);
}
});
router.post('/', requireJwtAuth, async (req, res) => {
const { endpoint, text, parentMessageId, conversationId: oldConversationId } = req.body;
if (text.length === 0) return handleError(res, { text: 'Prompt empty or too short' });
if (endpoint !== 'anthropic') return handleError(res, { text: 'Illegal request' });
if (text.length === 0) {
return handleError(res, { text: 'Prompt empty or too short' });
}
if (endpoint !== 'anthropic') {
return handleError(res, { text: 'Illegal request' });
}
const endpointOption = {
promptPrefix: req.body?.promptPrefix ?? null,
@@ -117,7 +125,7 @@ const ask = async ({ text, endpointOption, parentMessageId = null, conversationI
const onStart = (userMessage) => {
sendMessage(res, { message: userMessage, created: true });
abortControllers.set(userMessage.conversationId, { abortController, ...endpointOption });
}
};
const client = new AnthropicClient(endpointOption.token);

View File

@@ -15,8 +15,12 @@ router.post('/', requireJwtAuth, async (req, res) => {
parentMessageId,
conversationId: oldConversationId,
} = req.body;
if (text.length === 0) return handleError(res, { text: 'Prompt empty or too short' });
if (endpoint !== 'chatGPTBrowser') return handleError(res, { text: 'Illegal request' });
if (text.length === 0) {
return handleError(res, { text: 'Prompt empty or too short' });
}
if (endpoint !== 'chatGPTBrowser') {
return handleError(res, { text: 'Illegal request' });
}
// build user message
const conversationId = oldConversationId || crypto.randomUUID();
@@ -167,7 +171,7 @@ const ask = async ({
// First update conversationId if needed
let conversationUpdate = { conversationId: newConversationId, endpoint: 'chatGPTBrowser' };
if (conversationId != newConversationId)
if (conversationId != newConversationId) {
if (isNewConversation) {
// change the conversationId to new one
conversationUpdate = {
@@ -182,6 +186,7 @@ const ask = async ({
...endpointOption,
};
}
}
await saveConvo(req.user.id, conversationUpdate);
conversationId = newConversationId;
@@ -191,12 +196,13 @@ const ask = async ({
userMessage.messageId = newUserMassageId;
// If response has parentMessageId, the fake userMessage.messageId should be updated to the real one.
if (!overrideParentMessageId)
if (!overrideParentMessageId) {
await saveMessage({
...userMessage,
messageId: userMessageId,
newMessageId: newUserMassageId,
});
}
userMessageId = newUserMassageId;
sendMessage(res, {

View File

@@ -15,8 +15,12 @@ router.post('/', requireJwtAuth, async (req, res) => {
parentMessageId,
conversationId: oldConversationId,
} = req.body;
if (text.length === 0) return handleError(res, { text: 'Prompt empty or too short' });
if (endpoint !== 'bingAI') return handleError(res, { text: 'Illegal request' });
if (text.length === 0) {
return handleError(res, { text: 'Prompt empty or too short' });
}
if (endpoint !== 'bingAI') {
return handleError(res, { text: 'Illegal request' });
}
// build user message
const conversationId = oldConversationId || crypto.randomUUID();
@@ -34,7 +38,7 @@ router.post('/', requireJwtAuth, async (req, res) => {
// build endpoint option
let endpointOption = {};
if (req.body?.jailbreak)
if (req.body?.jailbreak) {
endpointOption = {
jailbreak: req.body?.jailbreak ?? false,
jailbreakConversationId: req.body?.jailbreakConversationId ?? null,
@@ -43,7 +47,7 @@ router.post('/', requireJwtAuth, async (req, res) => {
toneStyle: req.body?.toneStyle ?? 'creative',
token: req.body?.token ?? null,
};
else
} else {
endpointOption = {
jailbreak: req.body?.jailbreak ?? false,
systemMessage: req.body?.systemMessage ?? null,
@@ -54,6 +58,7 @@ router.post('/', requireJwtAuth, async (req, res) => {
toneStyle: req.body?.toneStyle ?? 'creative',
token: req.body?.token ?? null,
};
}
console.log('ask log', {
userMessage,
@@ -106,7 +111,9 @@ const ask = async ({
'X-Accel-Buffering': 'no',
});
if (preSendRequest) sendMessage(res, { message: userMessage, created: true });
if (preSendRequest) {
sendMessage(res, { message: userMessage, created: true });
}
let lastSavedTimestamp = 0;
const { onProgress: progressCallback, getPartialText } = createOnProgress({
@@ -165,7 +172,8 @@ const ask = async ({
let unfinished = false;
if (partialText?.trim()?.length > response.text.length) {
response.text = partialText;
unfinished = true;
unfinished = false;
//setting "unfinished" to false fix bing image generation error msg and allows to continue a convo after being triggered by censorship (bing does remember the context after a "censored error" so there is no reason to end the convo)
}
let responseMessage = {
@@ -207,12 +215,13 @@ const ask = async ({
userMessage.messageId = newUserMessageId;
// If response has parentMessageId, the fake userMessage.messageId should be updated to the real one.
if (!overrideParentMessageId)
if (!overrideParentMessageId) {
await saveMessage({
...userMessage,
messageId: userMessageId,
newMessageId: newUserMessageId,
});
}
userMessageId = newUserMessageId;
sendMessage(res, {

View File

@@ -9,8 +9,12 @@ const requireJwtAuth = require('../../../middleware/requireJwtAuth');
router.post('/', requireJwtAuth, async (req, res) => {
const { endpoint, text, parentMessageId, conversationId: oldConversationId } = req.body;
if (text.length === 0) return handleError(res, { text: 'Prompt empty or too short' });
if (endpoint !== 'google') return handleError(res, { text: 'Illegal request' });
if (text.length === 0) {
return handleError(res, { text: 'Prompt empty or too short' });
}
if (endpoint !== 'google') {
return handleError(res, { text: 'Illegal request' });
}
// build endpoint option
const endpointOption = {

View File

@@ -15,13 +15,21 @@ const requireJwtAuth = require('../../../middleware/requireJwtAuth');
const abortControllers = new Map();
router.post('/abort', requireJwtAuth, async (req, res) => {
return await abortMessage(req, res, abortControllers);
try {
return await abortMessage(req, res, abortControllers);
} catch (err) {
console.error(err);
}
});
router.post('/', requireJwtAuth, async (req, res) => {
const { endpoint, text, parentMessageId, conversationId } = req.body;
if (text.length === 0) return handleError(res, { text: 'Prompt empty or too short' });
if (endpoint !== 'gptPlugins') return handleError(res, { text: 'Illegal request' });
if (text.length === 0) {
return handleError(res, { text: 'Prompt empty or too short' });
}
if (endpoint !== 'gptPlugins') {
return handleError(res, { text: 'Illegal request' });
}
const agentOptions = req.body?.agentOptions ?? {
agent: 'functions',
@@ -67,7 +75,15 @@ router.post('/', requireJwtAuth, async (req, res) => {
});
});
const ask = async ({ text, endpoint, endpointOption, parentMessageId = null, conversationId, req, res }) => {
const ask = async ({
text,
endpoint,
endpointOption,
parentMessageId = null,
conversationId,
req,
res,
}) => {
res.writeHead(200, {
Connection: 'keep-alive',
'Content-Type': 'text/event-stream',
@@ -100,7 +116,11 @@ const ask = async ({ text, endpoint, endpointOption, parentMessageId = null, con
}
};
const { onProgress: progressCallback, sendIntermediateMessage, getPartialText } = createOnProgress({
const {
onProgress: progressCallback,
sendIntermediateMessage,
getPartialText,
} = createOnProgress({
onProgress: ({ text: partialText }) => {
const currentTimestamp = Date.now();
@@ -156,7 +176,7 @@ const ask = async ({ text, endpoint, endpointOption, parentMessageId = null, con
const onStart = (userMessage) => {
sendMessage(res, { message: userMessage, created: true });
abortControllers.set(userMessage.conversationId, { abortController, ...endpointOption });
}
};
endpointOption.tools = await validateTools(user, endpointOption.tools);
const clientOptions = {
@@ -179,11 +199,13 @@ const ask = async ({ text, endpoint, endpointOption, parentMessageId = null, con
}
const chatAgent = new PluginsClient(openAIApiKey, clientOptions);
const onAgentAction = (action) => {
const onAgentAction = (action, start = false) => {
const formattedAction = formatAction(action);
plugin.inputs.push(formattedAction);
plugin.latest = formattedAction.plugin;
saveMessage(userMessage);
if (!start) {
saveMessage(userMessage);
}
sendIntermediateMessage(res, { plugin });
// console.log('PLUGIN ACTION', formattedAction);
};

View File

@@ -61,7 +61,12 @@ const createOnProgress = ({ onProgress: _onProgress }) => {
};
const sendIntermediateMessage = (res, payload) => {
sendMessage(res, { text: tokens?.length === 0 ? cursor : tokens, message: true, initial: i === 0, ...payload });
sendMessage(res, {
text: tokens?.length === 0 ? cursor : tokens,
message: true,
initial: i === 0,
...payload,
});
i++;
};
@@ -92,7 +97,7 @@ const handleText = async (response, bing = false) => {
};
const isObject = (item) => item && typeof item === 'object' && !Array.isArray(item);
const getString = (input) => isObject(input) ? JSON.stringify(input) : input ;
const getString = (input) => (isObject(input) ? JSON.stringify(input) : input);
function formatSteps(steps) {
let output = '';
@@ -117,20 +122,8 @@ function formatSteps(steps) {
}
function formatAction(action) {
const capitalizeWords = (input) => {
if (input === 'dall-e') {
return 'DALL-E';
}
return input
.replace(/-/g, ' ')
.split(' ')
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
.join(' ');
};
const formattedAction = {
plugin: capitalizeWords(action.tool) || action.tool,
plugin: action.tool,
input: getString(action.toolInput),
thought: action.log.includes('Thought: ')
? action.log.split('\n')[0].replace('Thought: ', '')
@@ -162,4 +155,4 @@ module.exports = {
handleText,
formatSteps,
formatAction,
};
};
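To illustrate the simplified formatAction above, here is a self-contained sketch of the visible logic plus an example input; the action shape is assumed for illustration, and the fallback branch of thought is outside this hunk, so it is approximated:

// Sketch of the formatting helpers shown above (not the full module).
const isObject = (item) => item && typeof item === 'object' && !Array.isArray(item);
const getString = (input) => (isObject(input) ? JSON.stringify(input) : input);

const formatAction = (action) => ({
  plugin: action.tool, // no longer title-cased after this change
  input: getString(action.toolInput),
  thought: action.log.includes('Thought: ')
    ? action.log.split('\n')[0].replace('Thought: ', '')
    : action.log.split('\n')[0], // assumption: the real fallback is not shown in this hunk
});

// Illustrative agent action (shape assumed, not taken from the repo):
const action = {
  tool: 'google',
  toolInput: { query: 'current weather in Paris' },
  log: 'Thought: I should search the web.\nAction: google',
};
console.log(formatAction(action));
// -> { plugin: 'google', input: '{"query":"current weather in Paris"}', thought: 'I should search the web.' }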

View File

@@ -3,24 +3,28 @@ const router = express.Router();
const { titleConvo, OpenAIClient } = require('../../../app');
const { getAzureCredentials, abortMessage } = require('../../../utils');
const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('../../../models');
const {
handleError,
sendMessage,
createOnProgress,
} = require('./handlers');
const { handleError, sendMessage, createOnProgress } = require('./handlers');
const requireJwtAuth = require('../../../middleware/requireJwtAuth');
const abortControllers = new Map();
router.post('/abort', requireJwtAuth, async (req, res) => {
return await abortMessage(req, res, abortControllers);
try {
return await abortMessage(req, res, abortControllers);
} catch (err) {
console.error(err);
}
});
router.post('/', requireJwtAuth, async (req, res) => {
const { endpoint, text, parentMessageId, conversationId } = req.body;
if (text.length === 0) return handleError(res, { text: 'Prompt empty or too short' });
if (text.length === 0) {
return handleError(res, { text: 'Prompt empty or too short' });
}
const isOpenAI = endpoint === 'openAI' || endpoint === 'azureOpenAI';
if (!isOpenAI) return handleError(res, { text: 'Illegal request' });
if (!isOpenAI) {
return handleError(res, { text: 'Illegal request' });
}
// build endpoint option
const endpointOption = {
@@ -50,7 +54,15 @@ router.post('/', requireJwtAuth, async (req, res) => {
});
});
const ask = async ({ text, endpointOption, parentMessageId = null, endpoint, conversationId, req, res }) => {
const ask = async ({
text,
endpointOption,
parentMessageId = null,
endpoint,
conversationId,
req,
res,
}) => {
res.writeHead(200, {
Connection: 'keep-alive',
'Content-Type': 'text/event-stream',
@@ -166,7 +178,11 @@ const ask = async ({ text, endpointOption, parentMessageId = null, endpoint, con
response.parentMessageId = overrideParentMessageId;
}
console.log('promptTokens, completionTokens:', response.promptTokens, response.completionTokens);
console.log(
'promptTokens, completionTokens:',
response.promptTokens,
response.completionTokens,
);
await saveMessage(response);
sendMessage(res, {

View File

@@ -5,14 +5,16 @@ router.get('/', async function (req, res) {
try {
const appTitle = process.env.APP_TITLE || 'LibreChat';
const googleLoginEnabled = !!process.env.GOOGLE_CLIENT_ID && !!process.env.GOOGLE_CLIENT_SECRET;
const openidLoginEnabled = !!process.env.OPENID_CLIENT_ID
&& !!process.env.OPENID_CLIENT_SECRET
&& !!process.env.OPENID_ISSUER
&& !!process.env.OPENID_SESSION_SECRET;
const openidLoginEnabled =
!!process.env.OPENID_CLIENT_ID &&
!!process.env.OPENID_CLIENT_SECRET &&
!!process.env.OPENID_ISSUER &&
!!process.env.OPENID_SESSION_SECRET;
const openidLabel = process.env.OPENID_BUTTON_LABEL || 'Login with OpenID';
const openidImageUrl = process.env.OPENID_IMAGE_URL;
const githubLoginEnabled = !!process.env.GITHUB_CLIENT_ID && !!process.env.GITHUB_CLIENT_SECRET;
const discordLoginEnabled = !!process.env.DISCORD_CLIENT_ID && !!process.env.DISCORD_CLIENT_SECRET;
const discordLoginEnabled =
!!process.env.DISCORD_CLIENT_ID && !!process.env.DISCORD_CLIENT_SECRET;
const serverDomain = process.env.DOMAIN_SERVER || 'http://localhost:3080';
const registrationEnabled = process.env.ALLOW_REGISTRATION === 'true';
const socialLoginEnabled = process.env.ALLOW_SOCIAL_LOGIN === 'true';
@@ -29,7 +31,6 @@ router.get('/', async function (req, res) {
registrationEnabled,
socialLoginEnabled,
});
} catch (err) {
console.error(err);
return res.status(500).send({ error: err.message });

View File

@@ -13,8 +13,11 @@ router.get('/:conversationId', requireJwtAuth, async (req, res) => {
const { conversationId } = req.params;
const convo = await getConvo(req.user.id, conversationId);
if (convo) res.status(200).send(convo.toObject());
else res.status(404).end();
if (convo) {
res.status(200).send(convo);
} else {
res.status(404).end();
}
});
router.post('/clear', requireJwtAuth, async (req, res) => {
@@ -24,7 +27,8 @@ router.post('/clear', requireJwtAuth, async (req, res) => {
filter = { conversationId };
}
console.log('source:', source);
// for debugging deletion source
// console.log('source:', source);
if (source === 'button' && !conversationId) {
return res.status(200).send('No conversationId provided');

View File

@@ -1,31 +1,107 @@
const axios = require('axios');
const express = require('express');
const router = express.Router();
const { availableTools } = require('../../app/clients/tools');
const { addOpenAPISpecs } = require('../../app/clients/tools/util/addOpenAPISpecs');
const getOpenAIModels = (opts = { azure: false }) => {
let models = ['gpt-4', 'gpt-4-0613', 'gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo-0613', 'gpt-3.5-turbo-0301', 'text-davinci-003' ];
const key = opts.azure ? 'AZURE_OPENAI_MODELS' : 'OPENAI_MODELS';
if (process.env[key]) models = String(process.env[key]).split(',');
const openAIApiKey = process.env.OPENAI_API_KEY;
const azureOpenAIApiKey = process.env.AZURE_API_KEY;
const userProvidedOpenAI = openAIApiKey
? openAIApiKey === 'user_provided'
: azureOpenAIApiKey === 'user_provided';
const fetchOpenAIModels = async (opts = { azure: false, plugins: false }, _models = []) => {
let models = _models.slice() ?? [];
if (opts.azure) {
/* TODO: Add Azure models from api/models */
return models;
}
let basePath = 'https://api.openai.com/v1/';
const reverseProxyUrl = process.env.OPENAI_REVERSE_PROXY;
if (reverseProxyUrl) {
basePath = reverseProxyUrl.match(/.*v1/)[0];
}
if (basePath.includes('v1')) {
try {
const res = await axios.get(`${basePath}/models`, {
headers: {
Authorization: `Bearer ${openAIApiKey}`,
},
});
models = res.data.data.map((item) => item.id);
} catch (err) {
console.error(err);
}
}
if (!reverseProxyUrl) {
const regex = /(text-davinci-003|gpt-)/;
models = models.filter((model) => regex.test(model));
}
return models;
};
const getOpenAIModels = async (opts = { azure: false, plugins: false }) => {
let models = [
'gpt-4',
'gpt-4-0613',
'gpt-3.5-turbo',
'gpt-3.5-turbo-16k',
'gpt-3.5-turbo-0613',
'gpt-3.5-turbo-0301',
];
if (!opts.plugins) {
models.push('text-davinci-003');
}
let key;
if (opts.azure) {
key = 'AZURE_OPENAI_MODELS';
} else if (opts.plugins) {
key = 'PLUGIN_MODELS';
} else {
key = 'OPENAI_MODELS';
}
if (process.env[key]) {
models = String(process.env[key]).split(',');
return models;
}
if (userProvidedOpenAI) {
console.warn(
`When setting OPENAI_API_KEY to 'user_provided', ${key} must be set manually or default values will be used`,
);
return models;
}
models = await fetchOpenAIModels(opts, models);
return models;
};
const getChatGPTBrowserModels = () => {
let models = ['text-davinci-002-render-sha', 'gpt-4'];
if (process.env.CHATGPT_MODELS) models = String(process.env.CHATGPT_MODELS).split(',');
if (process.env.CHATGPT_MODELS) {
models = String(process.env.CHATGPT_MODELS).split(',');
}
return models;
};
const getAnthropicModels = () => {
let models = ['claude-1', 'claude-1-100k', 'claude-instant-1', 'claude-instant-1-100k', 'claude-2'];
if (process.env.ANTHROPIC_MODELS) models = String(process.env.ANTHROPIC_MODELS).split(',');
return models;
};
const getPluginModels = () => {
let models = ['gpt-4', 'gpt-4-0613', 'gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo-0613', 'gpt-3.5-turbo-0301'];
if (process.env.PLUGIN_MODELS) models = String(process.env.PLUGIN_MODELS).split(',');
let models = [
'claude-1',
'claude-1-100k',
'claude-instant-1',
'claude-instant-1-100k',
'claude-2',
];
if (process.env.ANTHROPIC_MODELS) {
models = String(process.env.ANTHROPIC_MODELS).split(',');
}
return models;
};
@@ -50,22 +126,37 @@ router.get('/', async function (req, res) {
}
}
const tools = await addOpenAPISpecs(availableTools);
function transformToolsToMap(tools) {
return tools.reduce((map, obj) => {
map[obj.pluginKey] = obj.name;
return map;
}, {});
}
const plugins = transformToolsToMap(tools);
const google =
key || palmUser
? { userProvide: palmUser, availableModels: ['chat-bison', 'text-bison', 'codechat-bison'] }
: false;
const openAIApiKey = process.env.OPENAI_API_KEY;
const azureOpenAIApiKey = process.env.AZURE_API_KEY;
const userProvidedOpenAI = openAIApiKey ? openAIApiKey === 'user_provided' : azureOpenAIApiKey === 'user_provided';
const openAI = openAIApiKey
? { availableModels: getOpenAIModels(), userProvide: openAIApiKey === 'user_provided' }
? { availableModels: await getOpenAIModels(), userProvide: openAIApiKey === 'user_provided' }
: false;
const azureOpenAI = azureOpenAIApiKey
? { availableModels: getOpenAIModels({ azure: true }), userProvide: azureOpenAIApiKey === 'user_provided' }
: false;
const gptPlugins = openAIApiKey || azureOpenAIApiKey
? { availableModels: getPluginModels(), availableTools, availableAgents: ['classic', 'functions'], userProvide: userProvidedOpenAI }
? {
availableModels: await getOpenAIModels({ azure: true }),
userProvide: azureOpenAIApiKey === 'user_provided',
}
: false;
const gptPlugins =
openAIApiKey || azureOpenAIApiKey
? {
availableModels: await getOpenAIModels({ plugins: true }),
plugins,
availableAgents: ['classic', 'functions'],
userProvide: userProvidedOpenAI,
}
: false;
const bingAI = process.env.BINGAI_TOKEN
? { userProvide: process.env.BINGAI_TOKEN == 'user_provided' }
: false;
@@ -82,7 +173,9 @@ router.get('/', async function (req, res) {
}
: false;
res.send(JSON.stringify({ azureOpenAI, openAI, google, bingAI, chatGPTBrowser, gptPlugins, anthropic }));
res.send(
JSON.stringify({ azureOpenAI, openAI, google, bingAI, chatGPTBrowser, gptPlugins, anthropic }),
);
});
module.exports = { router, getOpenAIModels, getChatGPTBrowserModels };
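With these changes, the model lists for openAI, azureOpenAI, and gptPlugins come from the provider when a real API key is configured, falling back to the hard-coded defaults otherwise. A minimal standalone sketch of that lookup, following the request shape in the diff above (base URL and filter regex are from the diff; the rest is illustrative):

const axios = require('axios');

// Sketch: list model ids from an OpenAI-compatible API.
// Assumes OPENAI_API_KEY holds a real key (not 'user_provided').
async function listModelIds(basePath = 'https://api.openai.com/v1') {
  const res = await axios.get(`${basePath}/models`, {
    headers: { Authorization: `Bearer ${process.env.OPENAI_API_KEY}` },
  });
  // The endpoint responds with { data: [{ id: 'gpt-4', ... }, ...] }.
  return res.data.data
    .map((item) => item.id)
    .filter((id) => /(text-davinci-003|gpt-)/.test(id)); // same filter used above when no reverse proxy is set
}

listModelIds().then((models) => console.log(models)).catch(console.error);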

View File

@@ -6,7 +6,7 @@ const requireJwtAuth = require('../../middleware/requireJwtAuth');
router.get('/', requireJwtAuth, async (req, res) => {
const presets = (await getPresets(req.user.id)).map((preset) => {
return preset.toObject();
return preset;
});
res.status(200).send(presets);
});
@@ -20,7 +20,7 @@ router.post('/', requireJwtAuth, async (req, res) => {
await savePreset(req.user.id, update);
const presets = (await getPresets(req.user.id)).map((preset) => {
return preset.toObject();
return preset;
});
res.status(201).send(presets);
} catch (error) {
@@ -33,18 +33,16 @@ router.post('/delete', requireJwtAuth, async (req, res) => {
let filter = {};
const { presetId } = req.body.arg || {};
if (presetId) filter = { presetId };
if (presetId) {
filter = { presetId };
}
console.log('delete preset filter', filter);
try {
await deletePresets(req.user.id, filter);
const presets = (await getPresets(req.user.id)).map((preset) => preset.toObject());
// console.log('delete preset response', presets);
const presets = await getPresets(req.user.id);
res.status(201).send(presets);
// res.status(201).send(dbResponse);
} catch (error) {
console.error(error);
res.status(500).send(error);

View File

@@ -90,11 +90,6 @@ router.get('/', requireJwtAuth, async function (req, res) {
}
});
router.get('/clear', async function (req, res) {
await Message.resetIndex();
res.send('cleared');
});
router.get('/test', async function (req, res) {
const { q } = req.query;
const messages = (

View File

@@ -3,7 +3,7 @@ const { encrypt, decrypt } = require('../../utils/');
const getUserPluginAuthValue = async (user, authField) => {
try {
const pluginAuth = await PluginAuth.findOne({ user, authField });
const pluginAuth = await PluginAuth.findOne({ user, authField }).lean();
if (!pluginAuth) {
return null;
}
@@ -43,7 +43,7 @@ const getUserPluginAuthValue = async (user, authField) => {
const updateUserPluginAuth = async (userId, authField, pluginKey, value) => {
try {
const encryptedValue = encrypt(value);
const pluginAuth = await PluginAuth.findOne({ userId, authField });
const pluginAuth = await PluginAuth.findOne({ userId, authField }).lean();
if (pluginAuth) {
const pluginAuth = await PluginAuth.updateOne(
{ userId, authField },

View File

@@ -54,7 +54,7 @@ const registerUser = async (user) => {
const { email, password, name, username } = user;
try {
const existingUser = await User.findOne({ email });
const existingUser = await User.findOne({ email }).lean();
if (existingUser) {
console.info(
@@ -104,13 +104,15 @@ const registerUser = async (user) => {
* @returns
*/
const requestPasswordReset = async (email) => {
const user = await User.findOne({ email });
const user = await User.findOne({ email }).lean();
if (!user) {
return new Error('Email does not exist');
}
let token = await Token.findOne({ userId: user._id });
if (token) await token.deleteOne();
if (token) {
await token.deleteOne();
}
let resetToken = crypto.randomBytes(32).toString('hex');
const hash = await bcrypt.hashSync(resetToken, 10);
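The queries in the last two files now chain Mongoose's .lean(), which returns plain JavaScript objects instead of hydrated documents; that is cheaper when the result is only read and never saved back. A minimal sketch of the difference (hypothetical model, assumes an active mongoose connection):

const mongoose = require('mongoose');
const Demo = mongoose.model('Demo', new mongoose.Schema({ name: String }));

async function readBothWays(filter) {
  const hydrated = await Demo.findOne(filter); // Mongoose document: has .save(), getters, virtuals
  const plain = await Demo.findOne(filter).lean(); // plain object: no document methods, lower overhead
  return { hydrated, plain };
}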

View File

@@ -1,51 +1,51 @@
const passport = require('passport');
const { Strategy: DiscordStrategy } = require('passport-discord');
const User = require('../models/User');
const config = require('../../config/loader');
const domains = config.domains;
const discordLogin = new DiscordStrategy(
{
clientID: process.env.DISCORD_CLIENT_ID,
clientSecret: process.env.DISCORD_CLIENT_SECRET,
callbackURL: `${domains.server}${process.env.DISCORD_CALLBACK_URL}`,
scope: ['identify', 'email'], // Request scopes
authorizationURL: 'https://discord.com/api/oauth2/authorize?prompt=none', // Add the prompt query parameter
},
async (accessToken, refreshToken, profile, cb) => {
try {
const email = profile.email;
const discordId = profile.id;
const discordLogin = async () =>
new DiscordStrategy(
{
clientID: process.env.DISCORD_CLIENT_ID,
clientSecret: process.env.DISCORD_CLIENT_SECRET,
callbackURL: `${domains.server}${process.env.DISCORD_CALLBACK_URL}`,
scope: ['identify', 'email'], // Request scopes
authorizationURL: 'https://discord.com/api/oauth2/authorize?prompt=none', // Add the prompt query parameter
},
async (accessToken, refreshToken, profile, cb) => {
try {
const email = profile.email;
const discordId = profile.id;
const oldUser = await User.findOne({ email });
if (oldUser) {
return cb(null, oldUser);
const oldUser = await User.findOne({ email });
if (oldUser) {
return cb(null, oldUser);
}
let avatarURL;
if (profile.avatar) {
const format = profile.avatar.startsWith('a_') ? 'gif' : 'png';
avatarURL = `https://cdn.discordapp.com/avatars/${profile.id}/${profile.avatar}.${format}`;
} else {
const defaultAvatarNum = Number(profile.discriminator) % 5;
avatarURL = `https://cdn.discordapp.com/embed/avatars/${defaultAvatarNum}.png`;
}
const newUser = await User.create({
provider: 'discord',
discordId,
username: profile.username,
email,
name: profile.global_name,
avatar: avatarURL,
});
cb(null, newUser);
} catch (err) {
console.error(err);
cb(err);
}
},
);
let avatarURL;
if (profile.avatar) {
const format = profile.avatar.startsWith('a_') ? 'gif' : 'png';
avatarURL = `https://cdn.discordapp.com/avatars/${profile.id}/${profile.avatar}.${format}`;
} else {
const defaultAvatarNum = Number(profile.discriminator) % 5;
avatarURL = `https://cdn.discordapp.com/embed/avatars/${defaultAvatarNum}.png`;
}
const newUser = await User.create({
provider: 'discord',
discordId,
username: profile.username,
email,
name: profile.global_name,
avatar: avatarURL,
});
cb(null, newUser);
} catch (err) {
console.error(err);
cb(err);
}
},
);
passport.use(discordLogin);
module.exports = discordLogin;

View File

@@ -1,59 +1,59 @@
const passport = require('passport');
const FacebookStrategy = require('passport-facebook').Strategy;
const User = require('../models/User');
const config = require('../../config/loader');
const domains = config.domains;
// facebook strategy
const facebookLogin = new FacebookStrategy(
{
clientID: process.env.FACEBOOK_APP_ID,
clientSecret: process.env.FACEBOOK_SECRET,
callbackURL: `${domains.server}${process.env.FACEBOOK_CALLBACK_URL}`,
proxy: true,
// profileFields: [
// 'id',
// 'email',
// 'gender',
// 'profileUrl',
// 'displayName',
// 'locale',
// 'name',
// 'timezone',
// 'updated_time',
// 'verified',
// 'picture.type(large)'
// ]
},
async (accessToken, refreshToken, profile, done) => {
console.log('facebookLogin => profile', profile);
try {
const oldUser = await User.findOne({ email: profile.emails[0].value });
const facebookLogin = async () =>
new FacebookStrategy(
{
clientID: process.env.FACEBOOK_APP_ID,
clientSecret: process.env.FACEBOOK_SECRET,
callbackURL: `${domains.server}${process.env.FACEBOOK_CALLBACK_URL}`,
proxy: true,
// profileFields: [
// 'id',
// 'email',
// 'gender',
// 'profileUrl',
// 'displayName',
// 'locale',
// 'name',
// 'timezone',
// 'updated_time',
// 'verified',
// 'picture.type(large)'
// ]
},
async (accessToken, refreshToken, profile, done) => {
console.log('facebookLogin => profile', profile);
try {
const oldUser = await User.findOne({ email: profile.emails[0].value });
if (oldUser) {
console.log('FACEBOOK LOGIN => found user', oldUser);
return done(null, oldUser);
if (oldUser) {
console.log('FACEBOOK LOGIN => found user', oldUser);
return done(null, oldUser);
}
} catch (err) {
console.log(err);
}
} catch (err) {
console.log(err);
}
// register user
try {
const newUser = await new User({
provider: 'facebook',
facebookId: profile.id,
username: profile.name.givenName + profile.name.familyName,
email: profile.emails[0].value,
name: profile.displayName,
avatar: profile.photos[0].value,
}).save();
// register user
try {
const newUser = await new User({
provider: 'facebook',
facebookId: profile.id,
username: profile.name.givenName + profile.name.familyName,
email: profile.emails[0].value,
name: profile.displayName,
avatar: profile.photos[0].value,
}).save();
done(null, newUser);
} catch (err) {
console.log(err);
}
},
);
done(null, newUser);
} catch (err) {
console.log(err);
}
},
);
passport.use(facebookLogin);
module.exports = facebookLogin;

View File

@@ -1,4 +1,3 @@
const passport = require('passport');
const { Strategy: GitHubStrategy } = require('passport-github2');
const config = require('../../config/loader');
const domains = config.domains;
@@ -6,42 +5,43 @@ const domains = config.domains;
const User = require('../models/User');
// GitHub strategy
const githubLogin = new GitHubStrategy(
{
clientID: process.env.GITHUB_CLIENT_ID,
clientSecret: process.env.GITHUB_CLIENT_SECRET,
callbackURL: `${domains.server}${process.env.GITHUB_CALLBACK_URL}`,
proxy: false,
scope: ['user:email'], // Request email scope
},
async (accessToken, refreshToken, profile, cb) => {
try {
let email;
if (profile.emails && profile.emails.length > 0) {
email = profile.emails[0].value;
const githubLogin = async () =>
new GitHubStrategy(
{
clientID: process.env.GITHUB_CLIENT_ID,
clientSecret: process.env.GITHUB_CLIENT_SECRET,
callbackURL: `${domains.server}${process.env.GITHUB_CALLBACK_URL}`,
proxy: false,
scope: ['user:email'], // Request email scope
},
async (accessToken, refreshToken, profile, cb) => {
try {
let email;
if (profile.emails && profile.emails.length > 0) {
email = profile.emails[0].value;
}
const oldUser = await User.findOne({ email });
if (oldUser) {
return cb(null, oldUser);
}
const newUser = await new User({
provider: 'github',
githubId: profile.id,
username: profile.username,
email,
emailVerified: profile.emails[0].verified,
name: profile.displayName,
avatar: profile.photos[0].value,
}).save();
cb(null, newUser);
} catch (err) {
console.error(err);
cb(err);
}
},
);
const oldUser = await User.findOne({ email });
if (oldUser) {
return cb(null, oldUser);
}
const newUser = await new User({
provider: 'github',
githubId: profile.id,
username: profile.username,
email,
emailVerified: profile.emails[0].verified,
name: profile.displayName,
avatar: profile.photos[0].value,
}).save();
cb(null, newUser);
} catch (err) {
console.error(err);
cb(err);
}
},
);
passport.use(githubLogin);
module.exports = githubLogin;

View File

@@ -1,4 +1,3 @@
const passport = require('passport');
const { Strategy: GoogleStrategy } = require('passport-google-oauth20');
const config = require('../../config/loader');
const domains = config.domains;
@@ -6,38 +5,39 @@ const domains = config.domains;
const User = require('../models/User');
// google strategy
const googleLogin = new GoogleStrategy(
{
clientID: process.env.GOOGLE_CLIENT_ID,
clientSecret: process.env.GOOGLE_CLIENT_SECRET,
callbackURL: `${domains.server}${process.env.GOOGLE_CALLBACK_URL}`,
proxy: true,
},
async (accessToken, refreshToken, profile, cb) => {
try {
const oldUser = await User.findOne({ email: profile.emails[0].value });
if (oldUser) {
return cb(null, oldUser);
const googleLogin = async () =>
new GoogleStrategy(
{
clientID: process.env.GOOGLE_CLIENT_ID,
clientSecret: process.env.GOOGLE_CLIENT_SECRET,
callbackURL: `${domains.server}${process.env.GOOGLE_CALLBACK_URL}`,
proxy: true,
},
async (accessToken, refreshToken, profile, cb) => {
try {
const oldUser = await User.findOne({ email: profile.emails[0].value });
if (oldUser) {
return cb(null, oldUser);
}
} catch (err) {
console.log(err);
}
} catch (err) {
console.log(err);
}
try {
const newUser = await new User({
provider: 'google',
googleId: profile.id,
username: profile.name.givenName,
email: profile.emails[0].value,
emailVerified: profile.emails[0].verified,
name: `${profile.name.givenName} ${profile.name.familyName}`,
avatar: profile.photos[0].value,
}).save();
cb(null, newUser);
} catch (err) {
console.log(err);
}
},
);
try {
const newUser = await new User({
provider: 'google',
googleId: profile.id,
username: profile.name.givenName,
email: profile.emails[0].value,
emailVerified: profile.emails[0].verified,
name: `${profile.name.givenName} ${profile.name.familyName}`,
avatar: profile.photos[0].value,
}).save();
cb(null, newUser);
} catch (err) {
console.log(err);
}
},
);
passport.use(googleLogin);
module.exports = googleLogin;

api/strategies/index.js (new file, 17 lines)
View File

@@ -0,0 +1,17 @@
const passportLogin = require('./localStrategy');
const googleLogin = require('./googleStrategy');
const githubLogin = require('./githubStrategy');
const discordLogin = require('./discordStrategy');
const jwtLogin = require('./jwtStrategy');
const facebookLogin = require('./facebookStrategy');
const setupOpenId = require('./openidStrategy');
module.exports = {
passportLogin,
googleLogin,
githubLogin,
discordLogin,
jwtLogin,
facebookLogin,
setupOpenId,
};
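A brief usage sketch of the new aggregate export, mirroring how api/server/index.js consumes it in the diff further up (each login export is an async factory that resolves to a passport strategy):

const passport = require('passport');
const { jwtLogin, passportLogin, googleLogin } = require('../strategies');

(async () => {
  // Strategies are now created on demand and registered explicitly,
  // instead of each module calling passport.use() at require time.
  passport.use(await jwtLogin());
  passport.use(await passportLogin());
  if (process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET) {
    passport.use(await googleLogin());
  }
})();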

View File

@@ -1,26 +1,26 @@
const passport = require('passport');
const { Strategy: JwtStrategy, ExtractJwt } = require('passport-jwt');
const User = require('../models/User');
// JWT strategy
const jwtLogin = new JwtStrategy(
{
jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
secretOrKey: process.env.JWT_SECRET,
},
async (payload, done) => {
try {
const user = await User.findById(payload.id);
if (user) {
done(null, user);
} else {
console.log('JwtStrategy => no user found');
done(null, false);
const jwtLogin = async () =>
new JwtStrategy(
{
jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
secretOrKey: process.env.JWT_SECRET,
},
async (payload, done) => {
try {
const user = await User.findById(payload.id);
if (user) {
done(null, user);
} else {
console.log('JwtStrategy => no user found');
done(null, false);
}
} catch (err) {
done(err, false);
}
} catch (err) {
done(err, false);
}
},
);
},
);
passport.use(jwtLogin);
module.exports = jwtLogin;

View File

@@ -1,62 +1,60 @@
const passport = require('passport');
const PassportLocalStrategy = require('passport-local').Strategy;
const User = require('../models/User');
const { loginSchema } = require('./validators');
const DebugControl = require('../utils/debug.js');
const passportLogin = new PassportLocalStrategy(
{
usernameField: 'email',
passwordField: 'password',
session: false,
passReqToCallback: true,
},
async (req, email, password, done) => {
const { error } = loginSchema.validate(req.body);
if (error) {
log({
title: 'Passport Local Strategy - Validation Error',
parameters: [{ name: 'req.body', value: req.body }],
});
return done(null, false, { message: error.details[0].message });
}
try {
const user = await User.findOne({ email: email.trim() });
if (!user) {
const passportLogin = async () =>
new PassportLocalStrategy(
{
usernameField: 'email',
passwordField: 'password',
session: false,
passReqToCallback: true,
},
async (req, email, password, done) => {
const { error } = loginSchema.validate(req.body);
if (error) {
log({
title: 'Passport Local Strategy - User Not Found',
parameters: [{ name: 'email', value: email }],
title: 'Passport Local Strategy - Validation Error',
parameters: [{ name: 'req.body', value: req.body }],
});
return done(null, false, { message: 'Email does not exists.' });
return done(null, false, { message: error.details[0].message });
}
user.comparePassword(password, function (err, isMatch) {
if (err) {
try {
const user = await User.findOne({ email: email.trim() });
if (!user) {
log({
title: 'Passport Local Strategy - Compare password error',
parameters: [{ name: 'error', value: err }],
title: 'Passport Local Strategy - User Not Found',
parameters: [{ name: 'email', value: email }],
});
return done(err);
}
if (!isMatch) {
log({
title: 'Passport Local Strategy - Password does not match',
parameters: [{ name: 'isMatch', value: isMatch }],
});
return done(null, false, { message: 'Incorrect password.' });
return done(null, false, { message: 'Email does not exists.' });
}
return done(null, user);
});
} catch (err) {
return done(err);
}
},
);
user.comparePassword(password, function (err, isMatch) {
if (err) {
log({
title: 'Passport Local Strategy - Compare password error',
parameters: [{ name: 'error', value: err }],
});
return done(err);
}
if (!isMatch) {
log({
title: 'Passport Local Strategy - Password does not match',
parameters: [{ name: 'isMatch', value: isMatch }],
});
return done(null, false, { message: 'Incorrect password.' });
}
passport.use(passportLogin);
return done(null, user);
});
} catch (err) {
return done(err);
}
},
);
function log({ title, parameters }) {
DebugControl.log.functionName(title);
@@ -64,3 +62,5 @@ function log({ title, parameters }) {
DebugControl.log.parameters(parameters);
}
}
module.exports = passportLogin;

View File

@@ -19,7 +19,7 @@ const downloadImage = async (url, imagePath, accessToken) => {
try {
const response = await axios.get(url, {
headers: {
'Authorization': `Bearer ${accessToken}`,
Authorization: `Bearer ${accessToken}`,
},
responseType: 'arraybuffer',
});
@@ -36,8 +36,9 @@ const downloadImage = async (url, imagePath, accessToken) => {
}
};
Issuer.discover(process.env.OPENID_ISSUER)
.then(issuer => {
async function setupOpenId() {
try {
const issuer = await Issuer.discover(process.env.OPENID_ISSUER);
const client = new issuer.Client({
client_id: process.env.OPENID_CLIENT_ID,
client_secret: process.env.OPENID_CLIENT_SECRET,
@@ -66,13 +67,15 @@ Issuer.discover(process.env.OPENID_ISSUER)
fullName = userinfo.given_name;
} else if (userinfo.family_name) {
fullName = userinfo.family_name;
} else {
fullName = userinfo.username || userinfo.email;
}
if (!user) {
user = new User({
provider: 'openid',
openidId: userinfo.sub,
username: userinfo.given_name || '',
username: userinfo.username || userinfo.given_name || '',
email: userinfo.email || '',
emailVerified: userinfo.email_verified || false,
name: fullName,
@@ -96,9 +99,22 @@ Issuer.discover(process.env.OPENID_ISSUER)
fileName = userinfo.sub + '.png';
}
const imagePath = path.join(__dirname, '..', '..', 'client', 'public', 'images', 'openid', fileName);
const imagePath = path.join(
__dirname,
'..',
'..',
'client',
'public',
'images',
'openid',
fileName,
);
const imagePathOrEmpty = await downloadImage(imageUrl, imagePath, tokenset.access_token);
const imagePathOrEmpty = await downloadImage(
imageUrl,
imagePath,
tokenset.access_token,
);
user.avatar = imagePathOrEmpty;
} else {
@@ -115,8 +131,9 @@ Issuer.discover(process.env.OPENID_ISSUER)
);
passport.use('openid', openidLogin);
})
.catch(err => {
} catch (err) {
console.error(err);
});
}
}
module.exports = setupOpenId;

View File

@@ -68,45 +68,65 @@ module.exports = {
setLevel: (l) => (level = l),
log: {
trace: (msg) => {
if (level <= levels.TRACE) return;
if (level <= levels.TRACE) {
return;
}
logger.trace(msg);
},
debug: (msg) => {
if (level <= levels.DEBUG) return;
if (level <= levels.DEBUG) {
return;
}
logger.debug(msg);
},
info: (msg) => {
if (level <= levels.INFO) return;
if (level <= levels.INFO) {
return;
}
logger.info(msg);
},
warn: (msg) => {
if (level <= levels.WARN) return;
if (level <= levels.WARN) {
return;
}
logger.warn(msg);
},
error: (msg) => {
if (level <= levels.ERROR) return;
if (level <= levels.ERROR) {
return;
}
logger.error(msg);
},
fatal: (msg) => {
if (level <= levels.FATAL) return;
if (level <= levels.FATAL) {
return;
}
logger.fatal(msg);
},
// Custom loggers
parameters: (parameters) => {
if (level <= levels.TRACE) return;
if (level <= levels.TRACE) {
return;
}
logger.debug({ parameters }, 'Function Parameters');
},
functionName: (name) => {
if (level <= levels.TRACE) return;
if (level <= levels.TRACE) {
return;
}
logger.debug(`EXECUTING: ${name}`);
},
flow: (flow) => {
if (level <= levels.INFO) return;
if (level <= levels.INFO) {
return;
}
logger.debug(`BEGIN FLOW: ${flow}`);
},
variable: ({ name, value }) => {
if (level <= levels.DEBUG) return;
if (level <= levels.DEBUG) {
return;
}
// Check if the variable name matches any of the redact patterns and redact the value
let sanitizedValue = value;
for (const pattern of redactPatterns) {
@@ -118,7 +138,9 @@ module.exports = {
logger.debug({ variable: { name, value: sanitizedValue } }, `VARIABLE ${name}`);
},
request: () => (req, res, next) => {
if (level < levels.DEBUG) return next();
if (level < levels.DEBUG) {
return next();
}
logger.debug({ query: req.query, body: req.body }, `Hit URL ${req.url} with following`);
return next();
},

View File

@@ -15,4 +15,4 @@ async function abortMessage(req, res, abortControllers) {
res.send(JSON.stringify(ret));
}
module.exports = abortMessage;
module.exports = abortMessage;

View File

@@ -1,6 +1,6 @@
const genAzureEndpoint = ({ azureOpenAIApiInstanceName, azureOpenAIApiDeploymentName }) => {
return `https://${azureOpenAIApiInstanceName}.openai.azure.com/openai/deployments/${azureOpenAIApiDeploymentName}`;
}
};
const genAzureChatCompletion = ({
azureOpenAIApiInstanceName,
@@ -8,7 +8,7 @@ const genAzureChatCompletion = ({
azureOpenAIApiVersion,
}) => {
return `https://${azureOpenAIApiInstanceName}.openai.azure.com/openai/deployments/${azureOpenAIApiDeploymentName}/chat/completions?api-version=${azureOpenAIApiVersion}`;
}
};
const getAzureCredentials = () => {
return {
@@ -16,7 +16,7 @@ const getAzureCredentials = () => {
azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_API_INSTANCE_NAME,
azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME,
azureOpenAIApiVersion: process.env.AZURE_OPENAI_API_VERSION,
}
}
};
};
module.exports = { genAzureEndpoint, genAzureChatCompletion, getAzureCredentials };
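As a quick worked example of the URL builders above (the instance, deployment, and API-version values are placeholders, not taken from the repo):

const { genAzureChatCompletion } = require('./azureUtils'); // path assumed

const url = genAzureChatCompletion({
  azureOpenAIApiInstanceName: 'my-instance',
  azureOpenAIApiDeploymentName: 'gpt-4-deployment',
  azureOpenAIApiVersion: '2023-05-15',
});
console.log(url);
// -> https://my-instance.openai.azure.com/openai/deployments/gpt-4-deployment/chat/completions?api-version=2023-05-15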

View File

@@ -12,21 +12,29 @@ module.exports = {
setLevel: (l) => (level = l),
log: {
parameters: (parameters) => {
if (levels.HIGH > level) return;
if (levels.HIGH > level) {
return;
}
console.group();
parameters.forEach((p) => console.log(`${p.name}:`, p.value));
console.groupEnd();
},
functionName: (name) => {
if (levels.MEDIUM > level) return;
if (levels.MEDIUM > level) {
return;
}
console.log(`\nEXECUTING: ${name}\n`);
},
flow: (flow) => {
if (levels.LOW > level) return;
if (levels.LOW > level) {
return;
}
console.log(`\n\n\nBEGIN FLOW: ${flow}\n\n\n`);
},
variable: ({ name, value }) => {
if (levels.HIGH > level) return;
if (levels.HIGH > level) {
return;
}
console.group();
console.group();
console.log(`VARIABLE ${name}:`, value);
@@ -34,7 +42,9 @@ module.exports = {
console.groupEnd();
},
request: () => (req, res, next) => {
if (levels.HIGH > level) return next();
if (levels.HIGH > level) {
return next();
}
console.log('Hit URL', req.url, 'with following:');
console.group();
console.log('Query:', req.query);

View File

@@ -0,0 +1,33 @@
function findContent(obj) {
if (obj && typeof obj === 'object') {
if ('kwargs' in obj && 'content' in obj.kwargs) {
return obj.kwargs.content;
}
for (let key in obj) {
let content = findContent(obj[key]);
if (content) {
return content;
}
}
}
return null;
}
function findMessageContent(message) {
let startIndex = Math.min(message.indexOf('{'), message.indexOf('['));
let jsonString = message.substring(startIndex);
let jsonObjectOrArray;
try {
jsonObjectOrArray = JSON.parse(jsonString);
} catch (error) {
console.error('Failed to parse JSON:', error);
return null;
}
let content = findContent(jsonObjectOrArray);
return content;
}
module.exports = findMessageContent;
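A usage sketch of the new helper; the input string below is illustrative and only shows the kind of embedded payload the function digs content out of:

const findMessageContent = require('./findMessageContent'); // path assumed

// An error string with an embedded payload carrying a kwargs.content field.
const raw = 'LangChain error: [{"kwargs":{"content":"I cannot help with that request."}}]';

console.log(findMessageContent(raw));
// -> 'I cannot help with that request.'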

View File

@@ -3,6 +3,7 @@ const cryptoUtils = require('./crypto');
const { tiktokenModels, maxTokensMap } = require('./tokens');
const sendEmail = require('./sendEmail');
const abortMessage = require('./abortMessage');
const findMessageContent = require('./findMessageContent');
module.exports = {
...cryptoUtils,
@@ -11,4 +12,5 @@ module.exports = {
tiktokenModels,
sendEmail,
abortMessage,
}
findMessageContent,
};

View File

@@ -2,14 +2,14 @@ module.exports = {
roots: ['<rootDir>/src'],
testEnvironment: 'jsdom',
testEnvironmentOptions: {
url: 'http://localhost:3080'
url: 'http://localhost:3080',
},
collectCoverage: true,
collectCoverageFrom: [
'src/**/*.{js,jsx,ts,tsx}',
'!<rootDir>/node_modules/',
'!src/**/*.css.d.ts',
'!src/**/*.d.ts'
'!src/**/*.d.ts',
],
coveragePathIgnorePatterns: ['<rootDir>/node_modules/', '<rootDir>/test/setupTests.js'],
// Todo: Add coverageThreshold once we have enough coverage
@@ -27,7 +27,7 @@ module.exports = {
'\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$':
'jest-file-loader',
'layout-test-utils': '<rootDir>/test/layout-test-utils',
'^~/(.*)$': '<rootDir>/src/$1'
'^~/(.*)$': '<rootDir>/src/$1',
},
restoreMocks: true,
testResultsProcessor: 'jest-junit',
@@ -35,10 +35,10 @@ module.exports = {
transform: {
'\\.[jt]sx?$': 'babel-jest',
'\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$':
'jest-file-loader'
'jest-file-loader',
},
transformIgnorePatterns: ['node_modules/?!@zattoo/use-double-click'],
preset: 'ts-jest',
setupFilesAfterEnv: ['@testing-library/jest-dom/extend-expect', '<rootDir>/test/setupTests.js'],
clearMocks: true
clearMocks: true,
};

View File

@@ -1,15 +1,17 @@
server {
listen 80;
# listen 443 ssl;
# ssl_certificate /etc/nginx/ssl/nginx.crt;
# ssl_certificate_key /etc/nginx/ssl/nginx.key;
server_name localhost;
location /api {
# Proxy requests to the API service
proxy_pass http://api:3080/api;
}
location / {
# Serve your React app
root /usr/share/nginx/html;
try_files $uri $uri/ /index.html;
proxy_pass http://api:3080;
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@librechat/frontend",
"version": "0.5.4",
"version": "0.5.6",
"description": "",
"scripts": {
"data-provider": "cd .. && npm run build:data-provider",
@@ -53,6 +53,7 @@
"export-from-json": "^1.7.2",
"filenamify": "^6.0.0",
"html2canvas": "^1.4.1",
"librechat-data-provider": "^0.1.0",
"lodash": "^4.17.21",
"lucide-react": "^0.220.0",
"pino": "^8.12.1",
@@ -76,8 +77,7 @@
"tailwind-merge": "^1.9.1",
"tailwindcss-animate": "^1.0.5",
"tailwindcss-radix": "^2.8.0",
"url": "^0.11.0",
"@librechat/data-provider": "*"
"url": "^0.11.0"
},
"devDependencies": {
"@babel/cli": "^7.20.7",

Two binary image files added (62 KiB and 59 KiB); contents not shown.

View File

@@ -6,8 +6,8 @@ import { useNavigate } from 'react-router-dom';
import { useRecoilValue } from 'recoil';
import store from '~/store';
import { localize } from '~/localization/Translation';
import { useGetStartupConfig } from '@librechat/data-provider';
import { GoogleIcon, OpenIDIcon, GithubIcon, DiscordIcon } from '~/components'
import { useGetStartupConfig } from 'librechat-data-provider';
import { GoogleIcon, OpenIDIcon, GithubIcon, DiscordIcon } from '~/components';
function Login() {
const { login, error, isAuthenticated } = useAuthContext();
@@ -26,7 +26,9 @@ function Login() {
return (
<div className="flex min-h-screen flex-col items-center justify-center bg-white pt-6 sm:pt-0">
<div className="mt-6 w-96 overflow-hidden bg-white px-6 py-4 sm:max-w-md sm:rounded-lg">
<h1 className="mb-4 text-center text-3xl font-semibold">{localize(lang, 'com_auth_welcome_back')}</h1>
<h1 className="mb-4 text-center text-3xl font-semibold">
{localize(lang, 'com_auth_welcome_back')}
</h1>
{error && (
<div
className="relative mt-4 rounded border border-red-400 bg-red-100 px-4 py-3 text-red-700"
@@ -55,12 +57,12 @@ function Login() {
)}
{startupConfig?.googleLoginEnabled && startupConfig?.socialLoginEnabled && (
<>
<div className="mt-2 flex gap-x-2">
<a
aria-label="Login with Google"
className="justify-left flex w-full items-center space-x-3 rounded-md border border-gray-300 px-5 py-3 hover:bg-gray-50 focus:ring-2 focus:ring-violet-600 focus:ring-offset-1"
href={`${startupConfig.serverDomain}/oauth/google`}>
href={`${startupConfig.serverDomain}/oauth/google`}
>
<GoogleIcon />
<p>{localize(lang, 'com_auth_google_login')}</p>
</a>
@@ -87,12 +89,12 @@ function Login() {
)}
{startupConfig?.githubLoginEnabled && startupConfig?.socialLoginEnabled && (
<>
<div className="mt-2 flex gap-x-2">
<a
aria-label="Login with GitHub"
className="justify-left flex w-full items-center space-x-3 rounded-md border border-gray-300 px-5 py-3 hover:bg-gray-50 focus:ring-2 focus:ring-violet-600 focus:ring-offset-1"
href={`${startupConfig.serverDomain}/oauth/github`}>
href={`${startupConfig.serverDomain}/oauth/github`}
>
<GithubIcon />
<p>{localize(lang, 'com_auth_github_login')}</p>
</a>
@@ -101,12 +103,12 @@ function Login() {
)}
{startupConfig?.discordLoginEnabled && startupConfig?.socialLoginEnabled && (
<>
<div className="mt-2 flex gap-x-2">
<a
aria-label="Login with Discord"
className="justify-left flex w-full items-center space-x-3 rounded-md border border-gray-300 px-5 py-3 hover:bg-gray-50 focus:ring-2 focus:ring-violet-600 focus:ring-offset-1"
href={`${startupConfig.serverDomain}/oauth/discord`}>
href={`${startupConfig.serverDomain}/oauth/discord`}
>
<DiscordIcon />
<p>{localize(lang, 'com_auth_discord_login')}</p>
</a>
@@ -116,6 +118,6 @@ function Login() {
</div>
</div>
);
};
}
export default Login;

View File

@@ -2,7 +2,7 @@ import { useForm } from 'react-hook-form';
import { useRecoilValue } from 'recoil';
import store from '~/store';
import { localize } from '~/localization/Translation';
import { TLoginUser } from '@librechat/data-provider';
import { TLoginUser } from 'librechat-data-provider';
type TLoginFormProps = {
onSubmit: (data: TLoginUser) => void;

Some files were not shown because too many files have changed in this diff.