From fd70e2173218b5fea61bd9a064c1aa6b2c94549a Mon Sep 17 00:00:00 2001
From: Danny Avila <110412045+danny-avila@users.noreply.github.com>
Date: Mon, 18 Sep 2023 12:55:51 -0400
Subject: [PATCH] feat: OpenRouter Support & Improve Model Fetching ⇆ (#936)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* chore(ChatGPTClient.js): add support for OpenRouter API
  chore(OpenAIClient.js): add support for OpenRouter API
* chore: comment out token debugging
* chore: add back streamResult assignment
* chore: remove double condition/assignment from merging
* refactor(routes/endpoints): -> controller/services logic
* feat: add openrouter model fetching
* chore: remove unused endpointsConfig in cleanupPreset function
* refactor: separate models concern from endpointsConfig
* refactor(data-provider): add TModels type and make TEndpointsConfig adaptable to new endpoint keys
* refactor: complete models endpoint service in data-provider
* refactor: onMutate for refreshToken and login, invalidate models query
* feat: complete models endpoint logic for frontend
* chore: remove requireJwtAuth from /api/endpoints and /api/models as not implemented yet
* fix: endpoint will not be overwritten and instead use active value
* feat: openrouter support for plugins
* chore(EndpointOptionsDialog): remove unused recoil value
* refactor(schemas/parseConvo): add handling of secondaryModels to use first of defined secondary models, which includes last selected one as first, or default to the convo's secondary model value
* refactor: remove hooks from store and move to hooks
  refactor(switchToConversation): make switchToConversation use latest recoil state, which is necessary to get the most up-to-date models list, replace wrapper function
  refactor(getDefaultConversation): factor out logic into 3 pieces to reduce complexity.
* fix: backend tests * feat: optimistic update by calling newConvo when models are fetched * feat: openrouter support for titling convos * feat: cache models fetch * chore: add missing dep to AuthContext useEffect * chore: fix useTimeout types * chore: delete old getDefaultConvo file * chore: remove newConvo logic from Root, remove console log from api models caching * chore: ensure bun is used for building in b:client script * fix: default endpoint will not default to null on a completely fresh login (no localStorage/cookies) * chore: add openrouter docs to free_ai_apis.md and .env.example * chore: remove openrouter console logs * feat: add debugging env variable for Plugins --- .env.example | 15 ++ api/app/clients/ChatGPTClient.js | 5 + api/app/clients/OpenAIClient.js | 38 +++- api/app/clients/PluginsClient.js | 30 ++- api/server/controllers/EndpointController.js | 60 ++++++ api/server/controllers/ModelController.js | 23 +++ api/server/index.js | 1 + api/server/routes/ask/askChatGPTBrowser.js | 4 - api/server/routes/endpoints.js | 187 +----------------- .../endpoints/gptPlugins/initializeClient.js | 13 +- api/server/routes/index.js | 4 +- api/server/routes/models.js | 7 + api/server/services/EndpointService.js | 40 ++++ api/server/services/ModelService.js | 127 ++++++++++++ .../components/Conversations/Conversation.jsx | 6 +- .../components/Conversations/DeleteButton.jsx | 6 +- .../components/Endpoints/EditPresetDialog.tsx | 5 +- .../Endpoints/EndpointOptionsDialog.tsx | 5 +- .../components/Endpoints/EndpointSettings.tsx | 4 +- .../Endpoints/SaveAsPresetDialog.tsx | 4 - .../Input/EndpointMenu/EndpointMenu.jsx | 30 ++- .../Input/ModelSelect/ModelSelect.tsx | 4 +- client/src/components/Messages/Message.tsx | 4 +- client/src/components/Nav/ClearConvos.tsx | 7 +- .../Nav/ExportConversation/ExportModel.jsx | 7 +- client/src/components/Nav/MobileNav.jsx | 4 +- client/src/components/Nav/Nav.tsx | 13 +- client/src/components/Nav/NewChat.jsx | 5 +- .../components/Nav/SettingsTabs/General.tsx | 13 +- client/src/hooks/AuthContext.tsx | 2 +- client/src/hooks/index.ts | 3 + client/src/hooks/useConversation.ts | 85 ++++++++ client/src/hooks/useConversations.ts | 15 ++ client/src/hooks/useDefaultConvo.ts | 30 +++ client/src/hooks/useMessageHandler.ts | 6 +- client/src/hooks/usePresetOptions.ts | 6 - client/src/hooks/useServerStream.ts | 6 +- client/src/hooks/useTimeout.tsx | 4 +- client/src/routes/Chat.tsx | 16 +- client/src/routes/Root.tsx | 15 +- client/src/routes/Search.tsx | 3 +- client/src/store/conversation.ts | 106 +--------- client/src/store/conversations.ts | 17 +- client/src/store/index.ts | 2 + client/src/store/models.ts | 34 ++++ client/src/utils/buildDefaultConvo.ts | 64 ++++++ client/src/utils/cleanupPreset.ts | 5 +- client/src/utils/getDefaultConversation.ts | 96 --------- client/src/utils/getDefaultEndpoint.ts | 54 +++++ client/src/utils/index.ts | 3 +- docs/install/free_ai_apis.md | 30 ++- package.json | 2 +- packages/data-provider/src/api-endpoints.ts | 2 + packages/data-provider/src/data-service.ts | 6 + .../data-provider/src/react-query-service.ts | 19 +- packages/data-provider/src/request.ts | 1 + packages/data-provider/src/schemas.ts | 14 +- packages/data-provider/src/types.ts | 15 +- 58 files changed, 809 insertions(+), 523 deletions(-) create mode 100644 api/server/controllers/EndpointController.js create mode 100644 api/server/controllers/ModelController.js create mode 100644 api/server/routes/models.js create mode 100644 api/server/services/EndpointService.js create mode 
100644 api/server/services/ModelService.js create mode 100644 client/src/hooks/useConversation.ts create mode 100644 client/src/hooks/useConversations.ts create mode 100644 client/src/hooks/useDefaultConvo.ts create mode 100644 client/src/store/models.ts create mode 100644 client/src/utils/buildDefaultConvo.ts delete mode 100644 client/src/utils/getDefaultConversation.ts create mode 100644 client/src/utils/getDefaultEndpoint.ts diff --git a/.env.example b/.env.example index 9110a8f69..17a8e1f20 100644 --- a/.env.example +++ b/.env.example @@ -77,6 +77,19 @@ OPENAI_API_KEY=user_provided # https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy # OPENAI_REVERSE_PROXY= +########################## +# OpenRouter (overrides OpenAI and Plugins Endpoints): +########################## + +# OpenRouter is a legitimate proxy service to a multitude of LLMs, both closed and open source, including: +# OpenAI models, Anthropic models, Meta's Llama models, pygmalionai/mythalion-13b +# and many more open source models. Newer integrations are usually discounted, too! + +# Note: this overrides the OpenAI and Plugins Endpoints. +# See ./docs/install/free_ai_apis.md for more info. + +# OPENROUTER_API_KEY= + ########################## # AZURE Endpoint: ########################## @@ -156,6 +169,8 @@ BINGAI_TOKEN=user_provided # Leave it blank to use internal settings. # PLUGIN_MODELS=gpt-3.5-turbo,gpt-3.5-turbo-16k,gpt-3.5-turbo-0301,gpt-4,gpt-4-0314,gpt-4-0613 +DEBUG_PLUGINS=true # Set to false or comment out to disable debug mode for plugins + # For securely storing credentials, you need a fixed key and IV. You can set them here for prod and dev environments # If you don't set them, the app will crash on startup. # You need a 32-byte key (64 characters in hex) and 16-byte IV (32 characters in hex) diff --git a/api/app/clients/ChatGPTClient.js b/api/app/clients/ChatGPTClient.js index 5fa6344d2..b7c541085 100644 --- a/api/app/clients/ChatGPTClient.js +++ b/api/app/clients/ChatGPTClient.js @@ -179,6 +179,11 @@ class ChatGPTClient extends BaseClient { opts.headers.Authorization = `Bearer ${this.apiKey}`; } + if (this.useOpenRouter) { + opts.headers['HTTP-Referer'] = 'https://librechat.ai'; + opts.headers['X-Title'] = 'LibreChat'; + } + if (this.options.headers) { opts.headers = { ...opts.headers, ...this.options.headers }; } diff --git a/api/app/clients/OpenAIClient.js b/api/app/clients/OpenAIClient.js index 0fbb0339e..9a6c61a3a 100644 --- a/api/app/clients/OpenAIClient.js +++ b/api/app/clients/OpenAIClient.js @@ -61,7 +61,13 @@ class OpenAIClient extends BaseClient { }; } + if (process.env.OPENROUTER_API_KEY) { + this.apiKey = process.env.OPENROUTER_API_KEY; + this.useOpenRouter = true; + } + this.isChatCompletion = + this.useOpenRouter || this.options.reverseProxyUrl || this.options.localAI || this.modelOptions.model.startsWith('gpt-'); @@ -119,6 +125,10 @@ class OpenAIClient extends BaseClient { console.debug('Using Azure endpoint'); } + if (this.useOpenRouter) { + this.completionsUrl = 'https://openrouter.ai/api/v1/chat/completions'; + } + return this; } @@ -324,12 +334,24 @@ class OpenAIClient extends BaseClient { return; } + if (this.options.debug) { + // console.debug('progressMessage'); + // console.dir(progressMessage, { depth: null }); + } + if (progressMessage.choices) { streamResult = progressMessage; } - const token = this.isChatCompletion - ? 
progressMessage.choices?.[0]?.delta?.content - : progressMessage.choices?.[0]?.text; + + let token = null; + if (this.isChatCompletion) { + token = + progressMessage.choices?.[0]?.delta?.content ?? progressMessage.choices?.[0]?.text; + } + + if (!token && this.useOpenRouter) { + token = progressMessage.choices?.[0]?.message?.content; + } // first event's delta content is always undefined if (!token) { return; @@ -396,6 +418,16 @@ class OpenAIClient extends BaseClient { configOptions.basePath = this.langchainProxy; } + if (this.useOpenRouter) { + configOptions.basePath = 'https://openrouter.ai/api/v1'; + configOptions.baseOptions = { + headers: { + 'HTTP-Referer': 'https://librechat.ai', + 'X-Title': 'LibreChat', + }, + }; + } + try { const llm = createLLM({ modelOptions, diff --git a/api/app/clients/PluginsClient.js b/api/app/clients/PluginsClient.js index 9b939d052..b8a4759f5 100644 --- a/api/app/clients/PluginsClient.js +++ b/api/app/clients/PluginsClient.js @@ -13,25 +13,27 @@ class PluginsClient extends OpenAIClient { this.sender = options.sender ?? 'Assistant'; this.tools = []; this.actions = []; - this.openAIApiKey = apiKey; this.setOptions(options); + this.openAIApiKey = this.apiKey; this.executor = null; } setOptions(options) { - this.agentOptions = options.agentOptions; + this.agentOptions = { ...options.agentOptions }; this.functionsAgent = this.agentOptions?.agent === 'functions'; - this.agentIsGpt3 = this.agentOptions?.model.startsWith('gpt-3'); - if (this.functionsAgent && this.agentOptions.model) { + this.agentIsGpt3 = this.agentOptions?.model?.includes('gpt-3'); + + super.setOptions(options); + + if (this.functionsAgent && this.agentOptions.model && !this.useOpenRouter) { this.agentOptions.model = this.getFunctionModelName(this.agentOptions.model); } - super.setOptions(options); - this.isGpt3 = this.modelOptions.model.startsWith('gpt-3'); + this.isGpt3 = this.modelOptions?.model?.includes('gpt-3'); - // if (this.options.reverseProxyUrl) { - // this.langchainProxy = this.options.reverseProxyUrl.match(/.*v1/)[0]; - // } + if (this.options.reverseProxyUrl) { + this.langchainProxy = this.options.reverseProxyUrl.match(/.*v1/)[0]; + } } getSaveOptions() { @@ -77,6 +79,16 @@ class PluginsClient extends OpenAIClient { configOptions.basePath = this.langchainProxy; } + if (this.useOpenRouter) { + configOptions.basePath = 'https://openrouter.ai/api/v1'; + configOptions.baseOptions = { + headers: { + 'HTTP-Referer': 'https://librechat.ai', + 'X-Title': 'LibreChat', + }, + }; + } + const model = createLLM({ modelOptions, configOptions, diff --git a/api/server/controllers/EndpointController.js b/api/server/controllers/EndpointController.js new file mode 100644 index 000000000..ff4c8c978 --- /dev/null +++ b/api/server/controllers/EndpointController.js @@ -0,0 +1,60 @@ +const { availableTools } = require('../../app/clients/tools'); +const { addOpenAPISpecs } = require('../../app/clients/tools/util/addOpenAPISpecs'); +const { + openAIApiKey, + azureOpenAIApiKey, + useAzurePlugins, + userProvidedOpenAI, + palmKey, + openAI, + azureOpenAI, + bingAI, + chatGPTBrowser, + anthropic, +} = require('../services/EndpointService').config; + +let i = 0; +async function endpointController(req, res) { + let key, palmUser; + try { + key = require('../../data/auth.json'); + } catch (e) { + if (i === 0) { + i++; + } + } + + if (palmKey === 'user_provided') { + palmUser = true; + if (i <= 1) { + i++; + } + } + + const tools = await addOpenAPISpecs(availableTools); + function transformToolsToMap(tools) { + 
return tools.reduce((map, obj) => { + map[obj.pluginKey] = obj.name; + return map; + }, {}); + } + const plugins = transformToolsToMap(tools); + + const google = key || palmUser ? { userProvide: palmUser } : false; + + const gptPlugins = + openAIApiKey || azureOpenAIApiKey + ? { + plugins, + availableAgents: ['classic', 'functions'], + userProvide: userProvidedOpenAI, + azure: useAzurePlugins, + } + : false; + + res.send( + JSON.stringify({ azureOpenAI, openAI, google, bingAI, chatGPTBrowser, gptPlugins, anthropic }), + ); +} + +module.exports = endpointController; diff --git a/api/server/controllers/ModelController.js b/api/server/controllers/ModelController.js new file mode 100644 index 000000000..1c1b9b9e8 --- /dev/null +++ b/api/server/controllers/ModelController.js @@ -0,0 +1,23 @@ +const { + getOpenAIModels, + getChatGPTBrowserModels, + getAnthropicModels, +} = require('../services/ModelService'); + +const { useAzurePlugins } = require('../services/EndpointService').config; + +async function modelController(req, res) { + const google = ['chat-bison', 'text-bison', 'codechat-bison']; + const openAI = await getOpenAIModels(); + const azureOpenAI = await getOpenAIModels({ azure: true }); + const gptPlugins = await getOpenAIModels({ azure: useAzurePlugins, plugins: true }); + const bingAI = ['BingAI', 'Sydney']; + const chatGPTBrowser = getChatGPTBrowserModels(); + const anthropic = getAnthropicModels(); + + res.send( + JSON.stringify({ azureOpenAI, openAI, google, bingAI, chatGPTBrowser, gptPlugins, anthropic }), + ); +} + +module.exports = modelController; diff --git a/api/server/index.js b/api/server/index.js index 496f0ac42..f7d6cbdd0 100644 --- a/api/server/index.js +++ b/api/server/index.js @@ -60,6 +60,7 @@ const startServer = async () => { app.use('/api/prompts', routes.prompts); app.use('/api/tokenizer', routes.tokenizer); app.use('/api/endpoints', routes.endpoints); + app.use('/api/models', routes.models); app.use('/api/plugins', routes.plugins); app.use('/api/config', routes.config); diff --git a/api/server/routes/ask/askChatGPTBrowser.js b/api/server/routes/ask/askChatGPTBrowser.js index 1c916265c..c3a9d56f1 100644 --- a/api/server/routes/ask/askChatGPTBrowser.js +++ b/api/server/routes/ask/askChatGPTBrowser.js @@ -41,10 +41,6 @@ router.post('/', setHeaders, async (req, res) => { key: req.body?.key ?? null, }; - // const availableModels = getChatGPTBrowserModels(); - // if (availableModels.find((model) => model === endpointOption.model) === undefined) - // return handleError(res, { text: 'Illegal request: model' }); - console.log('ask log', { userMessage, endpointOption, diff --git a/api/server/routes/endpoints.js b/api/server/routes/endpoints.js index dc5533c8b..a75c2e2f9 100644 --- a/api/server/routes/endpoints.js +++ b/api/server/routes/endpoints.js @@ -1,188 +1,7 @@ -const axios = require('axios'); const express = require('express'); const router = express.Router(); -const { availableTools } = require('../../app/clients/tools'); -const { addOpenAPISpecs } = require('../../app/clients/tools/util/addOpenAPISpecs'); -// const { getAzureCredentials, genAzureChatCompletion } = require('../../utils/'); +const endpointController = require('../controllers/EndpointController'); -const openAIApiKey = process.env.OPENAI_API_KEY; -const azureOpenAIApiKey = process.env.AZURE_API_KEY; -const useAzurePlugins = !!process.env.PLUGINS_USE_AZURE; -const userProvidedOpenAI = useAzurePlugins - ? 
azureOpenAIApiKey === 'user_provided' - : openAIApiKey === 'user_provided'; +router.get('/', endpointController); -const fetchOpenAIModels = async (opts = { azure: false, plugins: false }, _models = []) => { - let models = _models.slice() ?? []; - let apiKey = openAIApiKey; - let basePath = 'https://api.openai.com/v1'; - if (opts.azure) { - return models; - // const azure = getAzureCredentials(); - // basePath = (genAzureChatCompletion(azure)) - // .split('/deployments')[0] - // .concat(`/models?api-version=${azure.azureOpenAIApiVersion}`); - // apiKey = azureOpenAIApiKey; - } - - const reverseProxyUrl = process.env.OPENAI_REVERSE_PROXY; - if (reverseProxyUrl) { - basePath = reverseProxyUrl.match(/.*v1/)[0]; - } - - if (basePath.includes('v1') || opts.azure) { - try { - const res = await axios.get(`${basePath}${opts.azure ? '' : '/models'}`, { - headers: { - Authorization: `Bearer ${apiKey}`, - }, - }); - - models = res.data.data.map((item) => item.id); - // console.log(`Fetched ${models.length} models from ${opts.azure ? 'Azure ' : ''}OpenAI API`); - } catch (err) { - console.log(`Failed to fetch models from ${opts.azure ? 'Azure ' : ''}OpenAI API`); - } - } - - if (!reverseProxyUrl) { - const regex = /(text-davinci-003|gpt-)/; - models = models.filter((model) => regex.test(model)); - } - return models; -}; - -const getOpenAIModels = async (opts = { azure: false, plugins: false }) => { - let models = [ - 'gpt-4', - 'gpt-4-0613', - 'gpt-3.5-turbo', - 'gpt-3.5-turbo-16k', - 'gpt-3.5-turbo-0613', - 'gpt-3.5-turbo-0301', - ]; - - if (!opts.plugins) { - models.push('text-davinci-003'); - } - - let key; - if (opts.azure) { - key = 'AZURE_OPENAI_MODELS'; - } else if (opts.plugins) { - key = 'PLUGIN_MODELS'; - } else { - key = 'OPENAI_MODELS'; - } - - if (process.env[key]) { - models = String(process.env[key]).split(','); - return models; - } - - if (userProvidedOpenAI) { - return models; - } - - models = await fetchOpenAIModels(opts, models); - return models; -}; - -const getChatGPTBrowserModels = () => { - let models = ['text-davinci-002-render-sha', 'gpt-4']; - if (process.env.CHATGPT_MODELS) { - models = String(process.env.CHATGPT_MODELS).split(','); - } - - return models; -}; -const getAnthropicModels = () => { - let models = [ - 'claude-1', - 'claude-1-100k', - 'claude-instant-1', - 'claude-instant-1-100k', - 'claude-2', - ]; - if (process.env.ANTHROPIC_MODELS) { - models = String(process.env.ANTHROPIC_MODELS).split(','); - } - - return models; -}; - -let i = 0; -router.get('/', async function (req, res) { - let key, palmUser; - try { - key = require('../../data/auth.json'); - } catch (e) { - if (i === 0) { - i++; - } - } - - if (process.env.PALM_KEY === 'user_provided') { - palmUser = true; - if (i <= 1) { - i++; - } - } - - const tools = await addOpenAPISpecs(availableTools); - function transformToolsToMap(tools) { - return tools.reduce((map, obj) => { - map[obj.pluginKey] = obj.name; - return map; - }, {}); - } - const plugins = transformToolsToMap(tools); - - const google = - key || palmUser - ? { userProvide: palmUser, availableModels: ['chat-bison', 'text-bison', 'codechat-bison'] } - : false; - const openAI = openAIApiKey - ? { availableModels: await getOpenAIModels(), userProvide: openAIApiKey === 'user_provided' } - : false; - const azureOpenAI = azureOpenAIApiKey - ? { - availableModels: await getOpenAIModels({ azure: true }), - userProvide: azureOpenAIApiKey === 'user_provided', - } - : false; - const gptPlugins = - openAIApiKey || azureOpenAIApiKey - ? 
{ - availableModels: await getOpenAIModels({ azure: useAzurePlugins, plugins: true }), - plugins, - availableAgents: ['classic', 'functions'], - userProvide: userProvidedOpenAI, - azure: useAzurePlugins, - } - : false; - const bingAI = process.env.BINGAI_TOKEN - ? { - availableModels: ['BingAI', 'Sydney'], - userProvide: process.env.BINGAI_TOKEN == 'user_provided', - } - : false; - const chatGPTBrowser = process.env.CHATGPT_TOKEN - ? { - userProvide: process.env.CHATGPT_TOKEN == 'user_provided', - availableModels: getChatGPTBrowserModels(), - } - : false; - const anthropic = process.env.ANTHROPIC_API_KEY - ? { - userProvide: process.env.ANTHROPIC_API_KEY == 'user_provided', - availableModels: getAnthropicModels(), - } - : false; - - res.send( - JSON.stringify({ azureOpenAI, openAI, google, bingAI, chatGPTBrowser, gptPlugins, anthropic }), - ); -}); - -module.exports = { router, getOpenAIModels, getChatGPTBrowserModels }; +module.exports = router; diff --git a/api/server/routes/endpoints/gptPlugins/initializeClient.js b/api/server/routes/endpoints/gptPlugins/initializeClient.js index 428f612a0..cf5af8c82 100644 --- a/api/server/routes/endpoints/gptPlugins/initializeClient.js +++ b/api/server/routes/endpoints/gptPlugins/initializeClient.js @@ -1,13 +1,20 @@ const { PluginsClient } = require('../../../../app'); +const { isEnabled } = require('../../../utils'); const { getAzureCredentials } = require('../../../../utils'); const { getUserKey, checkUserKeyExpiry } = require('../../../services/UserService'); const initializeClient = async (req, endpointOption) => { - const { PROXY, OPENAI_API_KEY, AZURE_API_KEY, PLUGINS_USE_AZURE, OPENAI_REVERSE_PROXY } = - process.env; + const { + PROXY, + OPENAI_API_KEY, + AZURE_API_KEY, + PLUGINS_USE_AZURE, + OPENAI_REVERSE_PROXY, + DEBUG_PLUGINS, + } = process.env; const { key: expiresAt } = req.body; const clientOptions = { - // debug: true, + debug: isEnabled(DEBUG_PLUGINS), reverseProxyUrl: OPENAI_REVERSE_PROXY ?? null, proxy: PROXY ?? null, ...endpointOption, diff --git a/api/server/routes/index.js b/api/server/routes/index.js index 2d1315839..b7a267b7c 100644 --- a/api/server/routes/index.js +++ b/api/server/routes/index.js @@ -9,7 +9,8 @@ const tokenizer = require('./tokenizer'); const auth = require('./auth'); const keys = require('./keys'); const oauth = require('./oauth'); -const { router: endpoints } = require('./endpoints'); +const endpoints = require('./endpoints'); +const models = require('./models'); const plugins = require('./plugins'); const user = require('./user'); const config = require('./config'); @@ -28,6 +29,7 @@ module.exports = { user, tokenizer, endpoints, + models, plugins, config, }; diff --git a/api/server/routes/models.js b/api/server/routes/models.js new file mode 100644 index 000000000..196bd5f11 --- /dev/null +++ b/api/server/routes/models.js @@ -0,0 +1,7 @@ +const express = require('express'); +const router = express.Router(); +const modelController = require('../controllers/ModelController'); + +router.get('/', modelController); + +module.exports = router; diff --git a/api/server/services/EndpointService.js b/api/server/services/EndpointService.js new file mode 100644 index 000000000..67c669a70 --- /dev/null +++ b/api/server/services/EndpointService.js @@ -0,0 +1,40 @@ +const { + OPENAI_API_KEY: openAIApiKey, + AZURE_API_KEY: azureOpenAIApiKey, + ANTHROPIC_API_KEY: anthropicApiKey, + CHATGPT_TOKEN: chatGPTToken, + BINGAI_TOKEN: bingToken, + PLUGINS_USE_AZURE, + PALM_KEY: palmKey, +} = process.env ?? 
{}; + +const useAzurePlugins = !!PLUGINS_USE_AZURE; + +const userProvidedOpenAI = useAzurePlugins + ? azureOpenAIApiKey === 'user_provided' + : openAIApiKey === 'user_provided'; + +function isUserProvided(key) { + return key ? { userProvide: key === 'user_provided' } : false; +} + +const openAI = isUserProvided(openAIApiKey); +const azureOpenAI = isUserProvided(azureOpenAIApiKey); +const bingAI = isUserProvided(bingToken); +const chatGPTBrowser = isUserProvided(chatGPTToken); +const anthropic = isUserProvided(anthropicApiKey); + +module.exports = { + config: { + openAIApiKey, + azureOpenAIApiKey, + useAzurePlugins, + userProvidedOpenAI, + palmKey, + openAI, + azureOpenAI, + chatGPTBrowser, + anthropic, + bingAI, + }, +}; diff --git a/api/server/services/ModelService.js b/api/server/services/ModelService.js new file mode 100644 index 000000000..a91be3510 --- /dev/null +++ b/api/server/services/ModelService.js @@ -0,0 +1,127 @@ +const Keyv = require('keyv'); +const axios = require('axios'); +// const { getAzureCredentials, genAzureChatCompletion } = require('../../utils/'); +const { openAIApiKey, userProvidedOpenAI } = require('./EndpointService').config; + +const modelsCache = new Keyv({ namespace: 'models' }); + +const { OPENROUTER_API_KEY, OPENAI_REVERSE_PROXY, CHATGPT_MODELS, ANTHROPIC_MODELS } = + process.env ?? {}; + +const fetchOpenAIModels = async (opts = { azure: false, plugins: false }, _models = []) => { + let models = _models.slice() ?? []; + let apiKey = openAIApiKey; + let basePath = 'https://api.openai.com/v1'; + if (opts.azure) { + return models; + // const azure = getAzureCredentials(); + // basePath = (genAzureChatCompletion(azure)) + // .split('/deployments')[0] + // .concat(`/models?api-version=${azure.azureOpenAIApiVersion}`); + // apiKey = azureOpenAIApiKey; + } + + let reverseProxyUrl = OPENAI_REVERSE_PROXY; + + if (OPENROUTER_API_KEY) { + reverseProxyUrl = 'https://openrouter.ai/api/v1'; + } + + if (reverseProxyUrl) { + basePath = reverseProxyUrl.match(/.*v1/)[0]; + } + + const cachedModels = await modelsCache.get(basePath); + if (cachedModels) { + return cachedModels; + } + + if (basePath.includes('v1') || opts.azure) { + try { + const res = await axios.get(`${basePath}${opts.azure ? '' : '/models'}`, { + headers: { + Authorization: `Bearer ${apiKey}`, + }, + }); + + models = res.data.data.map((item) => item.id); + // console.log(`Fetched ${models.length} models from ${opts.azure ? 'Azure ' : ''}OpenAI API`); + } catch (err) { + console.log(`Failed to fetch models from ${opts.azure ? 
'Azure ' : ''}OpenAI API`); + } + } + + if (!reverseProxyUrl) { + const regex = /(text-davinci-003|gpt-)/; + models = models.filter((model) => regex.test(model)); + } + + await modelsCache.set(basePath, models); + return models; +}; + +const getOpenAIModels = async (opts = { azure: false, plugins: false }) => { + let models = [ + 'gpt-4', + 'gpt-4-0613', + 'gpt-3.5-turbo', + 'gpt-3.5-turbo-16k', + 'gpt-3.5-turbo-0613', + 'gpt-3.5-turbo-0301', + ]; + + if (!opts.plugins) { + models.push('text-davinci-003'); + } + + let key; + if (opts.azure) { + key = 'AZURE_OPENAI_MODELS'; + } else if (opts.plugins) { + key = 'PLUGIN_MODELS'; + } else { + key = 'OPENAI_MODELS'; + } + + if (process.env[key]) { + models = String(process.env[key]).split(','); + return models; + } + + if (userProvidedOpenAI) { + return models; + } + + models = await fetchOpenAIModels(opts, models); + return models; +}; + +const getChatGPTBrowserModels = () => { + let models = ['text-davinci-002-render-sha', 'gpt-4']; + if (CHATGPT_MODELS) { + models = String(CHATGPT_MODELS).split(','); + } + + return models; +}; + +const getAnthropicModels = () => { + let models = [ + 'claude-1', + 'claude-1-100k', + 'claude-instant-1', + 'claude-instant-1-100k', + 'claude-2', + ]; + if (ANTHROPIC_MODELS) { + models = String(ANTHROPIC_MODELS).split(','); + } + + return models; +}; + +module.exports = { + getOpenAIModels, + getChatGPTBrowserModels, + getAnthropicModels, +}; diff --git a/client/src/components/Conversations/Conversation.jsx b/client/src/components/Conversations/Conversation.jsx index af791c6ad..f301444c9 100644 --- a/client/src/components/Conversations/Conversation.jsx +++ b/client/src/components/Conversations/Conversation.jsx @@ -4,15 +4,15 @@ import { useUpdateConversationMutation } from 'librechat-data-provider'; import RenameButton from './RenameButton'; import DeleteButton from './DeleteButton'; import ConvoIcon from '../svg/ConvoIcon'; - +import { useConversations, useConversation } from '~/hooks'; import store from '~/store'; export default function Conversation({ conversation, retainView }) { const [currentConversation, setCurrentConversation] = useRecoilState(store.conversation); const setSubmission = useSetRecoilState(store.submission); - const { refreshConversations } = store.useConversations(); - const { switchToConversation } = store.useConversation(); + const { refreshConversations } = useConversations(); + const { switchToConversation } = useConversation(); const updateConvoMutation = useUpdateConversationMutation(currentConversation?.conversationId); diff --git a/client/src/components/Conversations/DeleteButton.jsx b/client/src/components/Conversations/DeleteButton.jsx index 282099bee..fec6a6d24 100644 --- a/client/src/components/Conversations/DeleteButton.jsx +++ b/client/src/components/Conversations/DeleteButton.jsx @@ -5,14 +5,14 @@ import { useRecoilValue } from 'recoil'; import { useDeleteConversationMutation } from 'librechat-data-provider'; import { Dialog, DialogTrigger, Label } from '~/components/ui/'; import DialogTemplate from '~/components/ui/DialogTemplate'; +import { useLocalize, useConversations, useConversation } from '~/hooks'; import store from '~/store'; -import { useLocalize } from '~/hooks'; export default function DeleteButton({ conversationId, renaming, retainView, title }) { const localize = useLocalize(); const currentConversation = useRecoilValue(store.conversation) || {}; - const { newConversation } = store.useConversation(); - const { refreshConversations } = store.useConversations(); + 
const { newConversation } = useConversation(); + const { refreshConversations } = useConversations(); const confirmDelete = () => { deleteConvoMutation.mutate({ conversationId, source: 'button' }); diff --git a/client/src/components/Endpoints/EditPresetDialog.tsx b/client/src/components/Endpoints/EditPresetDialog.tsx index 2e11d7851..ed87f981f 100644 --- a/client/src/components/Endpoints/EditPresetDialog.tsx +++ b/client/src/components/Endpoints/EditPresetDialog.tsx @@ -16,7 +16,6 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }: TEditP const [preset, setPreset] = useRecoilState(store.preset); const setPresets = useSetRecoilState(store.presets); const availableEndpoints = useRecoilValue(store.availableEndpoints); - const endpointsConfig = useRecoilValue(store.endpointsConfig); const { setOption } = useSetOptions(_preset); const localize = useLocalize(); @@ -27,7 +26,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }: TEditP axios({ method: 'post', url: '/api/presets', - data: cleanupPreset({ preset, endpointsConfig }), + data: cleanupPreset({ preset }), withCredentials: true, }).then((res) => { setPresets(res?.data); @@ -40,7 +39,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }: TEditP } const fileName = filenamify(preset?.title || 'preset'); exportFromJSON({ - data: cleanupPreset({ preset, endpointsConfig }), + data: cleanupPreset({ preset }), fileName, exportType: exportFromJSON.types.json, }); diff --git a/client/src/components/Endpoints/EndpointOptionsDialog.tsx b/client/src/components/Endpoints/EndpointOptionsDialog.tsx index 8182ec1f4..d25036ed1 100644 --- a/client/src/components/Endpoints/EndpointOptionsDialog.tsx +++ b/client/src/components/Endpoints/EndpointOptionsDialog.tsx @@ -1,6 +1,6 @@ import exportFromJSON from 'export-from-json'; import { useEffect, useState } from 'react'; -import { useRecoilValue, useRecoilState } from 'recoil'; +import { useRecoilState } from 'recoil'; import { tPresetSchema } from 'librechat-data-provider'; import type { TSetOption, TEditPresetProps } from '~/common'; import { Dialog, DialogButton } from '~/components/ui'; @@ -21,7 +21,6 @@ const EndpointOptionsDialog = ({ }: TEditPresetProps) => { const [preset, setPreset] = useRecoilState(store.preset); const [saveAsDialogShow, setSaveAsDialogShow] = useState(false); - const endpointsConfig = useRecoilValue(store.endpointsConfig); const localize = useLocalize(); const setOption: TSetOption = (param) => (newValue) => { @@ -44,7 +43,7 @@ const EndpointOptionsDialog = ({ return; } exportFromJSON({ - data: cleanupPreset({ preset, endpointsConfig }), + data: cleanupPreset({ preset }), fileName: `${preset?.title}.json`, exportType: exportFromJSON.types.json, }); diff --git a/client/src/components/Endpoints/EndpointSettings.tsx b/client/src/components/Endpoints/EndpointSettings.tsx index 68512d016..5ab452828 100644 --- a/client/src/components/Endpoints/EndpointSettings.tsx +++ b/client/src/components/Endpoints/EndpointSettings.tsx @@ -23,13 +23,13 @@ export default function Settings({ isPreset = false, className = '', }: TSettingsProps) { - const endpointsConfig = useRecoilValue(store.endpointsConfig); + const modelsConfig = useRecoilValue(store.modelsConfig); if (!conversation?.endpoint) { return null; } const { endpoint } = conversation; - const models = endpointsConfig?.[endpoint]?.['availableModels'] || []; + const models = modelsConfig?.[endpoint] ?? 
[]; const OptionComponent = optionComponents[endpoint]; if (OptionComponent) { diff --git a/client/src/components/Endpoints/SaveAsPresetDialog.tsx b/client/src/components/Endpoints/SaveAsPresetDialog.tsx index 4f0b6c1a4..98bc8bed0 100644 --- a/client/src/components/Endpoints/SaveAsPresetDialog.tsx +++ b/client/src/components/Endpoints/SaveAsPresetDialog.tsx @@ -1,16 +1,13 @@ import React, { useEffect, useState } from 'react'; -import { useRecoilValue } from 'recoil'; import { useCreatePresetMutation } from 'librechat-data-provider'; import type { TEditPresetProps } from '~/common'; import { Dialog, Input, Label } from '~/components/ui/'; import DialogTemplate from '~/components/ui/DialogTemplate'; import { cn, defaultTextPropsLabel, removeFocusOutlines, cleanupPreset } from '~/utils/'; import { useLocalize } from '~/hooks'; -import store from '~/store'; const SaveAsPresetDialog = ({ open, onOpenChange, preset }: TEditPresetProps) => { const [title, setTitle] = useState(preset?.title || 'My Preset'); - const endpointsConfig = useRecoilValue(store.endpointsConfig); const createPresetMutation = useCreatePresetMutation(); const localize = useLocalize(); @@ -20,7 +17,6 @@ const SaveAsPresetDialog = ({ open, onOpenChange, preset }: TEditPresetProps) => ...preset, title, }, - endpointsConfig, }); createPresetMutation.mutate(_preset); }; diff --git a/client/src/components/Input/EndpointMenu/EndpointMenu.jsx b/client/src/components/Input/EndpointMenu/EndpointMenu.jsx index 3e4493308..b620be91e 100644 --- a/client/src/components/Input/EndpointMenu/EndpointMenu.jsx +++ b/client/src/components/Input/EndpointMenu/EndpointMenu.jsx @@ -23,12 +23,13 @@ import { TooltipContent, } from '~/components/ui/'; import DialogTemplate from '~/components/ui/DialogTemplate'; -import { cn, cleanupPreset, getDefaultConversation } from '~/utils'; -import { useLocalize, useLocalStorage } from '~/hooks'; +import { cn, cleanupPreset } from '~/utils'; +import { useLocalize, useLocalStorage, useConversation, useDefaultConvo } from '~/hooks'; import store from '~/store'; export default function NewConversationMenu() { const localize = useLocalize(); + const getDefaultConversation = useDefaultConvo(); const [menuOpen, setMenuOpen] = useState(false); const [showPresets, setShowPresets] = useState(true); const [showEndpoints, setShowEndpoints] = useState(true); @@ -37,12 +38,12 @@ export default function NewConversationMenu() { const [conversation, setConversation] = useRecoilState(store.conversation) ?? 
{}; const [messages, setMessages] = useRecoilState(store.messages); const availableEndpoints = useRecoilValue(store.availableEndpoints); - const endpointsConfig = useRecoilValue(store.endpointsConfig); + const [presets, setPresets] = useRecoilState(store.presets); const modularEndpoints = new Set(['gptPlugins', 'anthropic', 'google', 'openAI']); - const { endpoint, conversationId } = conversation; - const { newConversation } = store.useConversation(); + const { endpoint } = conversation; + const { newConversation } = useConversation(); const deletePresetsMutation = useDeletePresetMutation(); const createPresetMutation = useCreatePresetMutation(); @@ -62,19 +63,10 @@ export default function NewConversationMenu() { }; const onFileSelected = (jsonData) => { - const jsonPreset = { ...cleanupPreset({ preset: jsonData, endpointsConfig }), presetId: null }; + const jsonPreset = { ...cleanupPreset({ preset: jsonData }), presetId: null }; importPreset(jsonPreset); }; - // update the default model when availableModels changes - // typically, availableModels changes => modelsFilter or customGPTModels changes - useEffect(() => { - const isInvalidConversation = !availableEndpoints.find((e) => e === endpoint); - if (conversationId == 'new' && isInvalidConversation) { - newConversation(); - } - }, [availableEndpoints]); - // save states to localStorage const [newUser, setNewUser] = useLocalStorage('newUser', true); const [lastModel, setLastModel] = useLocalStorage('lastSelectedModel', {}); @@ -82,7 +74,12 @@ export default function NewConversationMenu() { const [lastBingSettings, setLastBingSettings] = useLocalStorage('lastBingSettings', {}); useEffect(() => { if (endpoint && endpoint !== 'bingAI') { - setLastModel({ ...lastModel, [endpoint]: conversation?.model }), setLastConvo(conversation); + const lastModelUpdate = { ...lastModel, [endpoint]: conversation?.model }; + if (endpoint === 'gptPlugins') { + lastModelUpdate.secondaryModel = conversation.agentOptions.model; + } + setLastModel(lastModelUpdate); + setLastConvo(conversation); } else if (endpoint === 'bingAI') { const { jailbreak, toneStyle } = conversation; setLastBingSettings({ ...lastBingSettings, jailbreak, toneStyle }); @@ -114,7 +111,6 @@ export default function NewConversationMenu() { ) { const currentConvo = getDefaultConversation({ conversation, - endpointsConfig, preset: newPreset, }); diff --git a/client/src/components/Input/ModelSelect/ModelSelect.tsx b/client/src/components/Input/ModelSelect/ModelSelect.tsx index a7f893cc8..87afa32fc 100644 --- a/client/src/components/Input/ModelSelect/ModelSelect.tsx +++ b/client/src/components/Input/ModelSelect/ModelSelect.tsx @@ -32,14 +32,14 @@ const optionComponents: { [key: string]: React.FC } = { }; export default function ModelSelect({ conversation, setOption }: TSelectProps) { - const endpointsConfig = useRecoilValue(store.endpointsConfig); + const modelsConfig = useRecoilValue(store.modelsConfig); if (!conversation?.endpoint) { return null; } const { endpoint } = conversation; const OptionComponent = optionComponents[endpoint]; - const models = endpointsConfig?.[endpoint]?.['availableModels'] ?? []; + const models = modelsConfig?.[endpoint] ?? 
[]; if (!OptionComponent) { return null; diff --git a/client/src/components/Messages/Message.tsx b/client/src/components/Messages/Message.tsx index 0405132f8..f84ee5ade 100644 --- a/client/src/components/Messages/Message.tsx +++ b/client/src/components/Messages/Message.tsx @@ -9,7 +9,7 @@ import MultiMessage from './MultiMessage'; import HoverButtons from './HoverButtons'; import SiblingSwitch from './SiblingSwitch'; import { getIcon } from '~/components/Endpoints'; -import { useMessageHandler } from '~/hooks'; +import { useMessageHandler, useConversation } from '~/hooks'; import type { TMessageProps } from '~/common'; import { cn } from '~/utils'; import store from '~/store'; @@ -27,7 +27,7 @@ export default function Message({ const setLatestMessage = useSetRecoilState(store.latestMessage); const [abortScroll, setAbort] = useState(false); const { isSubmitting, ask, regenerate, handleContinue } = useMessageHandler(); - const { switchToConversation } = store.useConversation(); + const { switchToConversation } = useConversation(); const { text, children, diff --git a/client/src/components/Nav/ClearConvos.tsx b/client/src/components/Nav/ClearConvos.tsx index fa9844974..0d76d8bb9 100644 --- a/client/src/components/Nav/ClearConvos.tsx +++ b/client/src/components/Nav/ClearConvos.tsx @@ -3,12 +3,11 @@ import { Dialog } from '~/components/ui/'; import DialogTemplate from '~/components/ui/DialogTemplate'; import { ClearChatsButton } from './SettingsTabs/'; import { useClearConversationsMutation } from 'librechat-data-provider'; -import store from '~/store'; -import { useLocalize } from '~/hooks'; +import { useLocalize, useConversation, useConversations } from '~/hooks'; const ClearConvos = ({ open, onOpenChange }) => { - const { newConversation } = store.useConversation(); - const { refreshConversations } = store.useConversations(); + const { newConversation } = useConversation(); + const { refreshConversations } = useConversations(); const clearConvosMutation = useClearConversationsMutation(); const [confirmClear, setConfirmClear] = useState(false); const localize = useLocalize(); diff --git a/client/src/components/Nav/ExportConversation/ExportModel.jsx b/client/src/components/Nav/ExportConversation/ExportModel.jsx index b7628fdaa..fa85a4bd7 100644 --- a/client/src/components/Nav/ExportConversation/ExportModel.jsx +++ b/client/src/components/Nav/ExportConversation/ExportModel.jsx @@ -22,7 +22,6 @@ export default function ExportModel({ open, onOpenChange }) { const conversation = useRecoilValue(store.conversation) || {}; const messagesTree = useRecoilValue(store.messagesTree) || []; - const endpointsConfig = useRecoilValue(store.endpointsConfig); const getSiblingIdx = useRecoilCallback( ({ snapshot }) => @@ -197,7 +196,7 @@ export default function ExportModel({ open, onOpenChange }) { if (includeOptions) { data += '\n## Options\n'; - const options = cleanupPreset({ preset: conversation, endpointsConfig }); + const options = cleanupPreset({ preset: conversation }); for (const key of Object.keys(options)) { data += `- ${key}: ${options[key]}\n`; @@ -246,7 +245,7 @@ export default function ExportModel({ open, onOpenChange }) { if (includeOptions) { data += '\nOptions\n########################\n'; - const options = cleanupPreset({ preset: conversation, endpointsConfig }); + const options = cleanupPreset({ preset: conversation }); for (const key of Object.keys(options)) { data += `${key}: ${options[key]}\n`; @@ -295,7 +294,7 @@ export default function ExportModel({ open, onOpenChange }) { }; if 
(includeOptions) { - data.options = cleanupPreset({ preset: conversation, endpointsConfig }); + data.options = cleanupPreset({ preset: conversation }); } const messages = await buildMessageTree({ diff --git a/client/src/components/Nav/MobileNav.jsx b/client/src/components/Nav/MobileNav.jsx index be1d14c48..8c7650ad2 100644 --- a/client/src/components/Nav/MobileNav.jsx +++ b/client/src/components/Nav/MobileNav.jsx @@ -1,11 +1,11 @@ import React from 'react'; import { useRecoilValue } from 'recoil'; +import { useLocalize, useConversation } from '~/hooks'; import store from '~/store'; -import { useLocalize } from '~/hooks'; export default function MobileNav({ setNavVisible }) { const conversation = useRecoilValue(store.conversation); - const { newConversation } = store.useConversation(); + const { newConversation } = useConversation(); const { title = 'New Chat' } = conversation || {}; const localize = useLocalize(); diff --git a/client/src/components/Nav/Nav.tsx b/client/src/components/Nav/Nav.tsx index 00c7eab91..c51273d89 100644 --- a/client/src/components/Nav/Nav.tsx +++ b/client/src/components/Nav/Nav.tsx @@ -11,7 +11,14 @@ import SearchBar from './SearchBar'; import NavLinks from './NavLinks'; import { Panel, Spinner } from '~/components'; import { Conversations, Pages } from '../Conversations'; -import { useAuthContext, useDebounce, useMediaQuery, useLocalize } from '~/hooks'; +import { + useAuthContext, + useDebounce, + useMediaQuery, + useLocalize, + useConversation, + useConversations, +} from '~/hooks'; import { cn } from '~/utils/'; import store from '~/store'; @@ -47,14 +54,14 @@ export default function Nav({ navVisible, setNavVisible }) { const searchQuery = useRecoilValue(store.searchQuery); const isSearchEnabled = useRecoilValue(store.isSearchEnabled); const isSearching = useRecoilValue(store.isSearching); - const { newConversation, searchPlaceholderConversation } = store.useConversation(); + const { newConversation, searchPlaceholderConversation } = useConversation(); // current conversation const conversation = useRecoilValue(store.conversation); const { conversationId } = conversation || {}; const setSearchResultMessages = useSetRecoilState(store.searchResultMessages); const refreshConversationsHint = useRecoilValue(store.refreshConversationsHint); - const { refreshConversations } = store.useConversations(); + const { refreshConversations } = useConversations(); const [isFetching, setIsFetching] = useState(false); diff --git a/client/src/components/Nav/NewChat.jsx b/client/src/components/Nav/NewChat.jsx index 9a2f43025..20aa9d5ca 100644 --- a/client/src/components/Nav/NewChat.jsx +++ b/client/src/components/Nav/NewChat.jsx @@ -1,9 +1,8 @@ import React from 'react'; -import store from '~/store'; -import { useLocalize } from '~/hooks'; +import { useLocalize, useConversation } from '~/hooks'; export default function NewChat() { - const { newConversation } = store.useConversation(); + const { newConversation } = useConversation(); const localize = useLocalize(); const clickHandler = () => { diff --git a/client/src/components/Nav/SettingsTabs/General.tsx b/client/src/components/Nav/SettingsTabs/General.tsx index db336c60a..c97dceeb1 100644 --- a/client/src/components/Nav/SettingsTabs/General.tsx +++ b/client/src/components/Nav/SettingsTabs/General.tsx @@ -2,7 +2,13 @@ import { useRecoilState } from 'recoil'; import * as Tabs from '@radix-ui/react-tabs'; import React, { useState, useContext, useEffect, useCallback, useRef } from 'react'; import { useClearConversationsMutation } 
from 'librechat-data-provider'; -import { ThemeContext, useLocalize, useOnClickOutside } from '~/hooks'; +import { + ThemeContext, + useLocalize, + useOnClickOutside, + useConversation, + useConversations, +} from '~/hooks'; import type { TDangerButtonProps } from '~/common'; import DangerButton from './DangerButton'; import store from '~/store'; @@ -87,7 +93,6 @@ export const LangSelector = ({ - ); @@ -98,8 +103,8 @@ function General() { const clearConvosMutation = useClearConversationsMutation(); const [confirmClear, setConfirmClear] = useState(false); const [langcode, setLangcode] = useRecoilState(store.lang); - const { newConversation } = store.useConversation(); - const { refreshConversations } = store.useConversations(); + const { newConversation } = useConversation(); + const { refreshConversations } = useConversations(); const contentRef = useRef(null); useOnClickOutside(contentRef, () => confirmClear && setConfirmClear(false), []); diff --git a/client/src/hooks/AuthContext.tsx b/client/src/hooks/AuthContext.tsx index 6f67df8d9..9bbcd83db 100644 --- a/client/src/hooks/AuthContext.tsx +++ b/client/src/hooks/AuthContext.tsx @@ -95,7 +95,7 @@ const AuthContextProvider = ({ }); }, }); - }, [setUserContext, logoutUser]); + }, [setUserContext, doSetError, logoutUser]); const silentRefresh = useCallback(() => { refreshToken.mutate(undefined, { diff --git a/client/src/hooks/index.ts b/client/src/hooks/index.ts index 25f2755a9..ebc72fcbf 100644 --- a/client/src/hooks/index.ts +++ b/client/src/hooks/index.ts @@ -11,6 +11,9 @@ export { default as useSetOptions } from './useSetOptions'; export { default as useGenerations } from './useGenerations'; export { default as useScrollToRef } from './useScrollToRef'; export { default as useLocalStorage } from './useLocalStorage'; +export { default as useConversation } from './useConversation'; +export { default as useDefaultConvo } from './useDefaultConvo'; export { default as useServerStream } from './useServerStream'; +export { default as useConversations } from './useConversations'; export { default as useOnClickOutside } from './useOnClickOutside'; export { default as useMessageHandler } from './useMessageHandler'; diff --git a/client/src/hooks/useConversation.ts b/client/src/hooks/useConversation.ts new file mode 100644 index 000000000..e97fefc75 --- /dev/null +++ b/client/src/hooks/useConversation.ts @@ -0,0 +1,85 @@ +import { useCallback } from 'react'; +import { useSetRecoilState, useResetRecoilState, useRecoilCallback, useRecoilValue } from 'recoil'; +import { TConversation, TMessagesAtom, TSubmission, TPreset } from 'librechat-data-provider'; +import { buildDefaultConvo, getDefaultEndpoint } from '~/utils'; +import store from '~/store'; + +const useConversation = () => { + const setConversation = useSetRecoilState(store.conversation); + const setMessages = useSetRecoilState(store.messages); + const setSubmission = useSetRecoilState(store.submission); + const resetLatestMessage = useResetRecoilState(store.latestMessage); + const endpointsConfig = useRecoilValue(store.endpointsConfig); + + const switchToConversation = useRecoilCallback( + ({ snapshot }) => + async ( + conversation: TConversation, + messages: TMessagesAtom = null, + preset: TPreset | null = null, + ) => { + const modelsConfig = snapshot.getLoadable(store.modelsConfig).contents; + const { endpoint = null } = conversation; + + if (endpoint === null) { + const defaultEndpoint = getDefaultEndpoint({ + convoSetup: preset ?? 
conversation, + endpointsConfig, + }); + + const models = modelsConfig?.[defaultEndpoint] ?? []; + conversation = buildDefaultConvo({ + conversation, + lastConversationSetup: preset as TConversation, + endpoint: defaultEndpoint, + models, + }); + } + + setConversation(conversation); + setMessages(messages); + setSubmission({} as TSubmission); + resetLatestMessage(); + }, + [endpointsConfig], + ); + + const newConversation = useCallback( + (template = {}, preset?: TPreset) => { + switchToConversation( + { + conversationId: 'new', + title: 'New Chat', + ...template, + endpoint: null, + createdAt: '', + updatedAt: '', + }, + [], + preset, + ); + }, + [switchToConversation], + ); + + const searchPlaceholderConversation = useCallback(() => { + switchToConversation( + { + conversationId: 'search', + title: 'Search', + endpoint: null, + createdAt: '', + updatedAt: '', + }, + [], + ); + }, [switchToConversation]); + + return { + switchToConversation, + newConversation, + searchPlaceholderConversation, + }; +}; + +export default useConversation; diff --git a/client/src/hooks/useConversations.ts b/client/src/hooks/useConversations.ts new file mode 100644 index 000000000..1f74bf1c7 --- /dev/null +++ b/client/src/hooks/useConversations.ts @@ -0,0 +1,15 @@ +import { useSetRecoilState } from 'recoil'; +import { useCallback } from 'react'; +import store from '~/store'; + +const useConversations = () => { + const setRefreshConversationsHint = useSetRecoilState(store.refreshConversationsHint); + + const refreshConversations = useCallback(() => { + setRefreshConversationsHint((prevState) => prevState + 1); + }, [setRefreshConversationsHint]); + + return { refreshConversations }; +}; + +export default useConversations; diff --git a/client/src/hooks/useDefaultConvo.ts b/client/src/hooks/useDefaultConvo.ts new file mode 100644 index 000000000..c7ef1fd11 --- /dev/null +++ b/client/src/hooks/useDefaultConvo.ts @@ -0,0 +1,30 @@ +import { useRecoilValue } from 'recoil'; +import type { TConversation, TPreset } from 'librechat-data-provider'; +import { getDefaultEndpoint, buildDefaultConvo } from '~/utils'; +import store from '~/store'; + +type TDefaultConvo = { conversation: Partial; preset?: Partial | null }; + +const useDefaultConvo = () => { + const endpointsConfig = useRecoilValue(store.endpointsConfig); + const modelsConfig = useRecoilValue(store.modelsConfig); + + const getDefaultConversation = ({ conversation, preset }: TDefaultConvo) => { + const endpoint = getDefaultEndpoint({ + convoSetup: preset as TPreset, + endpointsConfig, + }); + const models = modelsConfig?.[endpoint] || []; + + return buildDefaultConvo({ + conversation: conversation as TConversation, + endpoint, + lastConversationSetup: preset as TConversation, + models, + }); + }; + + return getDefaultConversation; +}; + +export default useDefaultConvo; diff --git a/client/src/hooks/useMessageHandler.ts b/client/src/hooks/useMessageHandler.ts index 445c40540..d3915b929 100644 --- a/client/src/hooks/useMessageHandler.ts +++ b/client/src/hooks/useMessageHandler.ts @@ -1,7 +1,7 @@ import { v4 } from 'uuid'; import { useRecoilState, useRecoilValue, useSetRecoilState } from 'recoil'; import { parseConvo, getResponseSender } from 'librechat-data-provider'; -import type { TMessage, TSubmission } from 'librechat-data-provider'; +import type { TMessage, TSubmission, TEndpointOption } from 'librechat-data-provider'; import type { TAskFunction } from '~/common'; import useUserKey from './useUserKey'; import store from '~/store'; @@ -54,10 +54,10 @@ const 
useMessageHandler = () => { // set the endpoint option const convo = parseConvo(endpoint, currentConversation); const endpointOption = { - endpoint, ...convo, + endpoint, key: getExpiry(), - }; + } as TEndpointOption; const responseSender = getResponseSender(endpointOption); let currentMessages: TMessage[] | null = messages ?? []; diff --git a/client/src/hooks/usePresetOptions.ts b/client/src/hooks/usePresetOptions.ts index ffa8206d2..a52618553 100644 --- a/client/src/hooks/usePresetOptions.ts +++ b/client/src/hooks/usePresetOptions.ts @@ -23,7 +23,6 @@ const usePresetOptions: TUsePresetOptions = (_preset) => { ...prevState, ...update, }, - endpointsConfig, }), ); }; @@ -41,7 +40,6 @@ const usePresetOptions: TUsePresetOptions = (_preset) => { ...prevState, ...update, }, - endpointsConfig, }), ); }; @@ -57,7 +55,6 @@ const usePresetOptions: TUsePresetOptions = (_preset) => { ...prevState, ...update, }, - endpointsConfig, }), ); }; @@ -73,7 +70,6 @@ const usePresetOptions: TUsePresetOptions = (_preset) => { ...prevState, ...update, }, - endpointsConfig, }), ); return; @@ -86,7 +82,6 @@ const usePresetOptions: TUsePresetOptions = (_preset) => { ...prevState, ...update, }, - endpointsConfig, }), ); }; @@ -101,7 +96,6 @@ const usePresetOptions: TUsePresetOptions = (_preset) => { ...prevState, agentOptions, }, - endpointsConfig, }), ); }; diff --git a/client/src/hooks/useServerStream.ts b/client/src/hooks/useServerStream.ts index 054283376..c4dc33306 100644 --- a/client/src/hooks/useServerStream.ts +++ b/client/src/hooks/useServerStream.ts @@ -3,7 +3,9 @@ import { useResetRecoilState, useSetRecoilState } from 'recoil'; /* @ts-ignore */ import { SSE, createPayload, tMessageSchema, tConversationSchema } from 'librechat-data-provider'; import type { TResPlugin, TMessage, TConversation, TSubmission } from 'librechat-data-provider'; -import { useAuthContext } from '~/hooks/AuthContext'; +import useConversations from './useConversations'; +import { useAuthContext } from './AuthContext'; + import store from '~/store'; type TResData = { @@ -22,7 +24,7 @@ export default function useServerStream(submission: TSubmission | null) { const resetLatestMessage = useResetRecoilState(store.latestMessage); const { token } = useAuthContext(); - const { refreshConversations } = store.useConversations(); + const { refreshConversations } = useConversations(); const messageHandler = (data: string, submission: TSubmission) => { const { diff --git a/client/src/hooks/useTimeout.tsx b/client/src/hooks/useTimeout.tsx index e058e9ca8..2d325940e 100644 --- a/client/src/hooks/useTimeout.tsx +++ b/client/src/hooks/useTimeout.tsx @@ -2,14 +2,14 @@ import { useEffect, useRef } from 'react'; type TUseTimeoutParams = { callback: (error: string | number | boolean | null) => void; - delay?: number | undefined; + delay?: number; }; type TTimeout = ReturnType | null; function useTimeout({ callback, delay = 400 }: TUseTimeoutParams) { const timeout = useRef(null); - const callOnTimeout = (value: string | undefined) => { + const callOnTimeout = (value?: string) => { // Clear existing timeout if (timeout.current !== null) { clearTimeout(timeout.current); diff --git a/client/src/routes/Chat.tsx b/client/src/routes/Chat.tsx index cea4357ab..eb39c987d 100644 --- a/client/src/routes/Chat.tsx +++ b/client/src/routes/Chat.tsx @@ -1,19 +1,19 @@ import { useState, useEffect } from 'react'; -import { useAuthContext } from '~/hooks'; import { useNavigate, useParams } from 'react-router-dom'; import { useRecoilState, useRecoilValue, useSetRecoilState 
} from 'recoil'; - -import Landing from '~/components/ui/Landing'; -import Messages from '~/components/Messages/Messages'; -import TextChat from '~/components/Input/TextChat'; - -import store from '~/store'; import { useGetMessagesByConvoId, useGetConversationByIdMutation, useGetStartupConfig, } from 'librechat-data-provider'; +import Landing from '~/components/ui/Landing'; +import Messages from '~/components/Messages/Messages'; +import TextChat from '~/components/Input/TextChat'; + +import { useAuthContext, useConversation } from '~/hooks'; +import store from '~/store'; + export default function Chat() { const { isAuthenticated } = useAuthContext(); const [shouldNavigate, setShouldNavigate] = useState(true); @@ -22,7 +22,7 @@ export default function Chat() { const setMessages = useSetRecoilState(store.messages); const messagesTree = useRecoilValue(store.messagesTree); const isSubmitting = useRecoilValue(store.isSubmitting); - const { newConversation } = store.useConversation(); + const { newConversation } = useConversation(); const { conversationId } = useParams(); const navigate = useNavigate(); diff --git a/client/src/routes/Root.tsx b/client/src/routes/Root.tsx index 36b9ace6b..3dcf11ea6 100644 --- a/client/src/routes/Root.tsx +++ b/client/src/routes/Root.tsx @@ -4,6 +4,7 @@ import { useRecoilValue, useSetRecoilState } from 'recoil'; import { Outlet } from 'react-router-dom'; import { useGetEndpointsQuery, + useGetModelsQuery, useGetPresetsQuery, useGetSearchEnabledQuery, } from 'librechat-data-provider'; @@ -13,6 +14,7 @@ import { useAuthContext, useServerStream } from '~/hooks'; import store from '~/store'; export default function Root() { + const { user, isAuthenticated } = useAuthContext(); const [navVisible, setNavVisible] = useState(() => { const savedNavVisible = localStorage.getItem('navVisible'); return savedNavVisible !== null ? JSON.parse(savedNavVisible) : false; @@ -21,13 +23,14 @@ export default function Root() { const submission = useRecoilValue(store.submission); useServerStream(submission ?? 
null); + const setPresets = useSetRecoilState(store.presets); const setIsSearchEnabled = useSetRecoilState(store.isSearchEnabled); const setEndpointsConfig = useSetRecoilState(store.endpointsConfig); - const setPresets = useSetRecoilState(store.presets); - const { user, isAuthenticated } = useAuthContext(); + const setModelsConfig = useSetRecoilState(store.modelsConfig); const searchEnabledQuery = useGetSearchEnabledQuery(); const endpointsQuery = useGetEndpointsQuery(); + const modelsQuery = useGetModelsQuery(); const presetsQuery = useGetPresetsQuery({ enabled: !!user }); useEffect(() => { @@ -42,6 +45,14 @@ export default function Root() { } }, [endpointsQuery.data, endpointsQuery.isError]); + useEffect(() => { + if (modelsQuery.data) { + setModelsConfig(modelsQuery.data); + } else if (modelsQuery.isError) { + console.error('Failed to get models', modelsQuery.error); + } + }, [modelsQuery.data, modelsQuery.isError]); + useEffect(() => { if (presetsQuery.data) { setPresets(presetsQuery.data); diff --git a/client/src/routes/Search.tsx b/client/src/routes/Search.tsx index 95d7cc861..2e11e8e56 100644 --- a/client/src/routes/Search.tsx +++ b/client/src/routes/Search.tsx @@ -5,12 +5,13 @@ import { useRecoilState, useRecoilValue } from 'recoil'; import Messages from '~/components/Messages/Messages'; import TextChat from '~/components/Input/TextChat'; +import { useConversation } from '~/hooks'; import store from '~/store'; export default function Search() { const [searchQuery, setSearchQuery] = useRecoilState(store.searchQuery); const conversation = useRecoilValue(store.conversation); - const { searchPlaceholderConversation } = store.useConversation(); + const { searchPlaceholderConversation } = useConversation(); const { query } = useParams(); const navigate = useNavigate(); diff --git a/client/src/store/conversation.ts b/client/src/store/conversation.ts index f938d620d..028459e94 100644 --- a/client/src/store/conversation.ts +++ b/client/src/store/conversation.ts @@ -1,22 +1,6 @@ -import { useCallback } from 'react'; -import { - atom, - selector, - atomFamily, - useSetRecoilState, - useResetRecoilState, - useRecoilCallback, -} from 'recoil'; -import { - TConversation, - TMessagesAtom, - TMessage, - TSubmission, - TPreset, -} from 'librechat-data-provider'; -import { buildTree, getDefaultConversation } from '~/utils'; -import submission from './submission'; -import endpoints from './endpoints'; +import { atom, selector, atomFamily } from 'recoil'; +import { TConversation, TMessagesAtom, TMessage } from 'librechat-data-provider'; +import { buildTree } from '~/utils'; const conversation = atom({ key: 'conversation', @@ -48,94 +32,10 @@ const messagesSiblingIdxFamily = atomFamily({ default: 0, }); -const useConversation = () => { - const setConversation = useSetRecoilState(conversation); - const setMessages = useSetRecoilState(messages); - const setSubmission = useSetRecoilState(submission.submission); - const resetLatestMessage = useResetRecoilState(latestMessage); - - const _switchToConversation = ( - conversation: TConversation, - messages: TMessagesAtom = null, - preset: object | null = null, - { endpointsConfig = {} }, - ) => { - const { endpoint = null } = conversation; - - if (endpoint === null) { - // get the default model - conversation = getDefaultConversation({ - conversation, - endpointsConfig, - preset, - }); - } - - setConversation(conversation); - setMessages(messages); - setSubmission({} as TSubmission); - resetLatestMessage(); - }; - - const switchToConversation = 
useRecoilCallback( - ({ snapshot }) => - async ( - _conversation: TConversation, - messages: TMessagesAtom = null, - preset: object | null = null, - ) => { - const endpointsConfig = await snapshot.getPromise(endpoints.endpointsConfig); - _switchToConversation(_conversation, messages, preset, { - endpointsConfig, - }); - }, - [], - ); - - const newConversation = useCallback( - (template = {}, preset?: TPreset) => { - switchToConversation( - { - conversationId: 'new', - title: 'New Chat', - ...template, - endpoint: null, - createdAt: '', - updatedAt: '', - }, - [], - preset, - ); - }, - [switchToConversation], - ); - - const searchPlaceholderConversation = () => { - switchToConversation( - { - conversationId: 'search', - title: 'Search', - endpoint: null, - createdAt: '', - updatedAt: '', - }, - [], - ); - }; - - return { - _switchToConversation, - newConversation, - switchToConversation, - searchPlaceholderConversation, - }; -}; - export default { messages, conversation, messagesTree, latestMessage, messagesSiblingIdxFamily, - useConversation, }; diff --git a/client/src/store/conversations.ts b/client/src/store/conversations.ts index b32a5b5e1..5edfd6281 100644 --- a/client/src/store/conversations.ts +++ b/client/src/store/conversations.ts @@ -1,19 +1,8 @@ -import { atom, useSetRecoilState } from 'recoil'; -import { useCallback } from 'react'; +import { atom } from 'recoil'; -const refreshConversationsHint = atom({ +const refreshConversationsHint = atom({ key: 'refreshConversationsHint', default: 1, }); -const useConversations = () => { - const setRefreshConversationsHint = useSetRecoilState(refreshConversationsHint); - - const refreshConversations = useCallback(() => { - setRefreshConversationsHint((prevState) => prevState + 1); - }, [setRefreshConversationsHint]); - - return { refreshConversations }; -}; - -export default { refreshConversationsHint, useConversations }; +export default { refreshConversationsHint }; diff --git a/client/src/store/index.ts b/client/src/store/index.ts index 5328389e1..2fe0d1f1f 100644 --- a/client/src/store/index.ts +++ b/client/src/store/index.ts @@ -1,6 +1,7 @@ import conversation from './conversation'; import conversations from './conversations'; import endpoints from './endpoints'; +import models from './models'; import user from './user'; import text from './text'; import submission from './submission'; @@ -13,6 +14,7 @@ export default { ...conversation, ...conversations, ...endpoints, + ...models, ...user, ...text, ...submission, diff --git a/client/src/store/models.ts b/client/src/store/models.ts new file mode 100644 index 000000000..4040c5d1a --- /dev/null +++ b/client/src/store/models.ts @@ -0,0 +1,34 @@ +import { atom } from 'recoil'; +import { TModelsConfig } from 'librechat-data-provider'; +const openAIModels = [ + 'gpt-3.5-turbo', + 'gpt-3.5-turbo-16k', + 'gpt-3.5-turbo-0301', + 'text-davinci-003', + 'gpt-4', + 'gpt-4-0314', + 'gpt-4-0613', +]; + +const modelsConfig = atom({ + key: 'models', + default: { + openAI: openAIModels, + gptPlugins: openAIModels, + azureOpenAI: openAIModels, + bingAI: ['BingAI', 'Sydney'], + chatGPTBrowser: ['text-davinci-002-render-sha'], + google: ['chat-bison', 'text-bison', 'codechat-bison'], + anthropic: [ + 'claude-1', + 'claude-1-100k', + 'claude-instant-1', + 'claude-instant-1-100k', + 'claude-2', + ], + }, +}); + +export default { + modelsConfig, +}; diff --git a/client/src/utils/buildDefaultConvo.ts b/client/src/utils/buildDefaultConvo.ts new file mode 100644 index 000000000..2ea4d006b --- /dev/null +++ 
b/client/src/utils/buildDefaultConvo.ts @@ -0,0 +1,64 @@ +import { parseConvo } from 'librechat-data-provider'; +import getLocalStorageItems from './getLocalStorageItems'; +import type { TConversation, EModelEndpoint } from 'librechat-data-provider'; + +const buildDefaultConvo = ({ + conversation, + endpoint, + models, + lastConversationSetup, +}: { + conversation: TConversation; + endpoint: EModelEndpoint; + models: string[]; + lastConversationSetup: TConversation; +}) => { + const { lastSelectedModel, lastSelectedTools, lastBingSettings } = getLocalStorageItems(); + const { jailbreak, toneStyle } = lastBingSettings; + + if (!endpoint) { + return { + ...conversation, + endpoint, + }; + } + + const availableModels = models; + const model = lastConversationSetup?.model ?? lastSelectedModel?.[endpoint]; + const secondaryModel = + endpoint === 'gptPlugins' + ? lastConversationSetup?.agentOptions?.model ?? lastSelectedModel?.secondaryModel + : null; + + let possibleModels: string[], secondaryModels: string[]; + + if (availableModels.includes(model)) { + possibleModels = [model, ...availableModels]; + } else { + possibleModels = [...availableModels]; + } + + if (secondaryModel && availableModels.includes(secondaryModel)) { + secondaryModels = [secondaryModel, ...availableModels]; + } else { + secondaryModels = [...availableModels]; + } + + const convo = parseConvo(endpoint, lastConversationSetup, { + models: possibleModels, + secondaryModels, + }); + const defaultConvo = { + ...conversation, + ...convo, + endpoint, + }; + + defaultConvo.tools = lastSelectedTools ?? defaultConvo.tools; + defaultConvo.jailbreak = jailbreak ?? defaultConvo.jailbreak; + defaultConvo.toneStyle = toneStyle ?? defaultConvo.toneStyle; + + return defaultConvo; +}; + +export default buildDefaultConvo; diff --git a/client/src/utils/cleanupPreset.ts b/client/src/utils/cleanupPreset.ts index a2d9c4a38..81decab55 100644 --- a/client/src/utils/cleanupPreset.ts +++ b/client/src/utils/cleanupPreset.ts @@ -1,9 +1,8 @@ import { parseConvo } from 'librechat-data-provider'; -import type { TEndpointsConfig, TPreset } from 'librechat-data-provider'; +import type { TPreset } from 'librechat-data-provider'; type TCleanupPreset = { preset: Partial; - endpointsConfig: TEndpointsConfig; }; const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => { @@ -20,9 +19,9 @@ const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => { const parsedPreset = parseConvo(endpoint, _preset); return { - endpoint, presetId: _preset?.presetId ?? null, ...parsedPreset, + endpoint, title: _preset?.title ?? 
'New Preset', } as TPreset; }; diff --git a/client/src/utils/getDefaultConversation.ts b/client/src/utils/getDefaultConversation.ts deleted file mode 100644 index 1fae97cf6..000000000 --- a/client/src/utils/getDefaultConversation.ts +++ /dev/null @@ -1,96 +0,0 @@ -import { parseConvo } from 'librechat-data-provider'; -import getLocalStorageItems from './getLocalStorageItems'; -import type { - TConversation, - TEndpointsConfig, - EModelEndpoint, - TConfig, -} from 'librechat-data-provider'; - -const defaultEndpoints = [ - 'openAI', - 'azureOpenAI', - 'bingAI', - 'chatGPTBrowser', - 'gptPlugins', - 'google', - 'anthropic', -]; - -const buildDefaultConversation = ({ - conversation, - endpoint, - endpointsConfig, - lastConversationSetup, -}: { - conversation: TConversation; - endpoint: EModelEndpoint; - endpointsConfig: TEndpointsConfig; - lastConversationSetup: TConversation; -}) => { - const { lastSelectedModel, lastSelectedTools, lastBingSettings } = getLocalStorageItems(); - const { jailbreak, toneStyle } = lastBingSettings; - - if (!endpoint) { - return { - ...conversation, - endpoint, - }; - } - - const { availableModels = [] } = endpointsConfig[endpoint] as TConfig; - const possibleModels = [lastSelectedModel[endpoint], ...availableModels]; - const convo = parseConvo(endpoint, lastConversationSetup, { model: possibleModels }); - const defaultConvo = { - ...conversation, - ...convo, - endpoint, - }; - - defaultConvo.tools = lastSelectedTools ?? defaultConvo.tools; - defaultConvo.jailbreak = jailbreak ?? defaultConvo.jailbreak; - defaultConvo.toneStyle = toneStyle ?? defaultConvo.toneStyle; - - return defaultConvo; -}; - -const getDefaultConversation = ({ conversation, endpointsConfig, preset }) => { - const getEndpointFromPreset = () => { - const { endpoint: targetEndpoint } = preset || {}; - if (targetEndpoint && endpointsConfig?.[targetEndpoint]) { - return targetEndpoint; - } else if (targetEndpoint) { - console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsConfig}`); - } - return null; - }; - - const getEndpointFromLocalStorage = () => { - try { - const { lastConversationSetup } = getLocalStorageItems(); - - return ( - lastConversationSetup.endpoint && - (endpointsConfig[lastConversationSetup.endpoint] ? 
lastConversationSetup.endpoint : null) - ); - } catch (error) { - console.error(error); - return null; - } - }; - - const getDefaultEndpoint = () => { - return defaultEndpoints.find((e) => endpointsConfig?.[e]) || null; - }; - - const endpoint = getEndpointFromPreset() || getEndpointFromLocalStorage() || getDefaultEndpoint(); - - return buildDefaultConversation({ - conversation, - endpoint, - lastConversationSetup: preset, - endpointsConfig, - }); -}; - -export default getDefaultConversation; diff --git a/client/src/utils/getDefaultEndpoint.ts b/client/src/utils/getDefaultEndpoint.ts new file mode 100644 index 000000000..1b960996e --- /dev/null +++ b/client/src/utils/getDefaultEndpoint.ts @@ -0,0 +1,54 @@ +import type { TConversation, TPreset, TEndpointsConfig } from 'librechat-data-provider'; +import getLocalStorageItems from './getLocalStorageItems'; + +type TConvoSetup = Partial | Partial; + +type TDefaultEndpoint = { convoSetup: TConvoSetup; endpointsConfig: TEndpointsConfig }; + +const defaultEndpoints = [ + 'openAI', + 'azureOpenAI', + 'bingAI', + 'chatGPTBrowser', + 'gptPlugins', + 'google', + 'anthropic', +]; + +const getEndpointFromSetup = (convoSetup: TConvoSetup, endpointsConfig: TEndpointsConfig) => { + const { endpoint: targetEndpoint } = convoSetup || {}; + if (targetEndpoint && endpointsConfig?.[targetEndpoint]) { + return targetEndpoint; + } else if (targetEndpoint) { + console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsConfig}`); + } + return null; +}; + +const getEndpointFromLocalStorage = (endpointsConfig: TEndpointsConfig) => { + try { + const { lastConversationSetup } = getLocalStorageItems(); + + return ( + lastConversationSetup.endpoint && + (endpointsConfig[lastConversationSetup.endpoint] ? lastConversationSetup.endpoint : null) + ); + } catch (error) { + console.error(error); + return null; + } +}; + +const getDefinedEndpoint = (endpointsConfig: TEndpointsConfig) => { + return defaultEndpoints.find((e) => Object.hasOwn(endpointsConfig ?? {}, e)) ?? 'openAI'; +}; + +const getDefaultEndpoint = ({ convoSetup, endpointsConfig }: TDefaultEndpoint) => { + return ( + getEndpointFromSetup(convoSetup, endpointsConfig) || + getEndpointFromLocalStorage(endpointsConfig) || + getDefinedEndpoint(endpointsConfig) + ); +}; + +export default getDefaultEndpoint; diff --git a/client/src/utils/index.ts b/client/src/utils/index.ts index b8a199b2f..5f524176d 100644 --- a/client/src/utils/index.ts +++ b/client/src/utils/index.ts @@ -7,8 +7,9 @@ export { default as getLoginError } from './getLoginError'; export { default as cleanupPreset } from './cleanupPreset'; export { default as validateIframe } from './validateIframe'; export { default as getMessageError } from './getMessageError'; +export { default as buildDefaultConvo } from './buildDefaultConvo'; +export { default as getDefaultEndpoint } from './getDefaultEndpoint'; export { default as getLocalStorageItems } from './getLocalStorageItems'; -export { default as getDefaultConversation } from './getDefaultConversation'; export function cn(...inputs: string[]) { return twMerge(clsx(inputs)); diff --git a/docs/install/free_ai_apis.md b/docs/install/free_ai_apis.md index c7c038d78..e894f0b6f 100644 --- a/docs/install/free_ai_apis.md +++ b/docs/install/free_ai_apis.md @@ -1,8 +1,34 @@ # Free AI APIs -There are APIs offering free access to AI APIs via reverse proxy, and one of the major players, compatible with LibreChat, is NagaAI. +There are APIs offering free/free-trial access to AI APIs via reverse proxy. 
-Feel free to check out the others, but I haven't personally tested them: [Free AI APIs](https://github.com/NovaOSS/free-ai-apis)
+Here is a well-maintained public list of [Free AI APIs](https://github.com/NovaOSS/free-ai-apis) that may or may not be compatible with LibreChat.
+
+### [OpenRouter](https://openrouter.ai/) ⇆ (preferred)
+
+While not completely free, you get free trial credits when you [sign up to OpenRouter](https://openrouter.ai/), a legitimate proxy service to a multitude of LLMs, both closed and open source, including:
+- OpenAI models (great if you are barred from their API for whatever reason)
+- Anthropic Claude models (same as above)
+- Meta's Llama models
+- pygmalionai/mythalion-13b
+- and many more open-source models. Newer integrations are usually discounted, too!
+
+OpenRouter is so great that I decided to integrate it into the project as a standalone feature.
+
+**Setup:**
+- Sign up to [OpenRouter](https://openrouter.ai/) and create a key. You should name it and set a limit as well.
+- Set the environment variable `OPENROUTER_API_KEY` in your .env file to the key you just created (see the example snippet at the end of this section).
+- Restart your LibreChat server and use the OpenAI or Plugins endpoints.
+
+**Notes:**
+- [TODO] **In the future, you will be able to set up OpenRouter from the frontend as well.**
+- This will override the official OpenAI API or your reverse proxy settings for both Plugins and OpenAI.
+- On initial setup, you may need to refresh your page twice to see all their supported models populate automatically.
+- Plugins: the Functions Agent works with OpenRouter when using OpenAI models.
+- Plugins: turn functions off to try plugins with non-OpenAI models (ChatGPT plugins will not work, and others may not work as expected).
+- Plugins: make sure `PLUGINS_USE_AZURE` is not set in your .env file if you have Azure configured and want to use OpenRouter.
+
+> ⚠️ OpenRouter is in a category of its own and is highly recommended over the "free" services below. NagaAI and other 'free' API proxies tend to have intermittent issues, data leaks, and/or problems with the guidelines of the platforms they advertise on. Use the services below at your own risk.
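+
+**Example:** a minimal sketch of the `.env` entry described in the Setup steps above; the key value shown is a placeholder, not a real key.
+
+```bash
+# .env — placeholder value for illustration; use the key you created on OpenRouter
+OPENROUTER_API_KEY=sk-or-xxxxxxxxxxxxxxxx
+```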
### NagaAI diff --git a/package.json b/package.json index f6545c6e6..745896ddf 100644 --- a/package.json +++ b/package.json @@ -48,7 +48,7 @@ "b:api": "NODE_ENV=production bun run api/server/index.js", "b:api:dev": "NODE_ENV=development bun run --watch api/server/index.js", "b:data-provider": "cd packages/data-provider && bun run b:build", - "b:client": "bun run b:data-provider && cd client && bun run b:build", + "b:client": "bun --bun run b:data-provider && cd client && bun --bun run b:build", "b:client:dev": "cd client && bun run b:dev", "b:test:client": "cd client && bun run b:test", "b:test:api": "cd api && bun run b:test" diff --git a/packages/data-provider/src/api-endpoints.ts b/packages/data-provider/src/api-endpoints.ts index eea302867..1911b6972 100644 --- a/packages/data-provider/src/api-endpoints.ts +++ b/packages/data-provider/src/api-endpoints.ts @@ -36,6 +36,8 @@ export const deletePreset = () => '/api/presets/delete'; export const aiEndpoints = () => '/api/endpoints'; +export const models = () => '/api/models'; + export const tokenizer = () => '/api/tokenizer'; export const login = () => '/api/auth/login'; diff --git a/packages/data-provider/src/data-service.ts b/packages/data-provider/src/data-service.ts index 76eba9c68..5bed16feb 100644 --- a/packages/data-provider/src/data-service.ts +++ b/packages/data-provider/src/data-service.ts @@ -1,5 +1,7 @@ import * as t from './types'; import * as s from './schemas'; +/* TODO: fix dependency cycle */ +// eslint-disable-next-line import/no-cycle import request from './request'; import * as endpoints from './api-endpoints'; @@ -99,6 +101,10 @@ export const getAIEndpoints = () => { return request.get(endpoints.aiEndpoints()); }; +export const getModels = () => { + return request.get(endpoints.models()); +}; + export const updateTokenCount = (text: string) => { return request.post(endpoints.tokenizer(), { arg: text }); }; diff --git a/packages/data-provider/src/react-query-service.ts b/packages/data-provider/src/react-query-service.ts index bb922ee87..e3088f624 100644 --- a/packages/data-provider/src/react-query-service.ts +++ b/packages/data-provider/src/react-query-service.ts @@ -17,6 +17,7 @@ export enum QueryKeys { searchEnabled = 'searchEnabled', user = 'user', name = 'name', // user key name + models = 'models', endpoints = 'endpoints', presets = 'presets', searchResults = 'searchResults', @@ -218,6 +219,14 @@ export const useGetEndpointsQuery = (): QueryObserverResult }); }; +export const useGetModelsQuery = (): QueryObserverResult => { + return useQuery([QueryKeys.models], () => dataService.getModels(), { + refetchOnWindowFocus: false, + refetchOnReconnect: false, + refetchOnMount: false, + }); +}; + export const useCreatePresetMutation = (): UseMutationResult< s.TPreset[], unknown, @@ -313,6 +322,9 @@ export const useLoginUserMutation = (): UseMutationResult< onSuccess: () => { queryClient.invalidateQueries([QueryKeys.user]); }, + onMutate: () => { + queryClient.invalidateQueries([QueryKeys.models]); + }, }); }; @@ -345,7 +357,12 @@ export const useRefreshTokenMutation = (): UseMutationResult< unknown, unknown > => { - return useMutation(() => dataService.refreshToken(), {}); + const queryClient = useQueryClient(); + return useMutation(() => dataService.refreshToken(), { + onMutate: () => { + queryClient.invalidateQueries([QueryKeys.models]); + }, + }); }; export const useUserKeyQuery = ( diff --git a/packages/data-provider/src/request.ts b/packages/data-provider/src/request.ts index 6d7c82ac4..57d9372f1 100644 --- 
a/packages/data-provider/src/request.ts
+++ b/packages/data-provider/src/request.ts
@@ -1,5 +1,6 @@
 /* eslint-disable @typescript-eslint/no-explicit-any */
 import axios, { AxiosRequestConfig, AxiosError } from 'axios';
+/* TODO: fix dependency cycle */
 // eslint-disable-next-line import/no-cycle
 import { refreshToken } from './data-service';
 import { setTokenHeader } from './headers-helpers';
diff --git a/packages/data-provider/src/schemas.ts b/packages/data-provider/src/schemas.ts
index e703389f2..27543c4db 100644
--- a/packages/data-provider/src/schemas.ts
+++ b/packages/data-provider/src/schemas.ts
@@ -369,7 +369,8 @@ function getFirstDefinedValue(possibleValues: string[]) {
 }
 
 type TPossibleValues = {
-  model: string[];
+  models: string[];
+  secondaryModels?: string[];
 };
 
 export const parseConvo = (
@@ -383,10 +384,15 @@ export const parseConvo = (
     throw new Error(`Unknown endpoint: ${endpoint}`);
   }
 
-  const convo = schema.parse(conversation);
+  const convo = schema.parse(conversation) as TConversation;
+  const { models, secondaryModels } = possibleValues ?? {};
 
-  if (possibleValues && convo) {
-    convo.model = getFirstDefinedValue(possibleValues.model) ?? convo.model;
+  if (models && convo) {
+    convo.model = getFirstDefinedValue(models) ?? convo.model;
+  }
+
+  if (secondaryModels && convo.agentOptions) {
+    convo.agentOptions.model = getFirstDefinedValue(secondaryModels) ?? convo.agentOptions.model;
   }
 
   return convo;
diff --git a/packages/data-provider/src/types.ts b/packages/data-provider/src/types.ts
index 82442c412..3ee155520 100644
--- a/packages/data-provider/src/types.ts
+++ b/packages/data-provider/src/types.ts
@@ -111,21 +111,16 @@ export type TSearchResults = {
 };
 
 export type TConfig = {
-  availableModels: [];
+  availableModels?: [];
   userProvide?: boolean | null;
   availableTools?: [];
   plugins?: [];
+  azure?: boolean;
 };
 
-export type TEndpointsConfig = {
-  azureOpenAI: TConfig | null;
-  bingAI: TConfig | null;
-  chatGPTBrowser: TConfig | null;
-  anthropic: TConfig | null;
-  google: TConfig | null;
-  openAI: TConfig | null;
-  gptPlugins: TConfig | null;
-};
+export type TModelsConfig = Record<string, string[]>;
+
+export type TEndpointsConfig = Record<string, TConfig | null>;
 
 export type TUpdateTokenCountResponse = {
   count: number;