diff --git a/.env.example b/.env.example
index c3793945a..c51f71430 100644
--- a/.env.example
+++ b/.env.example
@@ -46,7 +46,7 @@ DEBUG_CONSOLE=false
 # Endpoints #
 #===================================================#
 
-# ENDPOINTS=openAI,azureOpenAI,bingAI,chatGPTBrowser,google,gptPlugins,anthropic
+# ENDPOINTS=openAI,assistants,azureOpenAI,bingAI,chatGPTBrowser,google,gptPlugins,anthropic
 
 PROXY=
 
diff --git a/client/src/components/Tools/ToolSelectDialog.tsx b/client/src/components/Tools/ToolSelectDialog.tsx
index 7f19b65bd..ee2f245b1 100644
--- a/client/src/components/Tools/ToolSelectDialog.tsx
+++ b/client/src/components/Tools/ToolSelectDialog.tsx
@@ -13,7 +13,6 @@ import ToolItem from './ToolItem';
 function ToolSelectDialog({
   isOpen,
   setIsOpen,
-  assistant_id,
 }: TPluginStoreDialogProps & { assistant_id?: string }) {
   const localize = useLocalize();
   const { getValues, setValue } = useFormContext();
@@ -152,11 +151,9 @@ function ToolSelectDialog({
             {localize('com_nav_tool_dialog')}
-          {!assistant_id && (
-            {localize('com_nav_tool_dialog_description')}
-          )}
+          {localize('com_nav_tool_dialog_description')}
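For anyone enabling the new endpoint by hand, here is a minimal `.env` sketch based on the updated `.env.example` above; the variable name and value list come straight from that file (uncomment the line and trim the list to the endpoints you actually use):

```bash
# Enable the OpenAI Assistants endpoint alongside the existing ones
# (comma-separated, no spaces between values)
ENDPOINTS=openAI,assistants,azureOpenAI,bingAI,chatGPTBrowser,google,gptPlugins,anthropic
PROXY=
```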
diff --git a/docs/install/configuration/ai_setup.md b/docs/install/configuration/ai_setup.md
index 0571cdc2a..dd03a1a7b 100644
--- a/docs/install/configuration/ai_setup.md
+++ b/docs/install/configuration/ai_setup.md
@@ -64,7 +64,7 @@ In the case where you have multiple endpoints setup, but want a specific one to
 ```bash
 # .env file
 # No spaces between values
-ENDPOINTS=azureOpenAI,openAI,google
+ENDPOINTS=azureOpenAI,openAI,assistants,google
 ```
 
 Note that LibreChat will use your last selected endpoint when creating a new conversation. So if Azure OpenAI is first in the order, but you used or view an OpenAI conversation last, when you hit "New Chat," OpenAI will be selected with its default conversation settings.
@@ -138,6 +138,7 @@ ASSISTANTS_BASE_URL=http://your-alt-baseURL:3080/
 - gpt-4-1106-preview
 - gpt-3.5-turbo-1106
 - Vision capability is not yet supported.
+- If you have previously set the [`ENDPOINTS` value in your .env file](./dotenv.md#endpoints), you will need to add the value `assistants`
 
 ---
 
diff --git a/docs/install/configuration/dotenv.md b/docs/install/configuration/dotenv.md
index f62d8b67c..d988afa17 100644
--- a/docs/install/configuration/dotenv.md
+++ b/docs/install/configuration/dotenv.md
@@ -124,7 +124,7 @@ In this section you can configure the endpoints and models selection, their API
 - `PROXY` is to be used by all endpoints (leave blank by default)
 
 ```bash
-ENDPOINTS=openAI,azureOpenAI,bingAI,chatGPTBrowser,google,gptPlugins,anthropic
+ENDPOINTS=openAI,assistants,azureOpenAI,bingAI,chatGPTBrowser,google,gptPlugins,anthropic
 PROXY=
 ```
 
@@ -377,6 +377,8 @@ ASSISTANTS_MODELS=gpt-3.5-turbo-0125,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-16k,gp
 ASSISTANTS_BASE_URL=http://your-alt-baseURL:3080/
 ```
 
+- If you have previously set the [`ENDPOINTS` value in your .env file](#endpoints), you will need to add the value `assistants`
+
 - There is additional, optional configuration, depending on your needs, such as disabling the assistant builder UI, and determining which assistants can be used, that are available via the [`librechat.yaml` custom config file](./custom_config.md#assistants-endpoint-object-structure).
 
 ### OpenRouter
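As a follow-up to the docs changes above, a short sketch of the optional assistants-specific variables they reference; the model names are the two listed as supported in ai_setup.md and the base URL is the placeholder used in the docs, so substitute your own values:

```bash
# Optional: limit which models the Assistants endpoint exposes
ASSISTANTS_MODELS=gpt-4-1106-preview,gpt-3.5-turbo-1106

# Optional: use an alternate base URL for the Assistants API
ASSISTANTS_BASE_URL=http://your-alt-baseURL:3080/
```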