Compare commits

..

10 Commits

Author SHA1 Message Date
Danny Avila
1c5e827ce9 chore: azureAssistants ENDPOINTS exclusion warning 2024-05-19 12:48:36 -04:00
Danny Avila
84f68f9a15 feat: add additional metadata: endpoint, author ID 2024-05-19 12:41:35 -04:00
Danny Avila
fb6e87c36d chore: update comparisons 2024-05-19 12:25:28 -04:00
Danny Avila
bce0584c67 localization updates 2024-05-19 12:24:50 -04:00
Danny Avila
5c15b60601 meilisearch improvements 2024-05-19 12:16:25 -04:00
Danny Avila
e55a89270e chore: update .env.example 2024-05-19 11:35:30 -04:00
Danny Avila
3eabbd572e ci: fix backend tests due to new updates 2024-05-18 15:44:22 -04:00
Danny Avila
6082e26716 fix: merge conflict 2024-05-18 15:29:31 -04:00
Danny Avila
bc46ccdcad 🤖 Assistants V2 Support: Part 2
🎹 fix: Autocompletion Chrome Bug on Action API Key Input

chore: remove `useOriginNavigate`

chore: set correct OpenAI Storage Source

fix: azure file deletions, instantiate clients by source for deletion

update code interpreter files info

feat: deleteResourceFileId

chore: increase poll interval as azure easily rate limits

fix: openai file deletions, TODO: evaluate rejected deletion settled promises to determine which to delete from db records

file source icons

update table file filters

chore: file search info and versioning

fix: retrieval update with necessary tool_resources if specified

fix(useMentions): add optional chaining in case listMap value is undefined

fix: force assistant avatar roundedness

fix: azure assistants, check correct flag

chore: bump data-provider
2024-05-18 15:22:48 -04:00
Danny Avila
2bdbff5141 🤖 Assistants V2 Support: Part 1
- Separated Azure Assistants to its own endpoint
- File Search / Vector Store integration is incomplete, but can toggle and use storage from playground
- Code Interpreter resource files can be added but not deleted
- GPT-4o is supported
- Many improvements to the Assistants Endpoint overall

data-provider v2 changes

copy existing route as v1

chore: rename new endpoint to reduce comparison operations and add new azure filesource

api: add azureAssistants part 1

force use of version for assistants/assistantsAzure

chore: switch name back to azureAssistants

refactor type version: string | number

Ensure assistants endpoints have version set

fix: isArchived type issue in ConversationListParams

refactor: update assistants mutations/queries with endpoint/version definitions, update Assistants Map structure

chore: FilePreview component ExtendedFile type assertion

feat: isAssistantsEndpoint helper

chore: remove unused useGenerations

chore(buildTree): type issue

chore(Advanced): type issue (unused component, maybe in future)

first pass for multi-assistant endpoint rewrite

fix(listAssistants): pass params correctly

feat: list separate assistants by endpoint

fix(useTextarea): access assistantMap correctly

fix: assistant endpoint switching, resetting ID

fix: broken during rewrite, selecting assistant mention

fix: set/invalidate assistants endpoint query data correctly

feat: Fix issue with assistant ID not being reset correctly

getOpenAIClient helper function

feat: add toast for assistant deletion

fix: assistants delete right after create issue for azure

fix: assistant patching

refactor: actions to use getOpenAIClient

refactor: consolidate logic into helpers file

fix: issue where conversation data was not initially available

v1 chat support

refactor(spendTokens): only early return if completionTokens isNaN

fix(OpenAIClient): ensure spendTokens has all necessary params

refactor: route/controller logic

fix(assistants/initializeClient): use defaultHeaders field

fix: sanitize default operation id

chore: bump openai package

first pass v2 action service

feat: retroactive domain parsing for actions added via v1

feat: delete db records of actions/assistants on openai assistant deletion

chore: remove vision tools from v2 assistants

feat: v2 upload and delete assistant vision images

WIP first pass, thread attachments

fix: show assistant vision files (save local/firebase copy)

v2 image continue

fix: annotations

fix: refine annotations

show analyze as error if no longer submitting before progress reaches 1, and show file_search as retrieval tool

fix: abort run, undefined endpoint issue

refactor: consolidate capabilities logic and anticipate versioning

frontend version 2 changes

fix: query selection and filter

add endpoint to unknown filepath

add file ids to resource, deleting in progress

enable/disable file search

remove version log
2024-05-18 15:22:47 -04:00
9 changed files with 94 additions and 38 deletions

View File

@@ -80,18 +80,13 @@ class StableDiffusionAPI extends StructuredTool {
   const payload = {
     prompt,
     negative_prompt,
     sampler_index: 'DPM++ 2M Karras',
     cfg_scale: 4.5,
     steps: 22,
     width: 1024,
     height: 1024,
   };
-  let generationResponse;
-  try {
-    generationResponse = await axios.post(`${url}/sdapi/v1/txt2img`, payload);
-  } catch (error) {
-    logger.error('[StableDiffusion] Error while generating image:', error);
-    return 'Error making API request.';
-  }
+  const generationResponse = await axios.post(`${url}/sdapi/v1/txt2img`, payload);
   const image = generationResponse.data.images[0];
   /** @type {{ height: number, width: number, seed: number, infotexts: string[] }} */
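
For context, `${url}/sdapi/v1/txt2img` is the Automatic1111-style text-to-image route, which returns base64-encoded images. A minimal standalone sketch of the request/response shape, assuming a compatible local server and the payload fields shown above (illustrative only, not the tool's actual code):

import axios from 'axios';

// Hypothetical sketch: issue a txt2img request and return the first image.
// The real StructuredTool also persists the image and handles logging differently.
async function txt2img(url: string, prompt: string, negative_prompt = ''): Promise<string> {
  const payload = {
    prompt,
    negative_prompt,
    sampler_index: 'DPM++ 2M Karras',
    cfg_scale: 4.5,
    steps: 22,
    width: 1024,
    height: 1024,
  };
  const res = await axios.post(`${url}/sdapi/v1/txt2img`, payload);
  // `images` holds base64-encoded PNGs; generation metadata arrives alongside in `info`/`infotexts`
  return res.data.images[0];
}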

View File

@@ -19,9 +19,9 @@ const {
   saveAssistantMessage,
 } = require('~/server/services/Threads');
 const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
-const { createOnTextProgress } = require('~/server/services/AssistantService');
+const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
+const { createRun, StreamRunManager } = require('~/server/services/Runs');
 const { addTitle } = require('~/server/services/Endpoints/assistants');
-const { StreamRunManager } = require('~/server/services/Runs');
 const { getTransactions } = require('~/models/Transaction');
 const checkBalance = require('~/models/checkBalance');
 const { getConvo } = require('~/models/Conversation');
@@ -471,7 +471,39 @@ const chatV2 = async (req, res) => {
   /** @type {RunResponse | typeof StreamRunManager | undefined} */
   let response;

-  const processRun = async () => {
+  const processRun = async (retry = false) => {
+    if (endpoint === EModelEndpoint.azureAssistants) {
+      body.model = openai._options.model;
+      openai.attachedFileIds = attachedFileIds;
+      if (retry) {
+        response = await runAssistant({
+          openai,
+          thread_id,
+          run_id,
+          in_progress: openai.in_progress,
+        });
+        return;
+      }
+
+      /* NOTE:
+       * By default, a Run will use the model and tools configuration specified in Assistant object,
+       * but you can override most of these when creating the Run for added flexibility:
+       */
+      const run = await createRun({
+        openai,
+        thread_id,
+        body,
+      });
+      run_id = run.id;
+      await cache.set(cacheKey, `${thread_id}:${run_id}`, ten_minutes);
+      sendInitialResponse();
+
+      // todo: retry logic
+      response = await runAssistant({ openai, thread_id, run_id });
+      return;
+    }
+
     /** @type {{[AssistantStreamEvents.ThreadRunCreated]: (event: ThreadRunCreated) => Promise<void>}} */
     const handlers = {
       [AssistantStreamEvents.ThreadRunCreated]: async (event) => {
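
The azureAssistants branch above deliberately avoids v2 streaming: it creates a run, caches `thread_id:run_id` for ten minutes so an aborted request can be resumed, sends an initial response, and then polls via `runAssistant`. A rough sketch of that create-then-poll-with-resume pattern under the signatures shown in the diff (hypothetical wrapper; the actual controller receives a `retry` flag rather than re-reading the cache itself):

import { runAssistant } from '~/server/services/AssistantService';
import { createRun } from '~/server/services/Runs';

// Resume a cached run when present; otherwise create one, cache its id, and poll.
async function processAzureRun(
  openai: any, // client as initialized for azureAssistants
  cache: { get(k: string): Promise<string | undefined>; set(k: string, v: string, ttl: number): Promise<void> },
  cacheKey: string,
  thread_id: string,
  body: Record<string, unknown>,
  ttlMs: number,
) {
  const cached = await cache.get(cacheKey);
  if (cached) {
    // Retry path: pick up the in-progress run instead of starting a duplicate.
    const [, run_id] = cached.split(':');
    return runAssistant({ openai, thread_id, run_id, in_progress: openai.in_progress });
  }
  const run = await createRun({ openai, thread_id, body });
  await cache.set(cacheKey, `${thread_id}:${run.id}`, ttlMs);
  return runAssistant({ openai, thread_id, run_id: run.id });
}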

View File

@@ -15,6 +15,44 @@ const OpenAIClient = require('~/app/clients/OpenAIClient');
 const { isUserProvided } = require('~/server/utils');
 const { constructAzureURL } = require('~/utils');

+class Files {
+  constructor(client) {
+    this._client = client;
+  }
+
+  /**
+   * Create an assistant file by attaching a
+   * [File](https://platform.openai.com/docs/api-reference/files) to an
+   * [assistant](https://platform.openai.com/docs/api-reference/assistants).
+   */
+  create(assistantId, body, options) {
+    return this._client.post(`/assistants/${assistantId}/files`, {
+      body,
+      ...options,
+      headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },
+    });
+  }
+
+  /**
+   * Retrieves an AssistantFile.
+   */
+  retrieve(assistantId, fileId, options) {
+    return this._client.get(`/assistants/${assistantId}/files/${fileId}`, {
+      ...options,
+      headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },
+    });
+  }
+
+  /**
+   * Delete an assistant file.
+   */
+  del(assistantId, fileId, options) {
+    return this._client.delete(`/assistants/${assistantId}/files/${fileId}`, {
+      ...options,
+      headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },
+    });
+  }
+}
+
 const initializeClient = async ({ req, res, version, endpointOption, initAppClient = false }) => {
   const { PROXY, OPENAI_ORGANIZATION, AZURE_ASSISTANTS_API_KEY, AZURE_ASSISTANTS_BASE_URL } =
     process.env;

@@ -130,6 +168,8 @@ const initializeClient = async ({ req, res, version, endpointOption, initAppClient = false }) => {
     ...opts,
   });

+  openai.beta.assistants.files = new Files(openai);
+
   openai.req = req;
   openai.res = res;
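
The `Files` shim above restores the Assistants v1 `assistants.files` surface, which the v2 SDK no longer exposes, by issuing raw client requests with the `OpenAI-Beta: assistants=v1` header. A hedged usage sketch, assuming `openai` comes from the `initializeClient` above and that the SDK resolves these calls to plain AssistantFile objects (`uploadedFile` and `assistant_id` are placeholder names):

// Attach an uploaded File to an assistant over the v1 beta route, then detach it.
async function attachAndDetach(openai: any, assistant_id: string, uploadedFile: { id: string }) {
  const assistantFile = await openai.beta.assistants.files.create(assistant_id, {
    file_id: uploadedFile.id, // v1 "create assistant file" request body
  });
  await openai.beta.assistants.files.del(assistant_id, assistantFile.id);
}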

View File

@@ -12,12 +12,7 @@ const { logger } = require('~/config');
  */
 function azureAssistantsDefaults() {
   return {
-    capabilities: [
-      Capabilities.tools,
-      Capabilities.actions,
-      Capabilities.code_interpreter,
-      Capabilities.retrieval,
-    ],
+    capabilities: [Capabilities.tools, Capabilities.actions, Capabilities.code_interpreter],
     version: defaultAssistantsVersion.azureAssistants,
   };
 }

View File

@@ -181,6 +181,7 @@ function generateConfig(key, baseURL, endpoint) {
   config.retrievalModels = defaultRetrievalModels;
   config.capabilities = [
     Capabilities.code_interpreter,
+    Capabilities.image_vision,
     Capabilities.retrieval,
     Capabilities.actions,
     Capabilities.tools,

View File

@@ -37,7 +37,7 @@ export default function Conversation({ conversation, retainView, toggleNav, isLa
   const [isPopoverActive, setIsPopoverActive] = useState(false);

   const clickHandler = async (event: React.MouseEvent<HTMLAnchorElement>) => {
-    if (event.button === 0 && (event.ctrlKey || event.metaKey)) {
+    if (event.button === 0 && event.ctrlKey) {
       toggleNav();
       return;
     }

View File

@@ -1,6 +1,6 @@
 import { useState, useMemo, useEffect } from 'react';
 import TextareaAutosize from 'react-textarea-autosize';
-import type { Assistant, TPreset } from 'librechat-data-provider';
+import type { TPreset } from 'librechat-data-provider';
 import type { TModelSelectProps, Option } from '~/common';
 import { Label, HoverCard, SelectDropDown, HoverCardTrigger } from '~/components/ui';
 import { cn, defaultTextProps, removeFocusOutlines, mapAssistants } from '~/utils';
@@ -20,19 +20,15 @@ export default function Settings({ conversation, setOption, models, readonly }:
   const { model, endpoint, assistant_id, endpointType, promptPrefix, instructions } =
     conversation ?? {};

-  const currentList = useMemo(
-    () => Object.values(assistantListMap?.[endpoint ?? ''] ?? {}) as Assistant[],
-    [assistantListMap, endpoint],
-  );
-
   const assistants = useMemo(() => {
-    const currentAssistants = (currentList ?? []).map(({ id, name }) => ({
-      label: name,
-      value: id,
-    }));
-
-    return [defaultOption, ...currentAssistants].filter(Boolean);
-  }, [currentList, defaultOption]);
+    return [
+      defaultOption,
+      ...(assistantListMap[endpoint ?? ''] ?? []).map(({ id, name }) => ({
+        label: name,
+        value: id,
+      })),
+    ].filter(Boolean);
+  }, [assistantListMap, endpoint, defaultOption]);

   const [onPromptPrefixChange, promptPrefixValue] = useDebouncedInput({
     setOption,
@@ -100,9 +96,6 @@ export default function Settings({ conversation, setOption, models, readonly }:
       value: assistant.id ?? '',
     });
     setOption('assistant_id')(assistant.id);
-    if (assistant.model) {
-      setModel(assistant.model);
-    }
   };

   const optionEndpoint = endpointType ?? endpoint;
@@ -148,7 +141,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
             <TextareaAutosize
               id="promptPrefix"
               disabled={readonly}
-              value={(promptPrefixValue as string | null | undefined) ?? ''}
+              value={promptPrefixValue as string | undefined}
               onChange={onPromptPrefixChange}
               placeholder={localize('com_endpoint_prompt_prefix_assistants_placeholder')}
               className={cn(
@@ -165,7 +158,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
             <TextareaAutosize
               id="instructions"
               disabled={readonly}
-              value={(instructionsValue as string | null | undefined) ?? ''}
+              value={instructionsValue as string | undefined}
               onChange={onInstructionsChange}
               placeholder={localize('com_endpoint_instructions_assistants_placeholder')}
               className={cn(
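
The memo rewrite above tracks the "update Assistants Map structure" change from the commit log: each endpoint key now holds a plain array rather than an id-keyed object, so `Object.values(... ?? {})` and the `Assistant[]` assertion give way to `assistantListMap[endpoint ?? ''] ?? []`. A sketch of the assumed before/after shapes (illustrative types, not from the codebase):

// Before: an id-keyed record per endpoint — iterating required Object.values().
type OldAssistantListMap = Record<string, Record<string, { id: string; name: string }>>;

// After: a list per endpoint — mapped over directly in the Settings memo.
type NewAssistantListMap = Record<string, { id: string; name: string }[]>;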

View File

@@ -71,7 +71,7 @@ export default function NewChat({
   const { conversation } = store.useCreateConversationAtom(index);

   const clickHandler = (event: React.MouseEvent<HTMLAnchorElement>) => {
-    if (event.button === 0 && !(event.ctrlKey || event.metaKey)) {
+    if (event.button === 0 && !event.ctrlKey) {
       event.preventDefault();
       newConvo();
       navigate('/c/new');

View File

@@ -133,7 +133,7 @@ export enum Capabilities {
 export const defaultAssistantsVersion = {
   [EModelEndpoint.assistants]: 2,
-  [EModelEndpoint.azureAssistants]: 2,
+  [EModelEndpoint.azureAssistants]: 1,
 };

 export const assistantEndpointSchema = z.object({
@@ -441,7 +441,7 @@ export const EndpointURLs: { [key in EModelEndpoint]: string } = {
   [EModelEndpoint.gptPlugins]: `/api/ask/${EModelEndpoint.gptPlugins}`,
   [EModelEndpoint.azureOpenAI]: `/api/ask/${EModelEndpoint.azureOpenAI}`,
   [EModelEndpoint.chatGPTBrowser]: `/api/ask/${EModelEndpoint.chatGPTBrowser}`,
-  [EModelEndpoint.azureAssistants]: '/api/assistants/v2/chat',
+  [EModelEndpoint.azureAssistants]: '/api/assistants/v1/chat',
   [EModelEndpoint.assistants]: '/api/assistants/v2/chat',
 };
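
Taken together, these two hunks pin azureAssistants to the v1 Assistants API (presumably because Azure's Assistants service had not yet caught up to v2) while the OpenAI endpoint stays on v2, and each endpoint's chat route carries its version in the path. A small sketch of deriving the route from these tables (hypothetical helper, not part of data-provider):

// Resolve the versioned chat route for an assistants endpoint,
// assuming defaultAssistantsVersion and EModelEndpoint as defined above.
function assistantsChatRoute(
  endpoint: EModelEndpoint.assistants | EModelEndpoint.azureAssistants,
): string {
  const version = defaultAssistantsVersion[endpoint]; // 2 for assistants, 1 for azureAssistants
  return `/api/assistants/v${version}/chat`;
}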