Compare commits
2 Commits
update-tit...azure-v2
| Author | SHA1 | Date |
|---|---|---|
| | 78283e1686 | |
| | f00a8f87f7 | |
```diff
@@ -80,13 +80,18 @@ class StableDiffusionAPI extends StructuredTool {
     const payload = {
       prompt,
       negative_prompt,
       sampler_index: 'DPM++ 2M Karras',
       cfg_scale: 4.5,
       steps: 22,
       width: 1024,
       height: 1024,
     };
-    const generationResponse = await axios.post(`${url}/sdapi/v1/txt2img`, payload);
+    let generationResponse;
+    try {
+      generationResponse = await axios.post(`${url}/sdapi/v1/txt2img`, payload);
+    } catch (error) {
+      logger.error('[StableDiffusion] Error while generating image:', error);
+      return 'Error making API request.';
+    }
     const image = generationResponse.data.images[0];

     /** @type {{ height: number, width: number, seed: number, infotexts: string[] }} */
```

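The hunk above wraps the txt2img request in a try/catch so a failed call returns a tool-friendly error string instead of throwing. Below is a minimal standalone sketch of that pattern, assuming an Automatic1111-style server at `url` and a plain console logger; the function name and module layout are illustrative, not the project's actual code.

```js
// Sketch of the error-handling pattern introduced above (illustrative only).
// Assumes an Automatic1111-compatible server at `url`; `logger` stands in for the project's logger.
const axios = require('axios');

const logger = { error: (...args) => console.error(...args) };

async function generateImage(url, prompt, negative_prompt) {
  const payload = {
    prompt,
    negative_prompt,
    sampler_index: 'DPM++ 2M Karras',
    cfg_scale: 4.5,
    steps: 22,
    width: 1024,
    height: 1024,
  };

  let generationResponse;
  try {
    generationResponse = await axios.post(`${url}/sdapi/v1/txt2img`, payload);
  } catch (error) {
    // Log the failure and return a string the calling tool can surface to the user.
    logger.error('[StableDiffusion] Error while generating image:', error);
    return 'Error making API request.';
  }

  // In an Automatic1111 response, `images` is typically an array of base64-encoded images.
  return generationResponse.data.images[0];
}
```
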
```diff
@@ -19,9 +19,9 @@ const {
   saveAssistantMessage,
 } = require('~/server/services/Threads');
 const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
-const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
-const { createRun, StreamRunManager } = require('~/server/services/Runs');
+const { createOnTextProgress } = require('~/server/services/AssistantService');
 const { addTitle } = require('~/server/services/Endpoints/assistants');
+const { StreamRunManager } = require('~/server/services/Runs');
 const { getTransactions } = require('~/models/Transaction');
 const checkBalance = require('~/models/checkBalance');
 const { getConvo } = require('~/models/Conversation');
```

```diff
@@ -471,39 +471,7 @@ const chatV2 = async (req, res) => {
     /** @type {RunResponse | typeof StreamRunManager | undefined} */
     let response;

-    const processRun = async (retry = false) => {
-      if (endpoint === EModelEndpoint.azureAssistants) {
-        body.model = openai._options.model;
-        openai.attachedFileIds = attachedFileIds;
-        if (retry) {
-          response = await runAssistant({
-            openai,
-            thread_id,
-            run_id,
-            in_progress: openai.in_progress,
-          });
-          return;
-        }
-
-        /* NOTE:
-         * By default, a Run will use the model and tools configuration specified in Assistant object,
-         * but you can override most of these when creating the Run for added flexibility:
-         */
-        const run = await createRun({
-          openai,
-          thread_id,
-          body,
-        });
-
-        run_id = run.id;
-        await cache.set(cacheKey, `${thread_id}:${run_id}`, ten_minutes);
-        sendInitialResponse();
-
-        // todo: retry logic
-        response = await runAssistant({ openai, thread_id, run_id });
-        return;
-      }
-
+    const processRun = async () => {
+      /** @type {{[AssistantStreamEvents.ThreadRunCreated]: (event: ThreadRunCreated) => Promise<void>}} */
+      const handlers = {
+        [AssistantStreamEvents.ThreadRunCreated]: async (event) => {
```

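The rewritten `processRun` drops the Azure-specific `runAssistant`/`createRun` branch in favor of a map of handlers keyed by assistants stream events. The sketch below only illustrates that handler-map dispatch pattern with made-up event names and a stubbed event stream; it is not the actual `AssistantStreamEvents` values or the `StreamRunManager` interface.

```js
// Illustrative handler-map dispatch; event names and the fake stream are assumptions,
// not the real AssistantStreamEvents enum or StreamRunManager API.
const handlers = {
  'thread.run.created': async (event) => {
    console.log('run created:', event.data.id);
  },
  'thread.message.delta': async (event) => {
    process.stdout.write(event.data.delta ?? '');
  },
};

async function processStream(events) {
  for await (const event of events) {
    const handler = handlers[event.event];
    if (handler) {
      // Unknown events are ignored; known ones are awaited in order.
      await handler(event);
    }
  }
}

// Usage with a stubbed event stream:
async function* fakeStream() {
  yield { event: 'thread.run.created', data: { id: 'run_123' } };
  yield { event: 'thread.message.delta', data: { delta: 'Hello' } };
}

processStream(fakeStream());
```
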
```diff
@@ -15,44 +15,6 @@ const OpenAIClient = require('~/app/clients/OpenAIClient');
 const { isUserProvided } = require('~/server/utils');
 const { constructAzureURL } = require('~/utils');

-class Files {
-  constructor(client) {
-    this._client = client;
-  }
-  /**
-   * Create an assistant file by attaching a
-   * [File](https://platform.openai.com/docs/api-reference/files) to an
-   * [assistant](https://platform.openai.com/docs/api-reference/assistants).
-   */
-  create(assistantId, body, options) {
-    return this._client.post(`/assistants/${assistantId}/files`, {
-      body,
-      ...options,
-      headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },
-    });
-  }
-
-  /**
-   * Retrieves an AssistantFile.
-   */
-  retrieve(assistantId, fileId, options) {
-    return this._client.get(`/assistants/${assistantId}/files/${fileId}`, {
-      ...options,
-      headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },
-    });
-  }
-
-  /**
-   * Delete an assistant file.
-   */
-  del(assistantId, fileId, options) {
-    return this._client.delete(`/assistants/${assistantId}/files/${fileId}`, {
-      ...options,
-      headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },
-    });
-  }
-}
-
 const initializeClient = async ({ req, res, version, endpointOption, initAppClient = false }) => {
   const { PROXY, OPENAI_ORGANIZATION, AZURE_ASSISTANTS_API_KEY, AZURE_ASSISTANTS_BASE_URL } =
     process.env;
```

```diff
@@ -168,8 +130,6 @@ const initializeClient = async ({ req, res, version, endpointOption, initAppClie
     ...opts,
   });

-  openai.beta.assistants.files = new Files(openai);
-
   openai.req = req;
   openai.res = res;
```

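The deleted `Files` class was a thin shim over the v1-only per-assistant file routes (note the hard-coded `'OpenAI-Beta': 'assistants=v1'` header), and the second hunk removes the line that attached it to the client. The sketch below shows roughly how such a shim would have been exercised, with made-up IDs and a stub standing in for the initialized OpenAI SDK client; under v2 those routes, and therefore the wrapper, are gone.

```js
// Illustrative only: wiring and calling the removed v1 shim.
// `Files` refers to the class deleted in the hunk above; `stubClient` is a fake client
// with post/get/delete methods, standing in for the real OpenAI SDK instance.
const stubClient = {
  post: async (path, opts) => ({ path, opts }),
  get: async (path, opts) => ({ path, opts }),
  delete: async (path, opts) => ({ path, opts }),
};

const files = new Files(stubClient); // previously: openai.beta.assistants.files = new Files(openai);

(async () => {
  // Attach an uploaded file to an assistant (v1-only route; body shape follows the v1 API),
  // then detach it again. IDs are hypothetical.
  await files.create('asst_abc123', { file_id: 'file_xyz789' });
  await files.del('asst_abc123', 'file_xyz789');
})();
```
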
```diff
@@ -12,7 +12,12 @@ const { logger } = require('~/config');
  */
 function azureAssistantsDefaults() {
   return {
-    capabilities: [Capabilities.tools, Capabilities.actions, Capabilities.code_interpreter],
+    capabilities: [
+      Capabilities.tools,
+      Capabilities.actions,
+      Capabilities.code_interpreter,
+      Capabilities.retrieval,
+    ],
     version: defaultAssistantsVersion.azureAssistants,
   };
 }
```

```diff
@@ -181,7 +181,6 @@ function generateConfig(key, baseURL, endpoint) {
     config.retrievalModels = defaultRetrievalModels;
     config.capabilities = [
       Capabilities.code_interpreter,
-      Capabilities.image_vision,
       Capabilities.retrieval,
       Capabilities.actions,
       Capabilities.tools,
```

```diff
@@ -133,7 +133,7 @@ export enum Capabilities {

 export const defaultAssistantsVersion = {
   [EModelEndpoint.assistants]: 2,
-  [EModelEndpoint.azureAssistants]: 1,
+  [EModelEndpoint.azureAssistants]: 2,
 };

 export const assistantEndpointSchema = z.object({
```

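Bumping `defaultAssistantsVersion.azureAssistants` to 2 is what moves the Azure endpoint onto the v2 Assistants API. A plausible sketch of how a version number like this maps onto the `OpenAI-Beta` header follows; the v1 header value appears in the deleted `Files` shim above, the v2 value is the standard `assistants=v2`, and the helper name itself is made up rather than taken from the codebase.

```js
// Hypothetical helper: derive the OpenAI-Beta header from an assistants API version.
// The header values are real ('assistants=v1' / 'assistants=v2'); the function is illustrative.
function assistantsBetaHeader(version) {
  return { 'OpenAI-Beta': `assistants=v${version}` };
}

const defaultAssistantsVersion = {
  assistants: 2,
  azureAssistants: 2, // was 1 before this change
};

console.log(assistantsBetaHeader(defaultAssistantsVersion.azureAssistants));
// => { 'OpenAI-Beta': 'assistants=v2' }
```
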
```diff
@@ -441,7 +441,7 @@ export const EndpointURLs: { [key in EModelEndpoint]: string } = {
   [EModelEndpoint.gptPlugins]: `/api/ask/${EModelEndpoint.gptPlugins}`,
   [EModelEndpoint.azureOpenAI]: `/api/ask/${EModelEndpoint.azureOpenAI}`,
   [EModelEndpoint.chatGPTBrowser]: `/api/ask/${EModelEndpoint.chatGPTBrowser}`,
-  [EModelEndpoint.azureAssistants]: '/api/assistants/v1/chat',
+  [EModelEndpoint.azureAssistants]: '/api/assistants/v2/chat',
   [EModelEndpoint.assistants]: '/api/assistants/v2/chat',
 };
```

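With the URL map updated, Azure assistant chats are posted to the same v2 route as the OpenAI assistants endpoint. A small sketch of how a client might resolve the route from this map; the `postChat` helper and the payload shape are assumptions, and only the two assistants entries from the hunk are reproduced here.

```js
// Illustrative route lookup; EndpointURLs mirrors the two assistants entries above.
// postChat and the payload shape are made up, not the project's actual client code.
const EndpointURLs = {
  azureAssistants: '/api/assistants/v2/chat',
  assistants: '/api/assistants/v2/chat',
};

async function postChat(endpoint, payload) {
  // fetch is available in Node 18+ and in browsers.
  const res = await fetch(EndpointURLs[endpoint], {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
  });
  return res;
}

// e.g. postChat('azureAssistants', { text: 'Hello', thread_id: null });
```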