Compare commits
4 Commits
feat/bette ... chore/tigh

| Author | SHA1 | Date |
|---|---|---|
|  | 70a82652a5 |  |
|  | f211e25aac |  |
|  | 800391b264 |  |
|  | 6605b6c800 |  |
@@ -27,6 +27,7 @@ const {
 const { getModelMaxTokens, getModelMaxOutputTokens, matchModelName } = require('~/utils');
 const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
+const { encodeAndFormatDocuments } = require('~/server/services/Files/documents');
 const { sleep } = require('~/server/utils');
 const BaseClient = require('./BaseClient');
 const { logger } = require('~/config');
@@ -312,6 +313,33 @@ class AnthropicClient extends BaseClient {
     return files;
   }

+  async addDocuments(message, attachments) {
+    // Only process documents
+    const documentResult = await encodeAndFormatDocuments(
+      this.options.req,
+      attachments,
+      EModelEndpoint.anthropic,
+    );
+
+    message.documents =
+      documentResult.documents && documentResult.documents.length
+        ? documentResult.documents
+        : undefined;
+
+    return documentResult.files;
+  }
+
+  async processAttachments(message, attachments) {
+    // Process both images and documents
+    const [imageFiles, documentFiles] = await Promise.all([
+      this.addImageURLs(message, attachments),
+      this.addDocuments(message, attachments),
+    ]);
+
+    // Combine files from both processors
+    return [...imageFiles, ...documentFiles];
+  }
+
   /**
    * @param {object} params
    * @param {number} params.promptTokens
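For orientation, here is a minimal standalone sketch of the pattern these two methods follow: both encoders run in parallel, the document parts are stashed on the message, and the merged file metadata is returned to the caller. This is not LibreChat code; `encodeImages` and `encodeDocuments` are hypothetical stand-ins for `encodeAndFormat` and `encodeAndFormatDocuments`.

```js
// Hypothetical stand-ins for the real encoders; result shapes mirror the diff above.
const encodeImages = async (attachments) => ({
  image_urls: attachments.filter((f) => f.type.startsWith('image/')),
  files: attachments.filter((f) => f.type.startsWith('image/')),
});
const encodeDocuments = async (attachments) => ({
  documents: attachments.filter((f) => f.type === 'application/pdf'),
  files: attachments.filter((f) => f.type === 'application/pdf'),
});

async function processAttachments(message, attachments) {
  // Run both encoders concurrently, as in the diff.
  const [imageResult, documentResult] = await Promise.all([
    encodeImages(attachments),
    encodeDocuments(attachments),
  ]);
  // Document parts ride along on the message; file metadata is merged for the caller.
  message.documents = documentResult.documents.length ? documentResult.documents : undefined;
  message.image_urls = imageResult.image_urls.length ? imageResult.image_urls : undefined;
  return [...imageResult.files, ...documentResult.files];
}

// Example: one PDF plus one image yields two files and one document part.
processAttachments({}, [
  { type: 'application/pdf', filename: 'report.pdf' },
  { type: 'image/png', filename: 'chart.png' },
]).then((files) => console.log(files.length)); // 2
```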
@@ -382,7 +410,7 @@ class AnthropicClient extends BaseClient {
       };
     }

-    const files = await this.addImageURLs(latestMessage, attachments);
+    const files = await this.processAttachments(latestMessage, attachments);

     this.options.attachments = files;
   }
@@ -941,7 +969,7 @@ class AnthropicClient extends BaseClient {
     const content = `<conversation_context>
 ${convo}
 </conversation_context>


 Please generate a title for this conversation.`;

     const titleMessage = { role: 'user', content };
@@ -1233,7 +1233,7 @@ class BaseClient {
         {},
       );

-      await this.addImageURLs(message, files, this.visionMode);
+      await this.processAttachments(message, files, this.visionMode);

       this.message_file_map[message.messageId] = files;
       return message;
@@ -268,7 +268,7 @@ class GoogleClient extends BaseClient {
     const formattedMessages = [];
     const attachments = await this.options.attachments;
     const latestMessage = { ...messages[messages.length - 1] };
-    const files = await this.addImageURLs(latestMessage, attachments, VisionModes.generative);
+    const files = await this.processAttachments(latestMessage, attachments, VisionModes.generative);
     this.options.attachments = files;
     messages[messages.length - 1] = latestMessage;
@@ -312,6 +312,20 @@ class GoogleClient extends BaseClient {
     return files;
   }

+  // eslint-disable-next-line no-unused-vars
+  async addDocuments(message, attachments) {
+    // GoogleClient doesn't support document processing yet
+    // Return empty results for consistency
+    return [];
+  }
+
+  async processAttachments(message, attachments, mode = '') {
+    // For GoogleClient, only process images
+    const imageFiles = await this.addImageURLs(message, attachments, mode);
+    const documentFiles = await this.addDocuments(message, attachments);
+    return [...imageFiles, ...documentFiles];
+  }
+
   /**
    * Builds the augmented prompt for attachments
    * TODO: Add File API Support
@@ -345,7 +359,7 @@ class GoogleClient extends BaseClient {

     const { prompt } = await this.buildMessagesPrompt(messages, parentMessageId);

-    const files = await this.addImageURLs(latestMessage, attachments);
+    const files = await this.processAttachments(latestMessage, attachments);

     this.options.attachments = files;
@@ -372,6 +372,19 @@ class OpenAIClient extends BaseClient {
     return files;
   }

+  async addDocuments(message, attachments) {
+    // OpenAI doesn't support native document processing yet
+    // Return empty results for consistency
+    return [];
+  }
+
+  async processAttachments(message, attachments) {
+    // For OpenAI, only process images
+    const imageFiles = await this.addImageURLs(message, attachments);
+    const documentFiles = await this.addDocuments(message, attachments);
+    return [...imageFiles, ...documentFiles];
+  }
+
   async buildMessages(messages, parentMessageId, { promptPrefix = null }, opts) {
     let orderedMessages = this.constructor.getMessagesForConversation({
       messages,
@@ -400,7 +413,7 @@ class OpenAIClient extends BaseClient {
       };
     }

-    const files = await this.addImageURLs(
+    const files = await this.processAttachments(
       orderedMessages[orderedMessages.length - 1],
       attachments,
     );
@@ -3,24 +3,61 @@ const { EModelEndpoint, ContentTypes } = require('librechat-data-provider');
 const { HumanMessage, AIMessage, SystemMessage } = require('@langchain/core/messages');

 /**
- * Formats a message to OpenAI Vision API payload format.
+ * Formats a message with document attachments for specific endpoints.
  *
  * @param {Object} params - The parameters for formatting.
  * @param {Object} params.message - The message object to format.
  * @param {string} [params.message.role] - The role of the message sender (must be 'user').
  * @param {string} [params.message.content] - The text content of the message.
+ * @param {Array<Object>} [params.documents] - The document attachments for the message.
  * @param {EModelEndpoint} [params.endpoint] - Identifier for specific endpoint handling
  * @returns {(Object)} - The formatted message.
  */
+const formatDocumentMessage = ({ message, documents, endpoint }) => {
+  const contentParts = [];
+
+  // Add documents first (for Anthropic PDFs)
+  if (documents && documents.length > 0) {
+    contentParts.push(...documents);
+  }
+
+  // Add text content
+  contentParts.push({ type: ContentTypes.TEXT, text: message.content });
+
+  if (endpoint === EModelEndpoint.anthropic) {
+    message.content = contentParts;
+    return message;
+  }
+
+  // For other endpoints, might need different handling
+  message.content = contentParts;
+  return message;
+};
+
+/**
+ * Formats a message with vision capabilities (image_urls) for specific endpoints.
+ *
+ * @param {Object} params - The parameters for formatting.
+ * @param {Object} params.message - The message object to format.
+ * @param {Array<string>} [params.image_urls] - The image_urls to attach to the message.
+ * @param {EModelEndpoint} [params.endpoint] - Identifier for specific endpoint handling
+ * @returns {(Object)} - The formatted message.
+ */
 const formatVisionMessage = ({ message, image_urls, endpoint }) => {
+  const contentParts = [];
+
+  // Add images
+  if (image_urls && image_urls.length > 0) {
+    contentParts.push(...image_urls);
+  }
+
+  // Add text content
+  contentParts.push({ type: ContentTypes.TEXT, text: message.content });
+
   if (endpoint === EModelEndpoint.anthropic) {
-    message.content = [...image_urls, { type: ContentTypes.TEXT, text: message.content }];
+    message.content = contentParts;
     return message;
   }

   message.content = [{ type: ContentTypes.TEXT, text: message.content }, ...image_urls];

   return message;
 };
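The practical effect of `formatDocumentMessage` for Anthropic is the content ordering: document parts come first, followed by a single text part. A minimal sketch of that ordering, with a placeholder base64 payload and plain string literals in place of the `ContentTypes` enum:

```js
// Placeholder document part; real data comes from encodeAndFormatDocuments.
const documents = [
  { type: 'document', source: { type: 'base64', media_type: 'application/pdf', data: '<base64>' } },
];
const message = { role: 'user', content: 'Summarize the attached report.' };

// Same ordering the function builds: documents, then the text part.
const contentParts = [...documents, { type: 'text', text: message.content }];
console.log(contentParts.map((part) => part.type)); // [ 'document', 'text' ]
```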
@@ -58,7 +95,18 @@ const formatMessage = ({ message, userName, assistantName, endpoint, langChain =
     content,
   };

-  const { image_urls } = message;
+  const { image_urls, documents } = message;
+
+  // Handle documents
+  if (Array.isArray(documents) && documents.length > 0 && role === 'user') {
+    return formatDocumentMessage({
+      message: formattedMessage,
+      documents: message.documents,
+      endpoint,
+    });
+  }
+
+  // Handle images
   if (Array.isArray(image_urls) && image_urls.length > 0 && role === 'user') {
     return formatVisionMessage({
       message: formattedMessage,
@@ -146,7 +194,21 @@ const formatAgentMessages = (payload) => {
       message.content = [{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: message.content }];
     }
     if (message.role !== 'assistant') {
-      messages.push(formatMessage({ message, langChain: true }));
+      // Check if message has documents and preserve array structure
+      const hasDocuments =
+        Array.isArray(message.content) &&
+        message.content.some((part) => part && part.type === 'document');
+
+      if (hasDocuments && message.role === 'user') {
+        // For user messages with documents, create HumanMessage directly with array content
+        messages.push(new HumanMessage({ content: message.content }));
+      } else if (hasDocuments && message.role === 'system') {
+        // For system messages with documents, create SystemMessage directly with array content
+        messages.push(new SystemMessage({ content: message.content }));
+      } else {
+        // Use regular formatting for messages without documents
+        messages.push(formatMessage({ message, langChain: true }));
+      }
       continue;
     }
@@ -239,6 +301,8 @@ const formatAgentMessages = (payload) => {

 module.exports = {
   formatMessage,
+  formatDocumentMessage,
+  formatVisionMessage,
   formatFromLangChain,
   formatAgentMessages,
   formatLangChainMessages,
@@ -226,6 +226,42 @@ class AgentClient extends BaseClient {
     return files;
   }

+  async addDocuments(message, attachments) {
+    const documentResult =
+      await require('~/server/services/Files/documents').encodeAndFormatDocuments(
+        this.options.req,
+        attachments,
+        this.options.agent.provider,
+      );
+    message.documents =
+      documentResult.documents && documentResult.documents.length
+        ? documentResult.documents
+        : undefined;
+    return documentResult.files;
+  }
+
+  async processAttachments(message, attachments) {
+    const [imageFiles, documentFiles] = await Promise.all([
+      this.addImageURLs(message, attachments),
+      this.addDocuments(message, attachments),
+    ]);
+
+    const allFiles = [...imageFiles, ...documentFiles];
+    const seenFileIds = new Set();
+    const uniqueFiles = [];
+
+    for (const file of allFiles) {
+      if (file.file_id && !seenFileIds.has(file.file_id)) {
+        seenFileIds.add(file.file_id);
+        uniqueFiles.push(file);
+      } else if (!file.file_id) {
+        uniqueFiles.push(file);
+      }
+    }
+
+    return uniqueFiles;
+  }
+
   async buildMessages(
     messages,
     parentMessageId,
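The de-duplication step matters because a single attachment can surface from both the image and the document encoder. A small standalone sketch of the same `file_id`-based filtering, with made-up sample data:

```js
// Keep the first occurrence of each file_id; files without an id always pass through.
function dedupeByFileId(files) {
  const seen = new Set();
  const unique = [];
  for (const file of files) {
    if (!file.file_id) {
      unique.push(file);
    } else if (!seen.has(file.file_id)) {
      seen.add(file.file_id);
      unique.push(file);
    }
  }
  return unique;
}

// Example: the PDF appears in both encoder outputs but is kept only once.
console.log(
  dedupeByFileId([
    { file_id: 'abc', filename: 'report.pdf' },
    { file_id: 'abc', filename: 'report.pdf' },
    { filename: 'pasted-image' },
  ]).length,
); // 2
```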
@@ -259,7 +295,7 @@ class AgentClient extends BaseClient {
       };
     }

-    const files = await this.addImageURLs(
+    const files = await this.processAttachments(
       orderedMessages[orderedMessages.length - 1],
       attachments,
     );
@@ -282,6 +318,23 @@ class AgentClient extends BaseClient {
         assistantName: this.options?.modelLabel,
       });

+      if (
+        message.documents &&
+        message.documents.length > 0 &&
+        message.role === 'user' &&
+        this.options.agent.provider === EModelEndpoint.anthropic
+      ) {
+        const contentParts = [];
+        contentParts.push(...message.documents);
+        if (message.image_urls && message.image_urls.length > 0) {
+          contentParts.push(...message.image_urls);
+        }
+        const textContent =
+          typeof formattedMessage.content === 'string' ? formattedMessage.content : '';
+        contentParts.push({ type: 'text', text: textContent });
+        formattedMessage.content = contentParts;
+      }
+
       if (message.ocr && i !== orderedMessages.length - 1) {
         if (typeof formattedMessage.content === 'string') {
           formattedMessage.content = message.ocr + '\n' + formattedMessage.content;
@@ -777,6 +830,51 @@ class AgentClient extends BaseClient {
     };

     const toolSet = new Set((this.options.agent.tools ?? []).map((tool) => tool && tool.name));

+    if (
+      this.options.agent.provider === EModelEndpoint.anthropic &&
+      payload &&
+      Array.isArray(payload)
+    ) {
+      let userMessageWithDocs = null;
+
+      if (this.userMessage?.documents) {
+        userMessageWithDocs = this.userMessage;
+      } else if (this.currentMessages?.length > 0) {
+        const lastMessage = this.currentMessages[this.currentMessages.length - 1];
+        if (lastMessage.documents?.length > 0) {
+          userMessageWithDocs = lastMessage;
+        }
+      } else if (this.messages?.length > 0) {
+        const lastMessage = this.messages[this.messages.length - 1];
+        if (lastMessage.documents?.length > 0) {
+          userMessageWithDocs = lastMessage;
+        }
+      }
+
+      if (userMessageWithDocs) {
+        for (const payloadMessage of payload) {
+          if (
+            payloadMessage.role === 'user' &&
+            userMessageWithDocs.text === payloadMessage.content
+          ) {
+            if (typeof payloadMessage.content === 'string') {
+              payloadMessage.content = [
+                ...userMessageWithDocs.documents,
+                { type: 'text', text: payloadMessage.content },
+              ];
+            } else if (Array.isArray(payloadMessage.content)) {
+              payloadMessage.content = [
+                ...userMessageWithDocs.documents,
+                ...payloadMessage.content,
+              ];
+            }
+            break;
+          }
+        }
+      }
+    }
+
     let { messages: initialMessages, indexTokenCountMap } = formatAgentMessages(
       payload,
       this.indexTokenCountMap,
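In outline, this block finds the payload entry that corresponds to the user message carrying documents (matched by its text) and prepends the document parts to that entry's content. A condensed standalone sketch of the same matching-and-prepending step, with made-up payload and documents:

```js
function injectDocuments(payload, userMessageWithDocs) {
  for (const payloadMessage of payload) {
    // Match the user turn whose text equals the stored message text.
    if (payloadMessage.role !== 'user' || userMessageWithDocs.text !== payloadMessage.content) {
      continue;
    }
    const textParts =
      typeof payloadMessage.content === 'string'
        ? [{ type: 'text', text: payloadMessage.content }]
        : payloadMessage.content;
    // Document parts go first, matching the ordering used elsewhere in the diff.
    payloadMessage.content = [...userMessageWithDocs.documents, ...textParts];
    break;
  }
  return payload;
}

// Example
const payload = [{ role: 'user', content: 'Summarize the attached report.' }];
const docs = [
  { type: 'document', source: { type: 'base64', media_type: 'application/pdf', data: '<base64>' } },
];
injectDocuments(payload, { text: 'Summarize the attached report.', documents: docs });
console.log(Array.isArray(payload[0].content)); // true
```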
api/server/services/Files/documents/encode.js (new file, 166 lines)
@@ -0,0 +1,166 @@
const { EModelEndpoint } = require('librechat-data-provider');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { validateAnthropicPdf } = require('../validation/pdfValidator');

/**
 * Converts a readable stream to a buffer.
 *
 * @param {NodeJS.ReadableStream} stream - The readable stream to convert.
 * @returns {Promise<Buffer>} - Promise resolving to the buffer.
 */
async function streamToBuffer(stream) {
  return new Promise((resolve, reject) => {
    const chunks = [];

    stream.on('data', (chunk) => {
      chunks.push(chunk);
    });

    stream.on('end', () => {
      try {
        const buffer = Buffer.concat(chunks);
        chunks.length = 0; // Clear the array
        resolve(buffer);
      } catch (err) {
        reject(err);
      }
    });

    stream.on('error', (error) => {
      chunks.length = 0;
      reject(error);
    });
  }).finally(() => {
    // Clean up the stream if required
    if (stream.destroy && typeof stream.destroy === 'function') {
      stream.destroy();
    }
  });
}

/**
 * Processes and encodes document files for various endpoints
 *
 * @param {Express.Request} req - Express request object
 * @param {MongoFile[]} files - Array of file objects to process
 * @param {string} endpoint - The endpoint identifier (e.g., EModelEndpoint.anthropic)
 * @returns {Promise<{documents: MessageContentDocument[], files: MongoFile[]}>}
 */
async function encodeAndFormatDocuments(req, files, endpoint) {
  const promises = [];
  /** @type {Record<FileSources, Pick<ReturnType<typeof getStrategyFunctions>, 'prepareDocumentPayload' | 'getDownloadStream'>>} */
  const encodingMethods = {};
  /** @type {{ documents: MessageContentDocument[]; files: MongoFile[] }} */
  const result = {
    documents: [],
    files: [],
  };

  if (!files || !files.length) {
    return result;
  }

  // Filter for document files only
  const documentFiles = files.filter(
    (file) => file.type === 'application/pdf' || file.type?.startsWith('application/'), // Future: support for other document types
  );

  if (!documentFiles.length) {
    return result;
  }

  for (let file of documentFiles) {
    /** @type {FileSources} */
    const source = file.source ?? 'local';

    // Only process PDFs for Anthropic for now
    if (file.type !== 'application/pdf' || endpoint !== EModelEndpoint.anthropic) {
      continue;
    }

    if (!encodingMethods[source]) {
      encodingMethods[source] = getStrategyFunctions(source);
    }

    // Prepare file metadata
    const fileMetadata = {
      file_id: file.file_id || file._id,
      temp_file_id: file.temp_file_id,
      filepath: file.filepath,
      source: file.source,
      filename: file.filename,
      type: file.type,
    };

    promises.push([file, fileMetadata]);
  }

  const results = await Promise.allSettled(
    promises.map(async ([file, fileMetadata]) => {
      if (!file || !fileMetadata) {
        return { file: null, content: null, metadata: fileMetadata };
      }

      try {
        const source = file.source ?? 'local';
        const { getDownloadStream } = encodingMethods[source];

        const stream = await getDownloadStream(req, file.filepath);
        const buffer = await streamToBuffer(stream);
        const documentContent = buffer.toString('base64');

        return {
          file,
          content: documentContent,
          metadata: fileMetadata,
        };
      } catch (error) {
        console.error(`Error processing document ${file.filename}:`, error);
        return { file, content: null, metadata: fileMetadata };
      }
    }),
  );

  for (const settledResult of results) {
    if (settledResult.status === 'rejected') {
      console.error('Document processing failed:', settledResult.reason);
      continue;
    }

    const { file, content, metadata } = settledResult.value;

    if (!content || !file) {
      if (metadata) {
        result.files.push(metadata);
      }
      continue;
    }

    if (file.type === 'application/pdf' && endpoint === EModelEndpoint.anthropic) {
      const pdfBuffer = Buffer.from(content, 'base64');
      const validation = await validateAnthropicPdf(pdfBuffer, pdfBuffer.length);

      if (!validation.isValid) {
        throw new Error(`PDF validation failed: ${validation.error}`);
      }

      const documentPart = {
        type: 'document',
        source: {
          type: 'base64',
          media_type: 'application/pdf',
          data: content,
        },
      };

      result.documents.push(documentPart);
      result.files.push(metadata);
    }
  }

  return result;
}

module.exports = {
  encodeAndFormatDocuments,
};
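`encodeAndFormatDocuments` resolves to `{ documents, files }`: each entry in `documents` is an Anthropic-style base64 document block, and `files` carries the metadata the client stores on the message. A sketch of a resolved value assembled by hand (the base64 payload is truncated and the file object is illustrative, not taken from the PR):

```js
// Shape of a resolved result; real data comes from the download stream.
const result = {
  documents: [
    {
      type: 'document',
      source: { type: 'base64', media_type: 'application/pdf', data: 'JVBERi0xLjQK...' },
    },
  ],
  files: [{ file_id: 'abc123', filename: 'report.pdf', type: 'application/pdf', source: 'local' }],
};

// Mirrors what AnthropicClient.addDocuments does with the result.
const message = { role: 'user', content: 'Summarize the attached report.' };
message.documents = result.documents.length ? result.documents : undefined;
const filesForOptions = result.files;
console.log(message.documents.length, filesForOptions.length); // 1 1
```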
api/server/services/Files/documents/index.js (new file, 5 lines)
@@ -0,0 +1,5 @@
const { encodeAndFormatDocuments } = require('./encode');

module.exports = {
  encodeAndFormatDocuments,
};
@@ -391,7 +391,17 @@ const processFileUpload = async ({ req, res, metadata }) => {
   const isAssistantUpload = isAssistantsEndpoint(metadata.endpoint);
   const assistantSource =
     metadata.endpoint === EModelEndpoint.azureAssistants ? FileSources.azure : FileSources.openai;
-  const source = isAssistantUpload ? assistantSource : FileSources.vectordb;
+
+  // Use local storage for Anthropic native PDF support, vectordb for others
+  const isAnthropicUpload = metadata.endpoint === EModelEndpoint.anthropic;
+  let source;
+  if (isAssistantUpload) {
+    source = assistantSource;
+  } else if (isAnthropicUpload) {
+    source = FileSources.local;
+  } else {
+    source = FileSources.vectordb;
+  }
   const { handleFileUpload } = getStrategyFunctions(source);
   const { file_id, temp_file_id } = metadata;
api/server/services/Files/validation/pdfValidator.js (new file, 77 lines)
@@ -0,0 +1,77 @@
const { logger } = require('~/config');
const { anthropicPdfSizeLimit } = require('librechat-data-provider');

/**
 * Validates if a PDF meets Anthropic's requirements
 * @param {Buffer} pdfBuffer - The PDF file as a buffer
 * @param {number} fileSize - The file size in bytes
 * @returns {Promise<{isValid: boolean, error?: string}>}
 */
async function validateAnthropicPdf(pdfBuffer, fileSize) {
  try {
    // Check file size (32MB limit)
    if (fileSize > anthropicPdfSizeLimit) {
      return {
        isValid: false,
        error: `PDF file size (${Math.round(fileSize / (1024 * 1024))}MB) exceeds Anthropic's 32MB limit`,
      };
    }

    // Basic PDF header validation
    if (!pdfBuffer || pdfBuffer.length < 5) {
      return {
        isValid: false,
        error: 'Invalid PDF file: too small or corrupted',
      };
    }

    // Check PDF magic bytes
    const pdfHeader = pdfBuffer.subarray(0, 5).toString();
    if (!pdfHeader.startsWith('%PDF-')) {
      return {
        isValid: false,
        error: 'Invalid PDF file: missing PDF header',
      };
    }

    // Check for password protection/encryption
    const pdfContent = pdfBuffer.toString('binary');
    if (
      pdfContent.includes('/Encrypt ') ||
      pdfContent.includes('/U (') ||
      pdfContent.includes('/O (')
    ) {
      return {
        isValid: false,
        error: 'PDF is password-protected or encrypted. Anthropic requires unencrypted PDFs.',
      };
    }

    // Estimate page count (this is a rough estimation)
    const pageMatches = pdfContent.match(/\/Type[\s]*\/Page[^s]/g);
    const estimatedPages = pageMatches ? pageMatches.length : 1;

    if (estimatedPages > 100) {
      return {
        isValid: false,
        error: `PDF has approximately ${estimatedPages} pages, exceeding Anthropic's 100-page limit`,
      };
    }

    logger.debug(
      `PDF validation passed: ${Math.round(fileSize / 1024)}KB, ~${estimatedPages} pages`,
    );

    return { isValid: true };
  } catch (error) {
    logger.error('PDF validation error:', error);
    return {
      isValid: false,
      error: 'Failed to validate PDF file',
    };
  }
}

module.exports = {
  validateAnthropicPdf,
};
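A quick way to exercise the validator is to hand it a buffer read from disk. This is a hypothetical usage sketch, assuming the `~` path alias resolves the same way it does for the other API-server modules in this diff:

```js
// Hypothetical usage; the require path assumes the API server's '~' alias.
const { validateAnthropicPdf } = require('~/server/services/Files/validation/pdfValidator');
const fs = require('fs/promises');

async function checkLocalPdf(path) {
  const buffer = await fs.readFile(path);
  const { isValid, error } = await validateAnthropicPdf(buffer, buffer.length);
  if (!isValid) {
    throw new Error(`Rejected: ${error}`);
  }
  return buffer;
}

// e.g. checkLocalPdf('./report.pdf').catch(console.error);
```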
@@ -36,6 +36,7 @@ function AttachFileChat({ disableInputs }: { disableInputs: boolean }) {
       disabled={disableInputs}
       conversationId={conversationId}
       endpointFileConfig={endpointFileConfig}
+      endpoint={endpoint}
     />
   );
 }
@@ -1,7 +1,7 @@
 import React, { useRef, useState, useMemo } from 'react';
 import * as Ariakit from '@ariakit/react';
 import { useSetRecoilState } from 'recoil';
-import { FileSearch, ImageUpIcon, TerminalSquareIcon, FileType2Icon } from 'lucide-react';
+import { FileSearch, ImageUpIcon, TerminalSquareIcon, FileType2Icon, FileText } from 'lucide-react';
 import { FileUpload, TooltipAnchor, DropdownPopup, AttachmentIcon } from '@librechat/client';
 import { EToolResources, EModelEndpoint, defaultAgentCapabilities } from 'librechat-data-provider';
 import type { EndpointFileConfig } from 'librechat-data-provider';
@@ -13,9 +13,15 @@ interface AttachFileMenuProps {
   conversationId: string;
   disabled?: boolean | null;
   endpointFileConfig?: EndpointFileConfig;
+  endpoint?: string | null;
 }

-const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: AttachFileMenuProps) => {
+const AttachFileMenu = ({
+  disabled,
+  conversationId,
+  endpointFileConfig,
+  endpoint,
+}: AttachFileMenuProps) => {
   const localize = useLocalize();
   const isUploadDisabled = disabled ?? false;
   const inputRef = useRef<HTMLInputElement>(null);
@@ -23,7 +29,7 @@ const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: Attach
   const setEphemeralAgent = useSetRecoilState(ephemeralAgentByConvoId(conversationId));
   const [toolResource, setToolResource] = useState<EToolResources | undefined>();
   const { handleFileChange } = useFileHandling({
-    overrideEndpoint: EModelEndpoint.agents,
+    overrideEndpoint: endpoint === EModelEndpoint.anthropic ? undefined : EModelEndpoint.agents,
     overrideEndpointFileConfig: endpointFileConfig,
   });
@@ -34,12 +40,18 @@ const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: Attach
    * */
   const capabilities = useAgentCapabilities(agentsConfig?.capabilities ?? defaultAgentCapabilities);

-  const handleUploadClick = (isImage?: boolean) => {
+  const handleUploadClick = (fileType?: 'image' | 'document') => {
     if (!inputRef.current) {
       return;
     }
     inputRef.current.value = '';
-    inputRef.current.accept = isImage === true ? 'image/*' : '';
+    if (fileType === 'image') {
+      inputRef.current.accept = 'image/*';
+    } else if (fileType === 'document') {
+      inputRef.current.accept = '.pdf,application/pdf';
+    } else {
+      inputRef.current.accept = '';
+    }
     inputRef.current.click();
     inputRef.current.accept = '';
   };
@@ -50,12 +62,24 @@ const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: Attach
         label: localize('com_ui_upload_image_input'),
         onClick: () => {
           setToolResource(undefined);
-          handleUploadClick(true);
+          handleUploadClick('image');
         },
         icon: <ImageUpIcon className="icon-md" />,
       },
     ];

+    // Add document upload option for Anthropic endpoints
+    if (endpoint === EModelEndpoint.anthropic) {
+      items.push({
+        label: 'Upload to Provider',
+        onClick: () => {
+          setToolResource(undefined);
+          handleUploadClick('document');
+        },
+        icon: <FileText className="icon-md" />,
+      });
+    }
+
     if (capabilities.ocrEnabled) {
       items.push({
         label: localize('com_ui_upload_ocr_text'),
@@ -95,7 +119,7 @@ const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: Attach
     }

     return items;
-  }, [capabilities, localize, setToolResource, setEphemeralAgent]);
+  }, [capabilities, localize, setToolResource, setEphemeralAgent, endpoint]);

   const menuTrigger = (
     <TooltipAnchor
@@ -169,6 +169,10 @@ export const megabyte = 1024 * 1024;
 export const mbToBytes = (mb: number): number => mb * megabyte;

 const defaultSizeLimit = mbToBytes(512);

+// Anthropic PDF limits: 32MB max, 100 pages max
+export const anthropicPdfSizeLimit = mbToBytes(32);
+
 const assistantsFileConfig = {
   fileLimit: 10,
   fileSizeLimit: defaultSizeLimit,
@@ -182,6 +186,14 @@ export const fileConfig = {
   [EModelEndpoint.assistants]: assistantsFileConfig,
   [EModelEndpoint.azureAssistants]: assistantsFileConfig,
   [EModelEndpoint.agents]: assistantsFileConfig,
+  [EModelEndpoint.anthropic]: {
+    fileLimit: 10,
+    fileSizeLimit: defaultSizeLimit,
+    totalSizeLimit: defaultSizeLimit,
+    supportedMimeTypes,
+    disabled: false,
+    pdfSizeLimit: anthropicPdfSizeLimit,
+  },
   default: {
     fileLimit: 10,
     fileSizeLimit: defaultSizeLimit,
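One way the new per-endpoint entry might be consumed is a lookup that falls back to `default` when an endpoint has no override; with this change, the Anthropic entry also exposes `pdfSizeLimit`. This is a sketch with a stubbed config object, not the package's actual accessor:

```js
// Stand-in for the real config, showing only the fields relevant here.
const megabyte = 1024 * 1024;
const fileConfig = {
  anthropic: { fileLimit: 10, fileSizeLimit: 512 * megabyte, pdfSizeLimit: 32 * megabyte },
  default: { fileLimit: 10, fileSizeLimit: 512 * megabyte },
};

// Hypothetical helper: fall back to the default entry when an endpoint has no override.
function getEndpointFileConfig(endpoint) {
  return fileConfig[endpoint] ?? fileConfig.default;
}

console.log(getEndpointFileConfig('anthropic').pdfSizeLimit / megabyte); // 32
console.log(getEndpointFileConfig('openAI').pdfSizeLimit); // undefined (no PDF cap outside anthropic)
```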