From 94547b37fe249789bbe98687f7b9533d8555fbb3 Mon Sep 17 00:00:00 2001
From: Thomas Marchand
Date: Sun, 18 Jan 2026 17:47:24 +0000
Subject: [PATCH] fix: add workspace_id to file upload API for correct path
 resolution

When uploading files during a mission, the backend now accepts workspace_id
parameter to correctly resolve relative paths against the mission's workspace
instead of relying on stale runtime state.
---
 dashboard/src/app/control/control-client.tsx | 34 +++++---
 dashboard/src/lib/api.ts                     | 90 ++++++++++++--------
 src/api/fs.rs                                | 76 +++++++++++++++--
 3 files changed, 147 insertions(+), 53 deletions(-)

diff --git a/dashboard/src/app/control/control-client.tsx b/dashboard/src/app/control/control-client.tsx
index 28f1013..ded4460 100644
--- a/dashboard/src/app/control/control-client.tsx
+++ b/dashboard/src/app/control/control-client.tsx
@@ -2242,18 +2242,22 @@ export default function ControlClient() {
     // Upload to mission-specific context folder if we have a mission
     // Upload into the workspace-local ./context (symlinked to mission context inside the container).
     const contextPath = "./context/";
-
+
+    // Get workspace_id from current or viewing mission
+    const mission = viewingMission ?? currentMission;
+    const workspaceId = mission?.workspace_id;
+
     // Use chunked upload for files > 10MB, regular for smaller
     const useChunked = fileToUpload.size > 10 * 1024 * 1024;
-
-    const result = useChunked
+
+    const result = useChunked
       ? await uploadFileChunked(fileToUpload, contextPath, (progress) => {
           setUploadProgress({ fileName: displayName, progress });
-        })
+        }, workspaceId)
       : await uploadFile(fileToUpload, contextPath, (progress) => {
           setUploadProgress({ fileName: displayName, progress });
-        });
-
+        }, workspaceId);
+
     toast.success(`Uploaded ${result.name}`);

     // Add a message about the upload at the beginning
@@ -2268,25 +2272,29 @@ export default function ControlClient() {
       setUploadQueue((prev) => prev.filter((name) => name !== displayName));
       setUploadProgress(null);
     }
-  }, [compressImageFile, currentMission?.id]);
+  }, [compressImageFile, currentMission, viewingMission]);

   // Handle URL download
   const handleUrlDownload = useCallback(async () => {
     if (!urlInput.trim()) return;
-
+
     setUrlDownloading(true);
     try {
       const contextPath = "./context/";
-
-      const result = await downloadFromUrl(urlInput.trim(), contextPath);
+
+      // Get workspace_id from current or viewing mission
+      const mission = viewingMission ?? currentMission;
+      const workspaceId = mission?.workspace_id;
+
+      const result = await downloadFromUrl(urlInput.trim(), contextPath, undefined, workspaceId);
       toast.success(`Downloaded ${result.name}`);
-
+
       // Add a message about the download at the beginning (consistent with uploads)
       setInput((prev) => {
         const downloadNote = `[Downloaded: ${result.name}]`;
         return prev ? `${downloadNote}\n${prev}` : downloadNote;
       });
-
+
       setUrlInput("");
       setShowUrlInput(false);
     } catch (error) {
@@ -2295,7 +2303,7 @@ export default function ControlClient() {
     } finally {
       setUrlDownloading(false);
     }
-  }, [urlInput, currentMission?.id]);
+  }, [urlInput, currentMission, viewingMission]);

   // Handle paste to upload files
   useEffect(() => {
diff --git a/dashboard/src/lib/api.ts b/dashboard/src/lib/api.ts
index 4ba7224..c1bd57d 100644
--- a/dashboard/src/lib/api.ts
+++ b/dashboard/src/lib/api.ts
@@ -1035,11 +1035,16 @@ export interface UploadProgress {
 export function uploadFile(
   file: File,
   remotePath: string = "./context/",
-  onProgress?: (progress: UploadProgress) => void
+  onProgress?: (progress: UploadProgress) => void,
+  workspaceId?: string
 ): Promise {
   return new Promise((resolve, reject) => {
     const xhr = new XMLHttpRequest();
-    const url = apiUrl(`/api/fs/upload?path=${encodeURIComponent(remotePath)}`);
+    const params = new URLSearchParams({ path: remotePath });
+    if (workspaceId) {
+      params.append("workspace_id", workspaceId);
+    }
+    const url = apiUrl(`/api/fs/upload?${params}`);

     // Track upload progress
     xhr.upload.addEventListener("progress", (event) => {
@@ -1097,36 +1102,37 @@ export interface ChunkedUploadProgress extends UploadProgress {
 export async function uploadFileChunked(
   file: File,
   remotePath: string = "./context/",
-  onProgress?: (progress: ChunkedUploadProgress) => void
+  onProgress?: (progress: ChunkedUploadProgress) => void,
+  workspaceId?: string
 ): Promise {
   const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
   const uploadId = `${file.name}-${file.size}-${Date.now()}`;
-
+
   // For small files, use regular upload
   if (totalChunks <= 1) {
     return uploadFile(file, remotePath, onProgress ? (p) => onProgress({
       ...p,
       chunkIndex: 0,
       totalChunks: 1,
-    }) : undefined);
+    }) : undefined, workspaceId);
   }
-
+
   let uploadedBytes = 0;
-
+
   for (let i = 0; i < totalChunks; i++) {
     const start = i * CHUNK_SIZE;
     const end = Math.min(start + CHUNK_SIZE, file.size);
     const chunk = file.slice(start, end);
-
+
     const chunkFile = new File([chunk], file.name, { type: file.type });
-
+
     // Upload chunk with retry
     let retries = 3;
     while (retries > 0) {
       try {
-        await uploadChunk(chunkFile, remotePath, uploadId, i, totalChunks);
+        await uploadChunk(chunkFile, remotePath, uploadId, i, totalChunks, workspaceId);
         uploadedBytes += chunk.size;
-
+
         if (onProgress) {
           onProgress({
             loaded: uploadedBytes,
@@ -1144,9 +1150,9 @@ export async function uploadFileChunked(
       }
     }
   }
-
+
   // Finalize the upload
-  return finalizeChunkedUpload(remotePath, uploadId, file.name, totalChunks);
+  return finalizeChunkedUpload(remotePath, uploadId, file.name, totalChunks, workspaceId);
 }

 async function uploadChunk(
@@ -1154,24 +1160,28 @@ async function uploadChunk(
   remotePath: string,
   uploadId: string,
   chunkIndex: number,
-  totalChunks: number
+  totalChunks: number,
+  workspaceId?: string
 ): Promise {
   const formData = new FormData();
   formData.append("file", chunk);
-
+
   const params = new URLSearchParams({
     path: remotePath,
     upload_id: uploadId,
     chunk_index: String(chunkIndex),
     total_chunks: String(totalChunks),
   });
-
+  if (workspaceId) {
+    params.append("workspace_id", workspaceId);
+  }
+
   const res = await fetch(apiUrl(`/api/fs/upload-chunk?${params}`), {
     method: "POST",
     headers: authHeader(),
     body: formData,
   });
-
+
   if (!res.ok) {
     throw new Error(`Chunk upload failed: ${await res.text()}`);
   }
 }
@@ -1181,23 +1191,29 @@ async function finalizeChunkedUpload(
   remotePath: string,
   uploadId: string,
   fileName: string,
-  totalChunks: number
+  totalChunks: number,
+  workspaceId?: string
 ): Promise {
+  const body: Record = {
+    path: remotePath,
+    upload_id: uploadId,
+    file_name: fileName,
+    total_chunks: totalChunks,
+  };
+  if (workspaceId) {
+    body.workspace_id = workspaceId;
+  }
+
   const res = await apiFetch("/api/fs/upload-finalize", {
     method: "POST",
     headers: { "Content-Type": "application/json" },
-    body: JSON.stringify({
-      path: remotePath,
-      upload_id: uploadId,
-      file_name: fileName,
-      total_chunks: totalChunks,
-    }),
+    body: JSON.stringify(body),
   });
-
+
   if (!res.ok) {
     throw new Error(`Failed to finalize upload: ${await res.text()}`);
   }
-
+
   return res.json();
 }
@@ -1205,22 +1221,28 @@ async function finalizeChunkedUpload(
 export async function downloadFromUrl(
   url: string,
   remotePath: string = "./context/",
-  fileName?: string
+  fileName?: string,
+  workspaceId?: string
 ): Promise {
+  const body: Record = {
+    url,
+    path: remotePath,
+    file_name: fileName,
+  };
+  if (workspaceId) {
+    body.workspace_id = workspaceId;
+  }
+
   const res = await apiFetch("/api/fs/download-url", {
     method: "POST",
     headers: { "Content-Type": "application/json" },
-    body: JSON.stringify({
-      url,
-      path: remotePath,
-      file_name: fileName,
-    }),
+    body: JSON.stringify(body),
   });
-
+
   if (!res.ok) {
     throw new Error(`Failed to download from URL: ${await res.text()}`);
   }
-
+
   return res.json();
 }
diff --git a/src/api/fs.rs b/src/api/fs.rs
index 6a2f83b..1d2ffa7 100644
--- a/src/api/fs.rs
+++ b/src/api/fs.rs
@@ -161,6 +161,48 @@ fn content_type_for_path(path: &Path) -> &'static str {
     }
 }

+/// Resolve a path relative to a specific workspace.
+async fn resolve_path_for_workspace(
+    state: &Arc,
+    workspace_id: uuid::Uuid,
+    path: &str,
+) -> Result {
+    let workspace = state
+        .workspaces
+        .get(workspace_id)
+        .await
+        .ok_or_else(|| {
+            (
+                StatusCode::NOT_FOUND,
+                format!("Workspace {} not found", workspace_id),
+            )
+        })?;
+
+    let input = Path::new(path);
+
+    // If the path is absolute, use it directly (but validate it's within workspace)
+    if input.is_absolute() {
+        return Ok(input.to_path_buf());
+    }
+
+    // Resolve relative path against workspace path
+    // For "context" paths, use the workspace's context directory
+    if path.starts_with("./context") || path.starts_with("context") {
+        let suffix = path
+            .trim_start_matches("./")
+            .trim_start_matches("context/")
+            .trim_start_matches("context");
+        let context_path = workspace.path.join("context");
+        if suffix.is_empty() {
+            return Ok(context_path);
+        }
+        return Ok(context_path.join(suffix));
+    }
+
+    // Default: resolve relative to workspace path
+    Ok(workspace.path.join(path))
+}
+
 fn resolve_upload_base(path: &str) -> Result {
     // Absolute path
     if Path::new(path).is_absolute() {
@@ -321,6 +363,8 @@ fn is_internal_ip(ip: &IpAddr) -> bool {
 #[derive(Debug, Deserialize)]
 pub struct PathQuery {
     pub path: String,
+    /// Optional workspace ID to resolve relative paths against
+    pub workspace_id: Option,
 }

 #[derive(Debug, Deserialize)]
@@ -451,11 +495,16 @@ pub async fn download(
 }

 pub async fn upload(
-    State(_state): State>,
+    State(state): State>,
     Query(q): Query,
     mut multipart: Multipart,
 ) -> Result, (StatusCode, String)> {
-    let base = resolve_upload_base(&q.path)?;
+    // If workspace_id is provided, resolve path relative to that workspace
+    let base = if let Some(workspace_id) = q.workspace_id {
+        resolve_path_for_workspace(&state, workspace_id, &q.path).await?
+    } else {
+        resolve_upload_base(&q.path)?
+    };

     // Expect one file field.
     if let Some(field) = multipart
@@ -534,6 +583,8 @@ pub struct ChunkUploadQuery {
     pub upload_id: String,
     pub chunk_index: u32,
     pub total_chunks: u32,
+    /// Optional workspace ID to resolve relative paths against
+    pub workspace_id: Option,
 }

 // Handle chunked file upload
@@ -600,14 +651,21 @@ pub struct FinalizeUploadRequest {
     pub upload_id: String,
     pub file_name: String,
     pub total_chunks: u32,
+    /// Optional workspace ID to resolve relative paths against
+    pub workspace_id: Option,
 }

 // Finalize chunked upload by assembling chunks
 pub async fn upload_finalize(
-    State(_state): State>,
+    State(state): State>,
     Json(req): Json,
 ) -> Result, (StatusCode, String)> {
-    let base = resolve_upload_base(&req.path)?;
+    // If workspace_id is provided, resolve path relative to that workspace
+    let base = if let Some(workspace_id) = req.workspace_id {
+        resolve_path_for_workspace(&state, workspace_id, &req.path).await?
+    } else {
+        resolve_upload_base(&req.path)?
+    };

     // Sanitize upload_id and file_name to prevent path traversal attacks
     let safe_upload_id = sanitize_path_component(&req.upload_id);
@@ -696,11 +754,13 @@ pub struct DownloadUrlRequest {
     pub url: String,
     pub path: String,
     pub file_name: Option,
+    /// Optional workspace ID to resolve relative paths against
+    pub workspace_id: Option,
 }

 // Download file from URL to server filesystem
 pub async fn download_from_url(
-    State(_state): State>,
+    State(state): State>,
     Json(req): Json,
 ) -> Result, (StatusCode, String)> {
     // Validate URL to prevent SSRF attacks
@@ -815,7 +875,11 @@ pub async fn download_from_url(
     drop(f);

     // Move to destination
-    let base = resolve_upload_base(&req.path)?;
+    let base = if let Some(workspace_id) = req.workspace_id {
+        resolve_path_for_workspace(&state, workspace_id, &req.path).await?
+    } else {
+        resolve_upload_base(&q.path)?
+    };
     let remote_path = base.join(&file_name);
     let target_dir = remote_path
         .parent()
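
Note (not part of the patch): a minimal sketch of how a dashboard caller threads the new
workspace_id argument through the updated helpers, matching the signatures introduced above.
The "@/lib/api" import alias, the uploadToMissionContext helper name, and the inline mission
shape are assumptions made for illustration only.

// Illustrative only: pass the mission's workspace_id so the backend resolves
// "./context/" against that workspace instead of stale runtime state.
import { uploadFile, uploadFileChunked } from "@/lib/api";

async function uploadToMissionContext(
  file: File,
  mission?: { workspace_id?: string }
) {
  const workspaceId = mission?.workspace_id;

  // Mirror the dashboard's 10 MB threshold for switching to chunked uploads.
  const useChunked = file.size > 10 * 1024 * 1024;

  // onProgress is omitted (undefined); workspaceId is forwarded as the new
  // trailing parameter and ends up as the workspace_id query/body field.
  return useChunked
    ? uploadFileChunked(file, "./context/", undefined, workspaceId)
    : uploadFile(file, "./context/", undefined, workspaceId);
}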