feat: upload progress bar, URL download, and chunked uploads
1. Upload progress bar - shows real-time progress with bytes/percentage
2. URL download - paste any URL, server downloads directly (faster for large files)
3. Chunked uploads - files >10MB split into 5MB chunks with retry (3 attempts)

Dashboard changes:
- Progress bar UI with bytes transferred
- Link icon button to paste URLs
- Uses chunked upload for large files automatically

Backend changes:
- /api/fs/upload-chunk - receives file chunks
- /api/fs/upload-finalize - assembles chunks into final file
- /api/fs/download-url - server downloads from URL to filesystem
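For orientation, a minimal usage sketch of the new client helpers (it uses only the signatures that appear in this diff; the file, URL, and paths are illustrative):

import { uploadFileChunked, downloadFromUrl, formatBytes } from "@/lib/api";

async function demo(file: File) {
  // Files >10MB are split into 5MB chunks, with up to 3 attempts per chunk.
  const uploaded = await uploadFileChunked(file, "/root/context/", (p) => {
    console.log(`chunk ${p.chunkIndex}/${p.totalChunks}: ${formatBytes(p.loaded)} / ${formatBytes(p.total)} (${p.percentage}%)`);
  });
  console.log(`uploaded ${uploaded.name}`);

  // Or have the server fetch a remote file straight to its filesystem.
  const fetched = await downloadFromUrl("https://example.com/big.tar.gz", "/root/context/");
  console.log(`downloaded ${fetched.name}`);
}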
@@ -17,6 +17,9 @@ import {
resumeMission,
getCurrentMission,
uploadFile,
uploadFileChunked,
downloadFromUrl,
formatBytes,
getProgress,
getRunningMissions,
cancelMission,
@@ -28,6 +31,7 @@ import {
type Mission,
type MissionStatus,
type RunningMissionInfo,
type UploadProgress,
} from "@/lib/api";
import {
Send,
@@ -52,6 +56,8 @@ import {
Layers,
RefreshCw,
PlayCircle,
Link2,
X,
} from "lucide-react";
import {
OptionList,
@@ -406,6 +412,13 @@ export default function ControlClient() {
{ file: File; uploading: boolean }[]
>([]);
const [uploadQueue, setUploadQueue] = useState<string[]>([]);
const [uploadProgress, setUploadProgress] = useState<{
fileName: string;
progress: UploadProgress;
} | null>(null);
const [showUrlInput, setShowUrlInput] = useState(false);
const [urlInput, setUrlInput] = useState("");
const [urlDownloading, setUrlDownloading] = useState(false);

// Model selection state
const [availableModels, setAvailableModels] = useState<string[]>([]);
@@ -499,13 +512,25 @@ export default function ControlClient() {
// Handle file upload - wrapped in useCallback to avoid stale closures
const handleFileUpload = useCallback(async (file: File) => {
setUploadQueue((prev) => [...prev, file.name]);
setUploadProgress({ fileName: file.name, progress: { loaded: 0, total: file.size, percentage: 0 } });

try {
// Upload to mission-specific context folder if we have a mission
const contextPath = currentMission?.id
? `/root/context/${currentMission.id}/`
: "/root/context/";
const result = await uploadFile(file, contextPath);

// Use chunked upload for files > 10MB, regular for smaller
const useChunked = file.size > 10 * 1024 * 1024;

const result = useChunked
? await uploadFileChunked(file, contextPath, (progress) => {
setUploadProgress({ fileName: file.name, progress });
})
: await uploadFile(file, contextPath, (progress) => {
setUploadProgress({ fileName: file.name, progress });
});

toast.success(`Uploaded ${result.name}`);

// Add a message about the upload
@@ -518,9 +543,39 @@ export default function ControlClient() {
toast.error(`Failed to upload ${file.name}`);
} finally {
setUploadQueue((prev) => prev.filter((name) => name !== file.name));
setUploadProgress(null);
}
}, [currentMission?.id]);

// Handle URL download
const handleUrlDownload = useCallback(async () => {
if (!urlInput.trim()) return;

setUrlDownloading(true);
try {
const contextPath = currentMission?.id
? `/root/context/${currentMission.id}/`
: "/root/context/";

const result = await downloadFromUrl(urlInput.trim(), contextPath);
toast.success(`Downloaded ${result.name}`);

// Add a message about the download
setInput((prev) => {
const uploadNote = `[Downloaded: ${result.name}]`;
return prev ? `${prev}\n${uploadNote}` : uploadNote;
});

setUrlInput("");
setShowUrlInput(false);
} catch (error) {
console.error("URL download failed:", error);
toast.error(`Failed to download from URL`);
} finally {
setUrlDownloading(false);
}
}, [urlInput, currentMission?.id]);

// Handle paste to upload files
useEffect(() => {
const textarea = textareaRef.current;
@@ -1812,8 +1867,32 @@ export default function ControlClient() {

{/* Input */}
<div className="border-t border-white/[0.06] bg-white/[0.01] p-4">
{/* Upload queue */}
{uploadQueue.length > 0 && (
{/* Upload progress */}
{uploadProgress && (
<div className="mx-auto max-w-3xl mb-3">
<div className="flex items-center gap-3 rounded-lg border border-white/[0.06] bg-white/[0.02] px-4 py-3">
<Loader className="h-4 w-4 animate-spin text-indigo-400" />
<div className="flex-1 min-w-0">
<div className="flex items-center justify-between text-sm mb-1">
<span className="text-white/70 truncate">{uploadProgress.fileName}</span>
<span className="text-white/50 ml-2 shrink-0">
{formatBytes(uploadProgress.progress.loaded)} / {formatBytes(uploadProgress.progress.total)}
</span>
</div>
<div className="h-1.5 bg-white/[0.06] rounded-full overflow-hidden">
<div
className="h-full bg-indigo-500 rounded-full transition-all duration-300"
style={{ width: `${uploadProgress.progress.percentage}%` }}
/>
</div>
</div>
<span className="text-sm text-white/50 shrink-0">{uploadProgress.progress.percentage}%</span>
</div>
</div>
)}

{/* Upload queue (for files waiting) */}
{uploadQueue.length > 0 && !uploadProgress && (
<div className="mx-auto max-w-3xl mb-3 flex flex-wrap gap-2">
{uploadQueue.map((name) => (
<AttachmentPreview
@@ -1825,18 +1904,78 @@ export default function ControlClient() {
</div>
)}

{/* URL Input */}
{showUrlInput && (
<div className="mx-auto max-w-3xl mb-3">
<div className="flex items-center gap-2 rounded-lg border border-white/[0.06] bg-white/[0.02] px-3 py-2">
<Link2 className="h-4 w-4 text-white/40 shrink-0" />
<input
type="url"
value={urlInput}
onChange={(e) => setUrlInput(e.target.value)}
placeholder="Paste URL to download (Dropbox, Google Drive, direct link...)"
className="flex-1 bg-transparent text-sm text-white placeholder:text-white/30 focus:outline-none"
autoFocus
onKeyDown={(e) => {
if (e.key === "Enter") {
e.preventDefault();
handleUrlDownload();
} else if (e.key === "Escape") {
setShowUrlInput(false);
setUrlInput("");
}
}}
/>
{urlDownloading ? (
<Loader className="h-4 w-4 animate-spin text-indigo-400" />
) : (
<>
<button
type="button"
onClick={handleUrlDownload}
disabled={!urlInput.trim()}
className="text-sm text-indigo-400 hover:text-indigo-300 disabled:text-white/20 disabled:cursor-not-allowed"
>
Download
</button>
<button
type="button"
onClick={() => { setShowUrlInput(false); setUrlInput(""); }}
className="text-white/40 hover:text-white/70"
>
<X className="h-4 w-4" />
</button>
</>
)}
</div>
<p className="text-xs text-white/30 mt-1.5 px-1">
Server will download the file directly — faster for large files
</p>
</div>
)}

<form
onSubmit={handleSubmit}
className="mx-auto flex max-w-3xl gap-3 items-end"
>
<button
type="button"
onClick={() => fileInputRef.current?.click()}
className="p-3 rounded-xl border border-white/[0.06] bg-white/[0.02] text-white/40 hover:text-white/70 hover:bg-white/[0.04] transition-colors shrink-0"
title="Attach files"
>
<Paperclip className="h-5 w-5" />
</button>
<div className="flex gap-1">
<button
type="button"
onClick={() => fileInputRef.current?.click()}
className="p-3 rounded-xl border border-white/[0.06] bg-white/[0.02] text-white/40 hover:text-white/70 hover:bg-white/[0.04] transition-colors shrink-0"
title="Attach files"
>
<Paperclip className="h-5 w-5" />
</button>
<button
type="button"
onClick={() => setShowUrlInput(!showUrlInput)}
className={`p-3 rounded-xl border border-white/[0.06] bg-white/[0.02] text-white/40 hover:text-white/70 hover:bg-white/[0.04] transition-colors shrink-0 ${showUrlInput ? 'text-indigo-400 border-indigo-500/30' : ''}`}
title="Download from URL"
>
<Link2 className="h-5 w-5" />
</button>
</div>

<textarea
ref={textareaRef}

@@ -726,29 +726,214 @@ export interface UploadResult {
name: string;
}

// Upload a file to the remote filesystem
export async function uploadFile(
file: File,
remotePath: string = "/root/context/"
): Promise<UploadResult> {
const formData = new FormData();
formData.append("file", file);
export interface UploadProgress {
loaded: number;
total: number;
percentage: number;
}

const url = apiUrl(`/api/fs/upload?path=${encodeURIComponent(remotePath)}`);
const res = await fetch(url, {
// Upload a file to the remote filesystem with progress tracking
export function uploadFile(
file: File,
remotePath: string = "/root/context/",
onProgress?: (progress: UploadProgress) => void
): Promise<UploadResult> {
return new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest();
const url = apiUrl(`/api/fs/upload?path=${encodeURIComponent(remotePath)}`);

// Track upload progress
xhr.upload.addEventListener("progress", (event) => {
if (event.lengthComputable && onProgress) {
onProgress({
loaded: event.loaded,
total: event.total,
percentage: Math.round((event.loaded / event.total) * 100),
});
}
});

xhr.addEventListener("load", () => {
if (xhr.status >= 200 && xhr.status < 300) {
try {
resolve(JSON.parse(xhr.responseText));
} catch {
reject(new Error("Invalid response from server"));
}
} else {
reject(new Error(`Upload failed: ${xhr.responseText || xhr.statusText}`));
}
});

xhr.addEventListener("error", () => {
reject(new Error("Network error during upload"));
});

xhr.addEventListener("abort", () => {
reject(new Error("Upload cancelled"));
});

xhr.open("POST", url);

// Add auth header
const token = typeof window !== "undefined" ? sessionStorage.getItem("token") : null;
if (token) {
xhr.setRequestHeader("Authorization", `Bearer ${token}`);
}

const formData = new FormData();
formData.append("file", file);
xhr.send(formData);
});
}

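A note on the design choice above (not stated in the commit, but the standard rationale): fetch has no cross-browser upload-progress event, while XMLHttpRequest exposes "progress" events on xhr.upload with loaded/total byte counts, which is exactly what feeds UploadProgress; hence the rewrite from fetch to XHR for the non-chunked path.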
// Upload a file in chunks with per-chunk retry
const CHUNK_SIZE = 5 * 1024 * 1024; // 5MB chunks

export interface ChunkedUploadProgress extends UploadProgress {
chunkIndex: number;
totalChunks: number;
}

export async function uploadFileChunked(
file: File,
remotePath: string = "/root/context/",
onProgress?: (progress: ChunkedUploadProgress) => void
): Promise<UploadResult> {
const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
const uploadId = `${file.name}-${file.size}-${Date.now()}`;

// For small files, use regular upload
if (totalChunks <= 1) {
return uploadFile(file, remotePath, onProgress ? (p) => onProgress({
...p,
chunkIndex: 0,
totalChunks: 1,
}) : undefined);
}

let uploadedBytes = 0;

for (let i = 0; i < totalChunks; i++) {
const start = i * CHUNK_SIZE;
const end = Math.min(start + CHUNK_SIZE, file.size);
const chunk = file.slice(start, end);

const chunkFile = new File([chunk], file.name, { type: file.type });

// Upload chunk with retry
let retries = 3;
while (retries > 0) {
try {
await uploadChunk(chunkFile, remotePath, uploadId, i, totalChunks);
uploadedBytes += chunk.size;

if (onProgress) {
onProgress({
loaded: uploadedBytes,
total: file.size,
percentage: Math.round((uploadedBytes / file.size) * 100),
chunkIndex: i + 1,
totalChunks,
});
}
break;
} catch (err) {
retries--;
if (retries === 0) throw err;
await new Promise(r => setTimeout(r, 1000)); // Wait 1s before retry
}
}
}

// Finalize the upload
return finalizeChunkedUpload(remotePath, uploadId, file.name, totalChunks);
}

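To make the slicing above concrete, a small worked example (same CHUNK_SIZE, pure arithmetic):

// A 23 MB file: Math.ceil(23 / 5) = 5 chunks, four full 5 MB slices plus a final 3 MB slice.
const fileSize = 23 * 1024 * 1024;
const totalChunks = Math.ceil(fileSize / CHUNK_SIZE); // 5
for (let i = 0; i < totalChunks; i++) {
  const start = i * CHUNK_SIZE;
  const end = Math.min(start + CHUNK_SIZE, fileSize);
  console.log(`chunk ${i}: ${(end - start) / (1024 * 1024)} MB`); // 5, 5, 5, 5, 3
}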
async function uploadChunk(
chunk: File,
remotePath: string,
uploadId: string,
chunkIndex: number,
totalChunks: number
): Promise<void> {
const formData = new FormData();
formData.append("file", chunk);

const params = new URLSearchParams({
path: remotePath,
upload_id: uploadId,
chunk_index: String(chunkIndex),
total_chunks: String(totalChunks),
});

const res = await fetch(apiUrl(`/api/fs/upload-chunk?${params}`), {
method: "POST",
headers: authHeader(),
body: formData,
});

if (!res.ok) {
const text = await res.text();
throw new Error(`Failed to upload file: ${text}`);
throw new Error(`Chunk upload failed: ${await res.text()}`);
}
}

async function finalizeChunkedUpload(
remotePath: string,
uploadId: string,
fileName: string,
totalChunks: number
): Promise<UploadResult> {
const res = await apiFetch("/api/fs/upload-finalize", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
path: remotePath,
upload_id: uploadId,
file_name: fileName,
total_chunks: totalChunks,
}),
});

if (!res.ok) {
throw new Error(`Failed to finalize upload: ${await res.text()}`);
}

return res.json();
}

// Download file from URL to server filesystem
export async function downloadFromUrl(
url: string,
remotePath: string = "/root/context/",
fileName?: string
): Promise<UploadResult> {
const res = await apiFetch("/api/fs/download-url", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
url,
path: remotePath,
file_name: fileName,
}),
});

if (!res.ok) {
throw new Error(`Failed to download from URL: ${await res.text()}`);
}

return res.json();
}

// Format bytes for display
export function formatBytes(bytes: number): string {
if (bytes === 0) return "0 B";
const k = 1024;
const sizes = ["B", "KB", "MB", "GB"];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return `${(bytes / Math.pow(k, i)).toFixed(1)} ${sizes[i]}`;
}

// ==================== Models ====================

export interface ModelsResponse {

src/api/fs.rs (227 changed lines)
@@ -345,5 +345,232 @@ pub async fn upload(
Err((StatusCode::BAD_REQUEST, "missing file".to_string()))
}

// Chunked upload query params
#[derive(Debug, Deserialize)]
pub struct ChunkUploadQuery {
pub path: String,
pub upload_id: String,
pub chunk_index: u32,
pub total_chunks: u32,
}

// Handle chunked file upload
pub async fn upload_chunk(
State(state): State<Arc<AppState>>,
Query(q): Query<ChunkUploadQuery>,
mut multipart: Multipart,
) -> Result<Json<serde_json::Value>, (StatusCode, String)> {
// Store chunks in temp directory organized by upload_id
let chunk_dir = std::env::temp_dir().join(format!("open_agent_chunks_{}", q.upload_id));
tokio::fs::create_dir_all(&chunk_dir)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to create chunk dir: {}", e)))?;

while let Some(field) = multipart
.next_field()
.await
.map_err(|e| (StatusCode::BAD_REQUEST, e.to_string()))?
{
let chunk_path = chunk_dir.join(format!("chunk_{:06}", q.chunk_index));
let mut f = tokio::fs::File::create(&chunk_path)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;

let mut field = field;
while let Some(chunk) = field
.chunk()
.await
.map_err(|e| (StatusCode::BAD_REQUEST, e.to_string()))?
{
f.write_all(&chunk)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
}
f.flush().await.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;

return Ok(Json(serde_json::json!({
"ok": true,
"chunk_index": q.chunk_index,
"total_chunks": q.total_chunks,
})));
}

Err((StatusCode::BAD_REQUEST, "missing chunk data".to_string()))
}

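A small client-side mirror of the server's chunk naming, for illustration: the Rust side uses format!("chunk_{:06}", i), so the zero-padded names keep lexicographic order equal to chunk order on disk.

// Hypothetical helper mirroring the server's on-disk layout:
// $TMPDIR/open_agent_chunks_<upload_id>/chunk_000000, chunk_000001, ...
const chunkFileName = (index: number) => `chunk_${String(index).padStart(6, "0")}`;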
#[derive(Debug, Deserialize)]
pub struct FinalizeUploadRequest {
pub path: String,
pub upload_id: String,
pub file_name: String,
pub total_chunks: u32,
}

// Finalize chunked upload by assembling chunks
pub async fn upload_finalize(
State(state): State<Arc<AppState>>,
Json(req): Json<FinalizeUploadRequest>,
) -> Result<Json<serde_json::Value>, (StatusCode, String)> {
let (cfg, key_file) = get_key_and_cfg(&state).await?;

let chunk_dir = std::env::temp_dir().join(format!("open_agent_chunks_{}", req.upload_id));
let assembled_path = std::env::temp_dir().join(format!("open_agent_assembled_{}", req.upload_id));

// Assemble chunks into single file
let mut assembled = tokio::fs::File::create(&assembled_path)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to create assembled file: {}", e)))?;

for i in 0..req.total_chunks {
let chunk_path = chunk_dir.join(format!("chunk_{:06}", i));
let chunk_data = tokio::fs::read(&chunk_path)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to read chunk {}: {}", i, e)))?;
assembled.write_all(&chunk_data)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to write chunk {}: {}", i, e)))?;
}
assembled.flush().await.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
drop(assembled);

// Move assembled file to destination
let remote_path = if req.path.ends_with('/') {
format!("{}{}", req.path, req.file_name)
} else {
format!("{}/{}", req.path, req.file_name)
};

let target_dir = if req.path.ends_with('/') {
req.path.trim_end_matches('/').to_string()
} else {
req.path.clone()
};

if is_localhost(&cfg.host) {
tokio::fs::create_dir_all(&target_dir)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to create directory: {}", e)))?;

if tokio::fs::rename(&assembled_path, &remote_path).await.is_err() {
tokio::fs::copy(&assembled_path, &remote_path)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to copy file: {}", e)))?;
let _ = tokio::fs::remove_file(&assembled_path).await;
}
} else {
ssh_exec(&cfg, key_file.path(), "mkdir", &["-p".into(), target_dir])
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to create directory: {}", e)))?;

let batch = format!("put -p \"{}\" \"{}\"\n", assembled_path.to_string_lossy(), remote_path);
sftp_batch(&cfg, key_file.path(), &batch)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
let _ = tokio::fs::remove_file(&assembled_path).await;
}

// Cleanup chunk directory
let _ = tokio::fs::remove_dir_all(&chunk_dir).await;

Ok(Json(serde_json::json!({ "ok": true, "path": req.path, "name": req.file_name })))
}

#[derive(Debug, Deserialize)]
pub struct DownloadUrlRequest {
pub url: String,
pub path: String,
pub file_name: Option<String>,
}

// Download file from URL to server filesystem
pub async fn download_from_url(
State(state): State<Arc<AppState>>,
Json(req): Json<DownloadUrlRequest>,
) -> Result<Json<serde_json::Value>, (StatusCode, String)> {
let (cfg, key_file) = get_key_and_cfg(&state).await?;

// Download to temp file
let client = reqwest::Client::builder()
.timeout(std::time::Duration::from_secs(300)) // 5 min timeout
.build()
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to create HTTP client: {}", e)))?;

let response = client.get(&req.url)
.send()
.await
.map_err(|e| (StatusCode::BAD_REQUEST, format!("Failed to fetch URL: {}", e)))?;

if !response.status().is_success() {
return Err((StatusCode::BAD_REQUEST, format!("URL returned error: {}", response.status())));
}

// Try to get filename from Content-Disposition header or URL
let file_name = req.file_name.unwrap_or_else(|| {
response.headers()
.get("content-disposition")
.and_then(|h| h.to_str().ok())
.and_then(|s| {
s.split("filename=").nth(1)
.map(|f| f.trim_matches('"').trim_matches('\'').to_string())
})
.unwrap_or_else(|| {
req.url.split('/').last()
.and_then(|s| s.split('?').next())
.map(|s| s.to_string())
.unwrap_or_else(|| format!("download_{}", uuid::Uuid::new_v4()))
})
});

let tmp = std::env::temp_dir().join(format!("open_agent_url_{}", uuid::Uuid::new_v4()));
let mut f = tokio::fs::File::create(&tmp)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;

let bytes = response.bytes()
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to read response: {}", e)))?;

f.write_all(&bytes)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
f.flush().await.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
drop(f);

// Move to destination
let remote_path = if req.path.ends_with('/') {
format!("{}{}", req.path, file_name)
} else {
format!("{}/{}", req.path, file_name)
};

let target_dir = if req.path.ends_with('/') {
req.path.trim_end_matches('/').to_string()
} else {
req.path.clone()
};

if is_localhost(&cfg.host) {
tokio::fs::create_dir_all(&target_dir)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to create directory: {}", e)))?;

if tokio::fs::rename(&tmp, &remote_path).await.is_err() {
tokio::fs::copy(&tmp, &remote_path)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to copy file: {}", e)))?;
let _ = tokio::fs::remove_file(&tmp).await;
}
} else {
ssh_exec(&cfg, key_file.path(), "mkdir", &["-p".into(), target_dir])
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to create directory: {}", e)))?;

let batch = format!("put -p \"{}\" \"{}\"\n", tmp.to_string_lossy(), remote_path);
sftp_batch(&cfg, key_file.path(), &batch)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
let _ = tokio::fs::remove_file(&tmp).await;
}

Ok(Json(serde_json::json!({ "ok": true, "path": req.path, "name": file_name })))
}

@@ -100,9 +100,10 @@ pub async fn serve(config: Config) -> anyhow::Result<()> {
// WebSocket console uses subprotocol-based auth (browser can't set Authorization header)
.route("/api/console/ws", get(console::console_ws));

// File upload route with increased body limit (10GB)
// File upload routes with increased body limit (10GB)
let upload_route = Router::new()
.route("/api/fs/upload", post(fs::upload))
.route("/api/fs/upload-chunk", post(fs::upload_chunk))
.layer(DefaultBodyLimit::max(10 * 1024 * 1024 * 1024));

let protected_routes = Router::new()
@@ -144,6 +145,8 @@ pub async fn serve(config: Config) -> anyhow::Result<()> {
.route("/api/fs/list", get(fs::list))
.route("/api/fs/download", get(fs::download))
.merge(upload_route)
.route("/api/fs/upload-finalize", post(fs::upload_finalize))
.route("/api/fs/download-url", post(fs::download_from_url))
.route("/api/fs/mkdir", post(fs::mkdir))
.route("/api/fs/rm", post(fs::rm))
// MCP management endpoints

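Putting the routes together, the wire protocol for a chunked upload can also be exercised directly; a hedged sketch (the base URL and bearer token are placeholders, everything else matches the query parameters and JSON bodies defined above):

const BASE = "http://localhost:8080"; // placeholder host
async function rawChunkedUpload(file: File, token: string) {
  const CHUNK = 5 * 1024 * 1024;
  const total = Math.ceil(file.size / CHUNK);
  const uploadId = `${file.name}-${file.size}-${Date.now()}`;
  const auth = { Authorization: `Bearer ${token}` };

  for (let i = 0; i < total; i++) {
    const body = new FormData();
    body.append("file", file.slice(i * CHUNK, Math.min((i + 1) * CHUNK, file.size)));
    const params = new URLSearchParams({
      path: "/root/context/",
      upload_id: uploadId,
      chunk_index: String(i),
      total_chunks: String(total),
    });
    // POST each chunk; the server buffers them under the upload_id.
    await fetch(`${BASE}/api/fs/upload-chunk?${params}`, { method: "POST", headers: auth, body });
  }

  // Finalize: the server concatenates chunk_000000..chunk_N and moves the result into place.
  const res = await fetch(`${BASE}/api/fs/upload-finalize`, {
    method: "POST",
    headers: { ...auth, "Content-Type": "application/json" },
    body: JSON.stringify({ path: "/root/context/", upload_id: uploadId, file_name: file.name, total_chunks: total }),
  });
  return res.json(); // { ok: true, path, name }
}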