diff --git a/web/src/components/JobDetails.jsx b/web/src/components/JobDetails.jsx
index 188727e..d1206a3 100644
--- a/web/src/components/JobDetails.jsx
+++ b/web/src/components/JobDetails.jsx
@@ -111,6 +111,41 @@ export default function JobDetails({ job, onClose, onUpdate }) {
     });
   }, [taskData]);
 
+  // Helper function to load all files with pagination
+  const loadAllFiles = async (jobId, signal) => {
+    const allFiles = [];
+    let offset = 0;
+    const limit = 100; // Load 100 files per page
+    let hasMore = true;
+
+    while (hasMore && !signal?.aborted) {
+      const fileList = await jobs.getFiles(jobId, { limit, offset, signal });
+
+      // Check for superseded sentinel
+      if (fileList === REQUEST_SUPERSEDED) {
+        return REQUEST_SUPERSEDED;
+      }
+
+      const fileData = fileList?.data || fileList;
+      const files = Array.isArray(fileData) ? fileData : [];
+      allFiles.push(...files);
+
+      // Check if there are more files to load
+      const total = fileList?.total;
+      if (total !== undefined) {
+        hasMore = offset + files.length < total;
+      } else {
+        // If total is not provided, check if we got a full page (or more)
+        // Use >= to safely handle edge cases where API returns different amounts
+        hasMore = files.length >= limit;
+      }
+
+      offset += files.length;
+    }
+
+    return allFiles;
+  };
+
   const loadDetails = async () => {
     // Guard against undefined job or job.id
     if (!job || !job.id) {
@@ -122,9 +157,9 @@ export default function JobDetails({ job, onClose, onUpdate }) {
       setLoading(true);
       // Use summary endpoint for tasks initially - much faster
       const signal = abortControllerRef.current?.signal;
-      const [details, fileList, taskListResult] = await Promise.all([
+      const [details, allFilesResult, taskListResult] = await Promise.all([
         jobs.get(job.id, { signal }),
-        jobs.getFiles(job.id, { limit: 50, signal }), // Only load first page of files
+        loadAllFiles(job.id, signal), // Load all files with pagination
         jobs.getTasksSummary(job.id, { sort: 'frame:asc', signal }), // Get all tasks
       ]);
 
@@ -135,11 +170,10 @@ export default function JobDetails({ job, onClose, onUpdate }) {
       setJobDetails(details);
 
       // Handle paginated file response - check for superseded sentinel
-      if (fileList === REQUEST_SUPERSEDED) {
+      if (allFilesResult === REQUEST_SUPERSEDED) {
         return; // Request was superseded, skip this update
       }
-      const fileData = fileList?.data || fileList;
-      setFiles(Array.isArray(fileData) ? fileData : []);
+      setFiles(Array.isArray(allFilesResult) ? allFilesResult : []);
 
       // Handle paginated task summary response - check for superseded sentinel
       if (taskListResult === REQUEST_SUPERSEDED) {
@@ -617,16 +651,22 @@ export default function JobDetails({ job, onClose, onUpdate }) {
         };
         reloadTasks();
       } else if (data.type === 'file_added' && data.data) {
-        // New file was added - reload file list
+        // New file was added - reload all files
        const reloadFiles = async () => {
           try {
-            const fileList = await jobs.getFiles(job.id, { limit: 50 });
+            const signal = abortControllerRef.current?.signal;
+            const allFilesResult = await loadAllFiles(job.id, signal);
+
+            // Check if request was aborted
+            if (signal?.aborted) {
+              return;
+            }
+
             // Check for superseded sentinel
-            if (fileList === REQUEST_SUPERSEDED) {
+            if (allFilesResult === REQUEST_SUPERSEDED) {
               return; // Request was superseded, skip this update
             }
-            const fileData = fileList.data || fileList;
-            setFiles(Array.isArray(fileData) ? fileData : []);
+            setFiles(Array.isArray(allFilesResult) ? allFilesResult : []);
           } catch (error) {
             console.error('Failed to reload files:', error);
           }