Add pagination support for file loading in JobDetails component
All checks were successful
Release Tag / release (push) Successful in 20s

- Introduced a helper function to load all files associated with a job using pagination, ensuring the complete file list is retrieved while keeping each request bounded by fetching files in batches.
- Updated the loadDetails function to utilize the new pagination method for retrieving all files instead of just the first page.
- Adjusted file handling logic to ensure proper updates when new files are added, maintaining consistency with the paginated approach.
This commit is contained in:
2026-01-03 10:58:36 -06:00
parent bb57ce8659
commit d3c5ee0dba

View File

@@ -111,6 +111,41 @@ export default function JobDetails({ job, onClose, onUpdate }) {
}); });
}, [taskData]); }, [taskData]);
// Helper function to load all files for a job using pagination.
// Fetches pages of `limit` files until the server indicates no more remain,
// accumulating them into a single array.
//
// @param {string|number} jobId - ID of the job whose files are loaded.
// @param {AbortSignal} [signal] - Optional abort signal; the loop stops early when aborted.
// @returns {Promise<Array|symbol>} All files, or REQUEST_SUPERSEDED if the request was superseded.
const loadAllFiles = async (jobId, signal) => {
  const allFiles = [];
  let offset = 0;
  const limit = 100; // Load 100 files per page
  let hasMore = true;
  while (hasMore && !signal?.aborted) {
    const fileList = await jobs.getFiles(jobId, { limit, offset, signal });
    // Check for superseded sentinel
    if (fileList === REQUEST_SUPERSEDED) {
      return REQUEST_SUPERSEDED;
    }
    const fileData = fileList?.data || fileList;
    const files = Array.isArray(fileData) ? fileData : [];
    // Guard against a misbehaving server: an empty page means no forward
    // progress is possible (offset would not advance), so bail out rather
    // than loop forever when `total` claims more files remain.
    if (files.length === 0) {
      break;
    }
    allFiles.push(...files);
    offset += files.length;
    // Check if there are more files to load
    const total = fileList?.total;
    if (total !== undefined) {
      hasMore = offset < total;
    } else {
      // If total is not provided, assume a full page means more may follow.
      // Use >= to safely handle edge cases where the API returns more than requested.
      hasMore = files.length >= limit;
    }
  }
  return allFiles;
};
const loadDetails = async () => { const loadDetails = async () => {
// Guard against undefined job or job.id // Guard against undefined job or job.id
if (!job || !job.id) { if (!job || !job.id) {
@@ -122,9 +157,9 @@ export default function JobDetails({ job, onClose, onUpdate }) {
setLoading(true); setLoading(true);
// Use summary endpoint for tasks initially - much faster // Use summary endpoint for tasks initially - much faster
const signal = abortControllerRef.current?.signal; const signal = abortControllerRef.current?.signal;
const [details, fileList, taskListResult] = await Promise.all([ const [details, allFilesResult, taskListResult] = await Promise.all([
jobs.get(job.id, { signal }), jobs.get(job.id, { signal }),
jobs.getFiles(job.id, { limit: 50, signal }), // Only load first page of files loadAllFiles(job.id, signal), // Load all files with pagination
jobs.getTasksSummary(job.id, { sort: 'frame:asc', signal }), // Get all tasks jobs.getTasksSummary(job.id, { sort: 'frame:asc', signal }), // Get all tasks
]); ]);
@@ -135,11 +170,10 @@ export default function JobDetails({ job, onClose, onUpdate }) {
setJobDetails(details); setJobDetails(details);
// Handle paginated file response - check for superseded sentinel // Handle paginated file response - check for superseded sentinel
if (fileList === REQUEST_SUPERSEDED) { if (allFilesResult === REQUEST_SUPERSEDED) {
return; // Request was superseded, skip this update return; // Request was superseded, skip this update
} }
const fileData = fileList?.data || fileList; setFiles(Array.isArray(allFilesResult) ? allFilesResult : []);
setFiles(Array.isArray(fileData) ? fileData : []);
// Handle paginated task summary response - check for superseded sentinel // Handle paginated task summary response - check for superseded sentinel
if (taskListResult === REQUEST_SUPERSEDED) { if (taskListResult === REQUEST_SUPERSEDED) {
@@ -617,16 +651,22 @@ export default function JobDetails({ job, onClose, onUpdate }) {
}; };
reloadTasks(); reloadTasks();
} else if (data.type === 'file_added' && data.data) { } else if (data.type === 'file_added' && data.data) {
// New file was added - reload file list // New file was added - reload all files
const reloadFiles = async () => { const reloadFiles = async () => {
try { try {
const fileList = await jobs.getFiles(job.id, { limit: 50 }); const signal = abortControllerRef.current?.signal;
const allFilesResult = await loadAllFiles(job.id, signal);
// Check if request was aborted
if (signal?.aborted) {
return;
}
// Check for superseded sentinel // Check for superseded sentinel
if (fileList === REQUEST_SUPERSEDED) { if (allFilesResult === REQUEST_SUPERSEDED) {
return; // Request was superseded, skip this update return; // Request was superseded, skip this update
} }
const fileData = fileList.data || fileList; setFiles(Array.isArray(allFilesResult) ? allFilesResult : []);
setFiles(Array.isArray(fileData) ? fileData : []);
} catch (error) { } catch (error) {
console.error('Failed to reload files:', error); console.error('Failed to reload files:', error);
} }