Enhance logging and context handling in job management. Introduce configurable logger initialization in the manager and runner commands. Switch job context archives from tar.gz to tar, and implement ETag generation for improved caching. Refactor API endpoints to support the new context file structure and improve error handling in job submissions. Add support for unhide-objects and auto-execution options in job creation requests.
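The new job creation options are only named in this message; the client diff below does not show the request schema. A payload sketch, with `unhide_objects` and `auto_execute` as assumed field names used purely for illustration:

    // Hypothetical payload for jobs.create(); `unhide_objects` and
    // `auto_execute` are assumed names - the diff below does not show
    // the actual request schema.
    const jobData = {
      name: 'shot_010',
      frame_start: 1,
      frame_end: 240,
      unhide_objects: true, // assumed: unhide hidden scene objects before rendering
      auto_execute: true,   // assumed: start the job as soon as it is created
    };
    await jobs.create(jobData);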
@@ -3,6 +3,73 @@ const API_BASE = '/api';
 // Global auth error handler - will be set by useAuth hook
 let onAuthError = null;
 
+// Request debouncing and deduplication
+const pendingRequests = new Map(); // key: endpoint+params, value: Promise
+const requestQueue = new Map(); // key: endpoint+params, value: { resolve, reject, timestamp }
+const DEBOUNCE_DELAY = 100; // 100ms debounce delay
+const DEDUPE_WINDOW = 5000; // 5 seconds - same request within this window uses cached promise
+
+// Generate cache key from endpoint and params
+function getCacheKey(endpoint, options = {}) {
+  const params = new URLSearchParams();
+  Object.keys(options).sort().forEach(key => {
+    if (options[key] !== undefined && options[key] !== null) {
+      params.append(key, String(options[key]));
+    }
+  });
+  const query = params.toString();
+  return `${endpoint}${query ? '?' + query : ''}`;
+}
+
+// Sentinel value to indicate a request was superseded (instead of rejecting)
+// Export it so components can check for it
+export const REQUEST_SUPERSEDED = Symbol('REQUEST_SUPERSEDED');
+
+// Debounced request wrapper
+function debounceRequest(key, requestFn, delay = DEBOUNCE_DELAY) {
+  return new Promise((resolve, reject) => {
+    // Check if there's a pending request for this key
+    if (pendingRequests.has(key)) {
+      const pending = pendingRequests.get(key);
+      // If request is very recent (within dedupe window), reuse it
+      const now = Date.now();
+      if (pending.timestamp && (now - pending.timestamp) < DEDUPE_WINDOW) {
+        pending.promise.then(resolve).catch(reject);
+        return;
+      }
+    }
+
+    // Clear any existing timeout for this key
+    if (requestQueue.has(key)) {
+      const queued = requestQueue.get(key);
+      clearTimeout(queued.timeout);
+      // Resolve with sentinel value instead of rejecting - this prevents errors from propagating
+      // The new request will handle the actual response
+      queued.resolve(REQUEST_SUPERSEDED);
+    }
+
+    // Queue new request
+    const timeout = setTimeout(() => {
+      requestQueue.delete(key);
+      const promise = requestFn();
+      const timestamp = Date.now();
+      pendingRequests.set(key, { promise, timestamp });
+
+      promise
+        .then(result => {
+          pendingRequests.delete(key);
+          resolve(result);
+        })
+        .catch(error => {
+          pendingRequests.delete(key);
+          reject(error);
+        });
+    }, delay);
+
+    requestQueue.set(key, { resolve, reject, timeout });
+  });
+}
+
 export const setAuthErrorHandler = (handler) => {
   onAuthError = handler;
 };
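A minimal sketch of how the wrapper behaves when two identical calls land inside the debounce window (using the `jobs.list` method defined below): the first caller resolves with the sentinel, and the second performs the single network request.

    // Fired back-to-back with the same cache key, so only one GET is sent.
    // (Inside an async function.)
    const [first, second] = await Promise.all([
      jobs.list({ limit: 20 }),
      jobs.list({ limit: 20 }),
    ]);
    if (first === REQUEST_SUPERSEDED) {
      // The first call was superseded; `second` holds the actual response.
    }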
@@ -174,12 +241,53 @@ export const auth = {
 };
 
 export const jobs = {
-  async list() {
-    return api.get('/jobs');
+  async list(options = {}) {
+    const key = getCacheKey('/jobs', options);
+    return debounceRequest(key, () => {
+      const params = new URLSearchParams();
+      if (options.limit) params.append('limit', options.limit.toString());
+      if (options.offset) params.append('offset', options.offset.toString());
+      if (options.status) params.append('status', options.status);
+      if (options.sort) params.append('sort', options.sort);
+      const query = params.toString();
+      return api.get(`/jobs${query ? '?' + query : ''}`);
+    });
   },
 
-  async get(id) {
-    return api.get(`/jobs/${id}`);
+  async listSummary(options = {}) {
+    const key = getCacheKey('/jobs/summary', options);
+    return debounceRequest(key, () => {
+      const params = new URLSearchParams();
+      if (options.limit) params.append('limit', options.limit.toString());
+      if (options.offset) params.append('offset', options.offset.toString());
+      if (options.status) params.append('status', options.status);
+      if (options.sort) params.append('sort', options.sort);
+      const query = params.toString();
+      return api.get(`/jobs/summary${query ? '?' + query : ''}`);
+    });
+  },
+
+  async get(id, options = {}) {
+    const key = getCacheKey(`/jobs/${id}`, options);
+    return debounceRequest(key, async () => {
+      if (options.etag) {
+        // Include ETag in request headers for conditional requests
+        const headers = { 'If-None-Match': options.etag };
+        const response = await fetch(`${API_BASE}/jobs/${id}`, {
+          credentials: 'include',
+          headers,
+        });
+        if (response.status === 304) {
+          return null; // Not modified
+        }
+        if (!response.ok) {
+          const errorData = await response.json().catch(() => null);
+          throw new Error(errorData?.error || response.statusText);
+        }
+        return response.json();
+      }
+      return api.get(`/jobs/${id}`);
+    });
   },
 
   async create(jobData) {
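The conditional-request path in `get` returns null on a 304, so a poller can keep its cached copy. Where the ETag value itself comes from is not shown in this diff; the `etag` field on the cached job in this sketch is an assumption:

    // Poll one job using If-None-Match. Assumes the job payload exposes
    // an `etag` field (not confirmed by this diff).
    let cachedJob = null;
    async function pollJob(id) {
      const fresh = await jobs.get(id, { etag: cachedJob?.etag });
      if (fresh !== null && fresh !== REQUEST_SUPERSEDED) {
        cachedJob = fresh; // 200 OK: server sent new data
      }
      // null means 304 Not Modified: cachedJob is still current
      return cachedJob;
    }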
@@ -202,8 +310,27 @@ export const jobs = {
     return api.uploadFile(`/jobs/upload`, file, onProgress, mainBlendFile);
   },
 
-  async getFiles(jobId) {
-    return api.get(`/jobs/${jobId}/files`);
+  async getFiles(jobId, options = {}) {
+    const key = getCacheKey(`/jobs/${jobId}/files`, options);
+    return debounceRequest(key, () => {
+      const params = new URLSearchParams();
+      if (options.limit) params.append('limit', options.limit.toString());
+      if (options.offset) params.append('offset', options.offset.toString());
+      if (options.file_type) params.append('file_type', options.file_type);
+      if (options.extension) params.append('extension', options.extension);
+      const query = params.toString();
+      return api.get(`/jobs/${jobId}/files${query ? '?' + query : ''}`);
+    });
   },
+
+  async getFilesCount(jobId, options = {}) {
+    const key = getCacheKey(`/jobs/${jobId}/files/count`, options);
+    return debounceRequest(key, () => {
+      const params = new URLSearchParams();
+      if (options.file_type) params.append('file_type', options.file_type);
+      const query = params.toString();
+      return api.get(`/jobs/${jobId}/files/count${query ? '?' + query : ''}`);
+    });
+  },
 
   async getContextArchive(jobId) {
@@ -219,12 +346,21 @@ export const jobs = {
   },
 
   async getTaskLogs(jobId, taskId, options = {}) {
-    const params = new URLSearchParams();
-    if (options.stepName) params.append('step_name', options.stepName);
-    if (options.logLevel) params.append('log_level', options.logLevel);
-    if (options.limit) params.append('limit', options.limit.toString());
-    const query = params.toString();
-    return api.get(`/jobs/${jobId}/tasks/${taskId}/logs${query ? '?' + query : ''}`);
+    const key = getCacheKey(`/jobs/${jobId}/tasks/${taskId}/logs`, options);
+    return debounceRequest(key, async () => {
+      const params = new URLSearchParams();
+      if (options.stepName) params.append('step_name', options.stepName);
+      if (options.logLevel) params.append('log_level', options.logLevel);
+      if (options.limit) params.append('limit', options.limit.toString());
+      if (options.sinceId) params.append('since_id', options.sinceId.toString());
+      const query = params.toString();
+      const result = await api.get(`/jobs/${jobId}/tasks/${taskId}/logs${query ? '?' + query : ''}`);
+      // Handle both old format (array) and new format (object with logs, last_id, limit)
+      if (Array.isArray(result)) {
+        return { logs: result, last_id: result.length > 0 ? result[result.length - 1].id : 0, limit: options.limit || 100 };
+      }
+      return result;
+    });
   },
 
   async getTaskSteps(jobId, taskId) {
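Because `getTaskLogs` now normalizes both response shapes to `{ logs, last_id, limit }`, incremental tailing reduces to feeding `last_id` back in as `sinceId`. A sketch:

    // Fetch only log lines newer than the last one seen.
    let lastId = 0;
    async function tailTaskLogs(jobId, taskId) {
      const page = await jobs.getTaskLogs(jobId, taskId, { sinceId: lastId, limit: 100 });
      if (page === REQUEST_SUPERSEDED) return [];
      lastId = page.last_id || lastId; // keep the old cursor if the page was empty
      return page.logs;
    }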
@@ -239,6 +375,20 @@ export const jobs = {
     return new WebSocket(url);
   },
 
+  streamJobsWebSocket() {
+    const wsProtocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
+    const wsHost = window.location.host;
+    const url = `${wsProtocol}//${wsHost}${API_BASE}/jobs/ws`;
+    return new WebSocket(url);
+  },
+
+  streamJobWebSocket(jobId) {
+    const wsProtocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
+    const wsHost = window.location.host;
+    const url = `${wsProtocol}//${wsHost}${API_BASE}/jobs/${jobId}/ws`;
+    return new WebSocket(url);
+  },
+
   async retryTask(jobId, taskId) {
     return api.post(`/jobs/${jobId}/tasks/${taskId}/retry`);
   },
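The two new stream methods return plain WebSocket objects and leave message handling to the caller; the message schema is not part of this diff, so this sketch treats payloads as opaque JSON (the two handler helpers are hypothetical):

    // Live updates for a single job. Parse defensively: the event schema
    // is server-defined and not shown in this diff.
    const ws = jobs.streamJobWebSocket(42);
    ws.onmessage = (event) => {
      let update;
      try { update = JSON.parse(event.data); } catch { return; }
      applyJobUpdate(update); // hypothetical UI handler
    };
    ws.onclose = () => scheduleReconnect(); // hypothetical backoff helper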
@@ -247,8 +397,50 @@ export const jobs = {
     return api.get(`/jobs/${jobId}/metadata`);
   },
 
-  async getTasks(jobId) {
-    return api.get(`/jobs/${jobId}/tasks`);
+  async getTasks(jobId, options = {}) {
+    const key = getCacheKey(`/jobs/${jobId}/tasks`, options);
+    return debounceRequest(key, () => {
+      const params = new URLSearchParams();
+      if (options.limit) params.append('limit', options.limit.toString());
+      if (options.offset) params.append('offset', options.offset.toString());
+      if (options.status) params.append('status', options.status);
+      if (options.frameStart) params.append('frame_start', options.frameStart.toString());
+      if (options.frameEnd) params.append('frame_end', options.frameEnd.toString());
+      if (options.sort) params.append('sort', options.sort);
+      const query = params.toString();
+      return api.get(`/jobs/${jobId}/tasks${query ? '?' + query : ''}`);
+    });
   },
+
+  async getTasksSummary(jobId, options = {}) {
+    const key = getCacheKey(`/jobs/${jobId}/tasks/summary`, options);
+    return debounceRequest(key, () => {
+      const params = new URLSearchParams();
+      if (options.limit) params.append('limit', options.limit.toString());
+      if (options.offset) params.append('offset', options.offset.toString());
+      if (options.status) params.append('status', options.status);
+      if (options.sort) params.append('sort', options.sort);
+      const query = params.toString();
+      return api.get(`/jobs/${jobId}/tasks/summary${query ? '?' + query : ''}`);
+    });
+  },
+
+  async batchGetJobs(jobIds) {
+    // Sort jobIds for consistent cache key
+    const sortedIds = [...jobIds].sort((a, b) => a - b);
+    const key = getCacheKey('/jobs/batch', { job_ids: sortedIds.join(',') });
+    return debounceRequest(key, () => {
+      return api.post('/jobs/batch', { job_ids: jobIds });
+    });
+  },
+
+  async batchGetTasks(jobId, taskIds) {
+    // Sort taskIds for consistent cache key
+    const sortedIds = [...taskIds].sort((a, b) => a - b);
+    const key = getCacheKey(`/jobs/${jobId}/tasks/batch`, { task_ids: sortedIds.join(',') });
+    return debounceRequest(key, () => {
+      return api.post(`/jobs/${jobId}/tasks/batch`, { task_ids: taskIds });
+    });
+  },
 };
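Since the batch helpers sort ids only when building the cache key, two logically identical calls collapse inside the dedupe machinery regardless of argument order, while the POST body preserves the caller's ordering. For example:

    // Same cache key ('job_ids=1,2,3'), so the first call resolves with
    // REQUEST_SUPERSEDED and a single POST is sent for the second.
    const [a, b] = await Promise.all([
      jobs.batchGetJobs([3, 1, 2]),
      jobs.batchGetJobs([1, 2, 3]),
    ]);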