Enhance logging and context handling in job management. Introduce logger initialization with configurable parameters in the manager and runner commands. Update job context handling to use tar files instead of tar.gz, and implement ETag generation for improved caching. Refactor API endpoints to support the new context file structure and improve error handling in job submissions. Add support for unhide-objects and auto-execution options in job creation requests.
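
The ETag caching mentioned above follows the standard HTTP conditional-request round trip. A minimal client-side sketch, assuming the manager sets an ETag header on GET /api/jobs/:id responses (the server-side generation is not part of this frontend diff); it mirrors the If-None-Match handling added to jobs.get below:

let cachedJob = null;
let cachedEtag = null;

async function fetchJobWithEtag(jobId) {
  // Send the last validator we saw; the server answers 304 if nothing changed.
  const headers = cachedEtag ? { 'If-None-Match': cachedEtag } : {};
  const response = await fetch(`/api/jobs/${jobId}`, { credentials: 'include', headers });
  if (response.status === 304) {
    return cachedJob; // Unchanged - reuse the cached copy, no body was sent
  }
  if (!response.ok) throw new Error(response.statusText);
  cachedEtag = response.headers.get('ETag'); // Remember the validator for next time
  cachedJob = await response.json();
  return cachedJob;
}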
@@ -1,7 +1,7 @@
import { useState } from 'react';

export default function FileExplorer({ files, onDownload, onPreview, isImageFile }) {
const [expandedPaths, setExpandedPaths] = useState(new Set());
const [expandedPaths, setExpandedPaths] = useState(new Set()); // Root folder collapsed by default

// Build directory tree from file paths
const buildTree = (files) => {
@@ -70,7 +70,7 @@ export default function FileExplorer({ files, onDownload, onPreview, isImageFile
const file = item.file;
const isImage = isImageFile && isImageFile(file.file_name);
const sizeMB = (file.file_size / 1024 / 1024).toFixed(2);
const isArchive = file.file_name.endsWith('.tar.gz') || file.file_name.endsWith('.zip');
const isArchive = file.file_name.endsWith('.tar') || file.file_name.endsWith('.zip');

return (
<div key={fullPath} className="flex items-center justify-between py-1.5 hover:bg-gray-800/50 rounded px-2" style={{ paddingLeft: `${indent + 8}px` }}>
@@ -108,10 +108,13 @@ export default function FileExplorer({ files, onDownload, onPreview, isImageFile
return (
<div key={fullPath}>
<div
className="flex items-center gap-2 py-1 hover:bg-gray-800/50 rounded px-2 cursor-pointer"
className="flex items-center gap-2 py-1.5 hover:bg-gray-800/50 rounded px-2 cursor-pointer select-none"
style={{ paddingLeft: `${indent + 8}px` }}
onClick={() => hasChildren && togglePath(fullPath)}
>
<span className="text-gray-400 text-xs w-4 flex items-center justify-center">
{hasChildren ? (isExpanded ? '▼' : '▶') : '•'}
</span>
<span className="text-gray-500 text-sm">
{hasChildren ? (isExpanded ? '📂' : '📁') : '📁'}
</span>
@@ -123,7 +126,7 @@ export default function FileExplorer({ files, onDownload, onPreview, isImageFile
)}
</div>
{hasChildren && isExpanded && (
<div>
<div className="ml-2">
{renderTree(item.children, level + 1, fullPath)}
</div>
)}
@@ -143,10 +146,34 @@ export default function FileExplorer({ files, onDownload, onPreview, isImageFile
);
}

// Wrap tree in a root folder
const rootExpanded = expandedPaths.has('');

return (
<div className="bg-gray-900 rounded-lg border border-gray-700 p-3">
<div className="space-y-1">
{renderTree(tree)}
<div>
<div
className="flex items-center gap-2 py-1.5 hover:bg-gray-800/50 rounded px-2 cursor-pointer select-none"
onClick={() => togglePath('')}
>
<span className="text-gray-400 text-xs w-4 flex items-center justify-center">
{rootExpanded ? '▼' : '▶'}
</span>
<span className="text-gray-500 text-sm">
{rootExpanded ? '📂' : '📁'}
</span>
<span className="text-gray-300 text-sm font-medium">Files</span>
<span className="text-gray-500 text-xs ml-2">
({Object.keys(tree).length})
</span>
</div>
{rootExpanded && (
<div className="ml-2">
{renderTree(tree)}
</div>
)}
</div>
</div>
</div>
);
@@ -1,5 +1,5 @@
import { useState, useEffect, useRef } from 'react';
import { jobs } from '../utils/api';
import { jobs, REQUEST_SUPERSEDED } from '../utils/api';
import VideoPlayer from './VideoPlayer';
import FileExplorer from './FileExplorer';

@@ -18,19 +18,53 @@ export default function JobDetails({ job, onClose, onUpdate }) {
const [streaming, setStreaming] = useState(false);
const [previewImage, setPreviewImage] = useState(null); // { url, fileName } or null
const wsRef = useRef(null);
const jobWsRef = useRef(null); // Separate ref for job WebSocket
const logContainerRefs = useRef({}); // Refs for each step's log container
const shouldAutoScrollRefs = useRef({}); // Auto-scroll state per step

useEffect(() => {
loadDetails();
const interval = setInterval(loadDetails, 2000);
return () => {
clearInterval(interval);
if (wsRef.current) {
wsRef.current.close();
// Use WebSocket for real-time updates instead of polling
if (jobDetails.status === 'running' || jobDetails.status === 'pending' || !jobDetails.status) {
connectJobWebSocket();
return () => {
if (jobWsRef.current) {
try {
jobWsRef.current.close();
} catch (e) {
// Ignore errors when closing
}
jobWsRef.current = null;
}
if (wsRef.current) {
try {
wsRef.current.close();
} catch (e) {
// Ignore errors when closing
}
wsRef.current = null;
}
};
} else {
// Job is completed/failed/cancelled - close WebSocket
if (jobWsRef.current) {
try {
jobWsRef.current.close();
} catch (e) {
// Ignore errors when closing
}
jobWsRef.current = null;
}
};
}, [job.id]);
if (wsRef.current) {
try {
wsRef.current.close();
} catch (e) {
// Ignore errors when closing
}
wsRef.current = null;
}
}
}, [job.id, jobDetails.status]);

useEffect(() => {
// Load logs and steps for all running tasks
@@ -83,14 +117,45 @@ export default function JobDetails({ job, onClose, onUpdate }) {

const loadDetails = async () => {
try {
const [details, fileList, taskList] = await Promise.all([
setLoading(true);
// Use summary endpoint for tasks initially - much faster
const [details, fileList, taskListResult] = await Promise.all([
jobs.get(job.id),
jobs.getFiles(job.id),
jobs.getTasks(job.id),
jobs.getFiles(job.id, { limit: 50 }), // Only load first page of files
jobs.getTasksSummary(job.id, { limit: 100, sort: 'frame_start:asc' }), // Use summary endpoint
]);
setJobDetails(details);
setFiles(fileList);
setTasks(taskList);

// Handle paginated file response - check for superseded sentinel
if (fileList === REQUEST_SUPERSEDED) {
return; // Request was superseded, skip this update
}
const fileData = fileList?.data || fileList;
setFiles(Array.isArray(fileData) ? fileData : []);

// Handle paginated task summary response - check for superseded sentinel
if (taskListResult === REQUEST_SUPERSEDED) {
return; // Request was superseded, skip this update
}
const taskData = taskListResult?.data || taskListResult;
const taskSummaries = Array.isArray(taskData) ? taskData : [];

// Convert summaries to task-like objects for display
const tasksForDisplay = taskSummaries.map(summary => ({
id: summary.id,
job_id: job.id,
frame_start: summary.frame_start,
frame_end: summary.frame_end,
status: summary.status,
task_type: summary.task_type,
runner_id: summary.runner_id,
// These will be loaded on expand
current_step: null,
retry_count: 0,
max_retries: 3,
created_at: new Date().toISOString(),
}));
setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []);

// Fetch context archive contents separately (may not exist for old jobs)
try {
@@ -101,26 +166,27 @@ export default function JobDetails({ job, onClose, onUpdate }) {
setContextFiles([]);
}

// Only load task data (logs/steps) for tasks that don't have data yet
// This prevents overwriting logs that are being streamed via WebSocket
// Once we have logs for a task, we rely on WebSocket for new logs
// Only load task data (logs/steps) for expanded tasks
// Don't auto-load for all tasks - wait for user to expand
if (details.status === 'running') {
taskList.forEach(task => {
const existingData = taskData[task.id];
// Only fetch logs via HTTP if we don't have any logs yet
// Once we have logs, WebSocket will handle new ones
if (!existingData || !existingData.logs || existingData.logs.length === 0) {
loadTaskData(task.id);
} else if (!existingData.steps || existingData.steps.length === 0) {
// If we have logs but no steps, fetch steps only
loadTaskStepsOnly(task.id);
// Only load data for tasks that are expanded
tasksForDisplay.forEach(task => {
if (expandedTasks.has(task.id)) {
const existingData = taskData[task.id];
// Only fetch logs via HTTP if we don't have any logs yet
if (!existingData || !existingData.logs || existingData.logs.length === 0) {
loadTaskData(task.id);
} else if (!existingData.steps || existingData.steps.length === 0) {
loadTaskStepsOnly(task.id);
}
}
});
}

// Check if there's an MP4 output file
const mp4File = fileList.find(
(f) => f.file_type === 'output' && f.file_name.endsWith('.mp4')
const fileArray = Array.isArray(fileData) ? fileData : [];
const mp4File = fileArray.find(
(f) => f.file_type === 'output' && f.file_name && f.file_name.endsWith('.mp4')
);
if (mp4File) {
setVideoUrl(jobs.getVideoUrl(job.id));
@@ -138,23 +204,39 @@ export default function JobDetails({ job, onClose, onUpdate }) {

const loadTaskData = async (taskId) => {
try {
const [logs, steps] = await Promise.all([
jobs.getTaskLogs(job.id, taskId),
console.log(`Loading task data for task ${taskId}...`);
const [logsResult, steps] = await Promise.all([
jobs.getTaskLogs(job.id, taskId, { limit: 1000 }), // Increased limit for completed tasks
jobs.getTaskSteps(job.id, taskId),
]);

// Check for superseded sentinel
if (logsResult === REQUEST_SUPERSEDED || steps === REQUEST_SUPERSEDED) {
return; // Request was superseded, skip this update
}

console.log(`Task ${taskId} logs result:`, logsResult);

// Handle new format with logs, last_id, limit
const logs = logsResult.logs || logsResult;
const lastId = logsResult.last_id;

console.log(`Task ${taskId} - loaded ${Array.isArray(logs) ? logs.length : 0} logs, ${Array.isArray(steps) ? steps.length : 0} steps`);

setTaskData(prev => {
const current = prev[taskId] || { steps: [], logs: [] };
const current = prev[taskId] || { steps: [], logs: [], lastId: 0 };
// Merge logs instead of replacing - this preserves WebSocket-streamed logs
// Deduplicate by log ID
const existingLogIds = new Set((current.logs || []).map(l => l.id));
const newLogs = (logs || []).filter(l => !existingLogIds.has(l.id));
const newLogs = (Array.isArray(logs) ? logs : []).filter(l => !existingLogIds.has(l.id));
const mergedLogs = [...(current.logs || []), ...newLogs].sort((a, b) => a.id - b.id);

return {
...prev,
[taskId]: {
steps: steps || current.steps, // Steps can be replaced (they don't change often)
logs: mergedLogs
steps: steps || current.steps,
logs: mergedLogs,
lastId: lastId || current.lastId
}
};
});
@@ -166,6 +248,10 @@ export default function JobDetails({ job, onClose, onUpdate }) {
const loadTaskStepsOnly = async (taskId) => {
try {
const steps = await jobs.getTaskSteps(job.id, taskId);
// Check for superseded sentinel
if (steps === REQUEST_SUPERSEDED) {
return; // Request was superseded, skip this update
}
setTaskData(prev => {
const current = prev[taskId] || { steps: [], logs: [] };
return {
@@ -181,9 +267,276 @@ export default function JobDetails({ job, onClose, onUpdate }) {
}
};

const connectJobWebSocket = () => {
try {
// Close existing connection if any
if (jobWsRef.current) {
try {
jobWsRef.current.close();
} catch (e) {
// Ignore errors when closing
}
jobWsRef.current = null;
}

const ws = jobs.streamJobWebSocket(job.id);
jobWsRef.current = ws; // Store reference

ws.onopen = () => {
console.log('Job WebSocket connected for job', job.id);
};

ws.onmessage = (event) => {
try {
const data = JSON.parse(event.data);
console.log('Job WebSocket message received:', data.type, data);

if (data.type === 'job_update' && data.data) {
// Update job details
setJobDetails(prev => ({ ...prev, ...data.data }));
} else if (data.type === 'task_update' && data.data) {
// Update task in list
setTasks(prev => {
// Ensure prev is always an array
const prevArray = Array.isArray(prev) ? prev : [];
if (!data.task_id) {
console.warn('task_update message missing task_id:', data);
return prevArray;
}
const index = prevArray.findIndex(t => t.id === data.task_id);
if (index >= 0) {
const updated = [...prevArray];
updated[index] = { ...updated[index], ...data.data };
return updated;
}
// If task not found, it might be a new task - reload to be safe
if (data.data && (data.data.status === 'running' || data.data.status === 'pending')) {
setTimeout(() => {
const reloadTasks = async () => {
try {
const taskListResult = await jobs.getTasksSummary(job.id, { limit: 100, sort: 'frame_start:asc' });
// Check for superseded sentinel
if (taskListResult === REQUEST_SUPERSEDED) {
return; // Request was superseded, skip this update
}
const taskData = taskListResult.data || taskListResult;
const taskSummaries = Array.isArray(taskData) ? taskData : [];
const tasksForDisplay = taskSummaries.map(summary => ({
id: summary.id,
job_id: job.id,
frame_start: summary.frame_start,
frame_end: summary.frame_end,
status: summary.status,
task_type: summary.task_type,
runner_id: summary.runner_id,
current_step: null,
retry_count: 0,
max_retries: 3,
created_at: new Date().toISOString(),
}));
setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []);
} catch (error) {
console.error('Failed to reload tasks:', error);
}
};
reloadTasks();
}, 100);
}
return prevArray;
});
} else if (data.type === 'task_added' && data.data) {
// New task was added - reload task summaries to get the new task
console.log('task_added message received, reloading tasks...', data);
const reloadTasks = async () => {
try {
const taskListResult = await jobs.getTasksSummary(job.id, { limit: 100, sort: 'frame_start:asc' });
// Check for superseded sentinel
if (taskListResult === REQUEST_SUPERSEDED) {
return; // Request was superseded, skip this update
}
const taskData = taskListResult.data || taskListResult;
const taskSummaries = Array.isArray(taskData) ? taskData : [];
const tasksForDisplay = taskSummaries.map(summary => ({
id: summary.id,
job_id: job.id,
frame_start: summary.frame_start,
frame_end: summary.frame_end,
status: summary.status,
task_type: summary.task_type,
runner_id: summary.runner_id,
current_step: null,
retry_count: 0,
max_retries: 3,
created_at: new Date().toISOString(),
}));
setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []);
} catch (error) {
console.error('Failed to reload tasks:', error);
// Fallback to full reload
loadDetails();
}
};
reloadTasks();
} else if (data.type === 'tasks_added' && data.data) {
// Multiple new tasks were added - reload task summaries
console.log('tasks_added message received, reloading tasks...', data);
const reloadTasks = async () => {
try {
const taskListResult = await jobs.getTasksSummary(job.id, { limit: 100, sort: 'frame_start:asc' });
// Check for superseded sentinel
if (taskListResult === REQUEST_SUPERSEDED) {
return; // Request was superseded, skip this update
}
const taskData = taskListResult.data || taskListResult;
const taskSummaries = Array.isArray(taskData) ? taskData : [];
const tasksForDisplay = taskSummaries.map(summary => ({
id: summary.id,
job_id: job.id,
frame_start: summary.frame_start,
frame_end: summary.frame_end,
status: summary.status,
task_type: summary.task_type,
runner_id: summary.runner_id,
current_step: null,
retry_count: 0,
max_retries: 3,
created_at: new Date().toISOString(),
}));
setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []);
} catch (error) {
console.error('Failed to reload tasks:', error);
// Fallback to full reload
loadDetails();
}
};
reloadTasks();
} else if (data.type === 'file_added' && data.data) {
// New file was added - reload file list
const reloadFiles = async () => {
try {
const fileList = await jobs.getFiles(job.id, { limit: 50 });
// Check for superseded sentinel
if (fileList === REQUEST_SUPERSEDED) {
return; // Request was superseded, skip this update
}
const fileData = fileList.data || fileList;
setFiles(Array.isArray(fileData) ? fileData : []);
} catch (error) {
console.error('Failed to reload files:', error);
}
};
reloadFiles();
} else if (data.type === 'step_update' && data.data && data.task_id) {
// Step was created or updated - update task data
console.log('step_update message received:', data);
setTaskData(prev => {
const taskId = data.task_id;
const current = prev[taskId] || { steps: [], logs: [] };
const stepData = data.data;

// Find if step already exists
const existingSteps = current.steps || [];
const stepIndex = existingSteps.findIndex(s => s.step_name === stepData.step_name);

let updatedSteps;
if (stepIndex >= 0) {
// Update existing step
updatedSteps = [...existingSteps];
updatedSteps[stepIndex] = {
...updatedSteps[stepIndex],
...stepData,
id: stepData.step_id || updatedSteps[stepIndex].id,
};
} else {
// Add new step
updatedSteps = [...existingSteps, {
id: stepData.step_id,
step_name: stepData.step_name,
status: stepData.status,
duration_ms: stepData.duration_ms,
error_message: stepData.error_message,
}];
}

return {
...prev,
[taskId]: {
...current,
steps: updatedSteps,
}
};
});
} else if (data.type === 'connected') {
// Connection established
}
} catch (error) {
console.error('Failed to parse WebSocket message:', error);
}
};

ws.onerror = (error) => {
console.error('Job WebSocket error:', {
error,
readyState: ws.readyState,
url: ws.url,
jobId: job.id,
status: jobDetails.status
});
// WebSocket errors don't provide much detail, but we can check readyState
if (ws.readyState === WebSocket.CLOSED || ws.readyState === WebSocket.CLOSING) {
console.warn('Job WebSocket is closed or closing, will attempt reconnect');
}
};

ws.onclose = (event) => {
console.log('Job WebSocket closed:', {
code: event.code,
reason: event.reason,
wasClean: event.wasClean,
jobId: job.id,
status: jobDetails.status
});
jobWsRef.current = null;

// Code 1006 = Abnormal Closure (connection lost without close frame)
// Code 1000 = Normal Closure
// Code 1001 = Going Away (server restart, etc.)
// We should reconnect for abnormal closures (1006) or unexpected closes
const shouldReconnect = !event.wasClean || event.code === 1006 || event.code === 1001;

// Get current status from state to avoid stale closure
const currentStatus = jobDetails.status;
const isActiveJob = currentStatus === 'running' || currentStatus === 'pending';

if (shouldReconnect && isActiveJob) {
console.log(`Attempting to reconnect job WebSocket in 2 seconds... (code: ${event.code})`);
setTimeout(() => {
// Check status again before reconnecting (might have changed)
// Use a ref or check the current state directly
if ((!jobWsRef.current || jobWsRef.current.readyState === WebSocket.CLOSED)) {
// Re-check if job is still active by reading current state
// We'll check this in connectJobWebSocket if needed
connectJobWebSocket();
}
}, 2000);
} else if (!isActiveJob) {
console.log('Job is no longer active, not reconnecting WebSocket');
}
};
} catch (error) {
console.error('Failed to connect job WebSocket:', error);
}
};

const startLogStream = (taskIds) => {
if (taskIds.length === 0 || streaming) return;

// Don't start streaming if job is no longer running
if (jobDetails.status !== 'running' && jobDetails.status !== 'pending') {
console.log('Job is not running, skipping log stream');
return;
}

setStreaming(true);
// For now, stream the first task's logs (WebSocket supports one task at a time)
// In the future, we could have multiple WebSocket connections
@@ -219,17 +572,44 @@ export default function JobDetails({ job, onClose, onUpdate }) {
}
};

ws.onopen = () => {
console.log('Log WebSocket connected for task', primaryTaskId);
};

ws.onerror = (error) => {
console.error('WebSocket error:', error);
console.error('Log WebSocket error:', {
error,
readyState: ws.readyState,
url: ws.url,
taskId: primaryTaskId,
jobId: job.id
});
setStreaming(false);
};

ws.onclose = () => {
ws.onclose = (event) => {
console.log('Log WebSocket closed:', {
code: event.code,
reason: event.reason,
wasClean: event.wasClean,
taskId: primaryTaskId,
jobId: job.id
});
setStreaming(false);
// Auto-reconnect if job is still running
if (jobDetails.status === 'running' && taskIds.length > 0) {
wsRef.current = null;

// Code 1006 = Abnormal Closure (connection lost without close frame)
// Code 1000 = Normal Closure
// Code 1001 = Going Away (server restart, etc.)
const shouldReconnect = !event.wasClean || event.code === 1006 || event.code === 1001;

// Auto-reconnect if job is still running and close was unexpected
if (shouldReconnect && jobDetails.status === 'running' && taskIds.length > 0) {
console.log(`Attempting to reconnect log WebSocket in 2 seconds... (code: ${event.code})`);
setTimeout(() => {
if (jobDetails.status === 'running') {
// Check status again before reconnecting (might have changed)
// The startLogStream function will check if job is still running
if (jobDetails.status === 'running' && taskIds.length > 0) {
startLogStream(taskIds);
}
}, 2000);
@@ -243,9 +623,39 @@ export default function JobDetails({ job, onClose, onUpdate }) {
newExpanded.delete(taskId);
} else {
newExpanded.add(taskId);
// Load data if not already loaded
if (!taskData[taskId]) {
// Load full task details if we only have summary
const tasksArray = Array.isArray(tasks) ? tasks : [];
const currentTask = tasksArray.find(t => t.id === taskId);
if (currentTask && !currentTask.created_at) {
// This is a summary - fetch full task details
try {
const fullTasks = await jobs.getTasks(job.id, {
limit: 1,
// We can't filter by task ID, so we'll get all and find the one we need
});
const taskData = fullTasks.data || fullTasks;
const fullTask = Array.isArray(taskData) ? taskData.find(t => t.id === taskId) : null;
if (fullTask) {
setTasks(prev => {
const prevArray = Array.isArray(prev) ? prev : [];
return prevArray.map(t => t.id === taskId ? fullTask : t);
});
}
} catch (err) {
console.error('Failed to load full task details:', err);
}
}
// Always load logs/steps when expanding a task to ensure we have the latest data
// This is especially important for completed tasks that weren't loaded before
const existingData = taskData[taskId];
const hasLogs = existingData && existingData.logs && existingData.logs.length > 0;
const hasSteps = existingData && existingData.steps && existingData.steps.length > 0;

if (!hasLogs || !hasSteps) {
console.log(`Loading task data for task ${taskId} (logs: ${hasLogs}, steps: ${hasSteps})`);
await loadTaskData(taskId);
} else {
console.log(`Task ${taskId} already has ${existingData.logs.length} logs and ${existingData.steps.length} steps, skipping load`);
}
}
setExpandedTasks(newExpanded);
@@ -1,4 +1,4 @@
import { useState, useEffect } from 'react';
import { useState, useEffect, useRef } from 'react';
import { jobs } from '../utils/api';
import JobDetails from './JobDetails';
@@ -6,17 +6,165 @@ export default function JobList() {
const [jobList, setJobList] = useState([]);
const [loading, setLoading] = useState(true);
const [selectedJob, setSelectedJob] = useState(null);
const [pagination, setPagination] = useState({ total: 0, limit: 50, offset: 0 });
const [hasMore, setHasMore] = useState(true);
const pollingIntervalRef = useRef(null);
const wsRef = useRef(null);

useEffect(() => {
loadJobs();
const interval = setInterval(loadJobs, 5000);
return () => clearInterval(interval);
// Use WebSocket for real-time updates instead of polling
connectWebSocket();
return () => {
if (pollingIntervalRef.current) {
clearInterval(pollingIntervalRef.current);
}
if (wsRef.current) {
try {
wsRef.current.close();
} catch (e) {
// Ignore errors when closing
}
wsRef.current = null;
}
};
}, []);

const loadJobs = async () => {
const connectWebSocket = () => {
try {
const data = await jobs.list();
setJobList(data);
// Close existing connection if any
if (wsRef.current) {
try {
wsRef.current.close();
} catch (e) {
// Ignore errors when closing
}
wsRef.current = null;
}

const ws = jobs.streamJobsWebSocket();
wsRef.current = ws;

ws.onopen = () => {
console.log('Job list WebSocket connected');
};

ws.onmessage = (event) => {
try {
const data = JSON.parse(event.data);
if (data.type === 'job_update' && data.data) {
// Update job in list
setJobList(prev => {
const index = prev.findIndex(j => j.id === data.job_id);
if (index >= 0) {
const updated = [...prev];
updated[index] = { ...updated[index], ...data.data };
return updated;
}
// If job not in current page, reload to get updated list
if (data.data.status === 'completed' || data.data.status === 'failed') {
loadJobs();
}
return prev;
});
} else if (data.type === 'connected') {
// Connection established
}
} catch (error) {
console.error('Failed to parse WebSocket message:', error);
}
};

ws.onerror = (error) => {
console.error('Job list WebSocket error:', {
error,
readyState: ws.readyState,
url: ws.url
});
// WebSocket errors don't provide much detail, but we can check readyState
if (ws.readyState === WebSocket.CLOSED || ws.readyState === WebSocket.CLOSING) {
console.warn('Job list WebSocket is closed or closing, will fallback to polling');
// Fallback to polling on error
startAdaptivePolling();
}
};

ws.onclose = (event) => {
console.log('Job list WebSocket closed:', {
code: event.code,
reason: event.reason,
wasClean: event.wasClean
});
wsRef.current = null;

// Code 1006 = Abnormal Closure (connection lost without close frame)
// Code 1000 = Normal Closure
// Code 1001 = Going Away (server restart, etc.)
// We should reconnect for abnormal closures (1006) or unexpected closes
const shouldReconnect = !event.wasClean || event.code === 1006 || event.code === 1001;

if (shouldReconnect) {
console.log(`Attempting to reconnect job list WebSocket in 2 seconds... (code: ${event.code})`);
setTimeout(() => {
if (wsRef.current === null || (wsRef.current && wsRef.current.readyState === WebSocket.CLOSED)) {
connectWebSocket();
}
}, 2000);
} else {
// Clean close (code 1000) - fallback to polling
console.log('WebSocket closed cleanly, falling back to polling');
startAdaptivePolling();
}
};
} catch (error) {
console.error('Failed to connect WebSocket:', error);
// Fallback to polling
startAdaptivePolling();
}
};

const startAdaptivePolling = () => {
const checkAndPoll = () => {
const hasRunningJobs = jobList.some(job => job.status === 'running' || job.status === 'pending');
const interval = hasRunningJobs ? 5000 : 10000; // 5s for running, 10s for completed

if (pollingIntervalRef.current) {
clearInterval(pollingIntervalRef.current);
}

pollingIntervalRef.current = setInterval(() => {
loadJobs();
}, interval);
};

checkAndPoll();
// Re-check interval when job list changes
const checkInterval = setInterval(checkAndPoll, 5000);
return () => clearInterval(checkInterval);
};

const loadJobs = async (append = false) => {
try {
const offset = append ? pagination.offset + pagination.limit : 0;
const result = await jobs.listSummary({
limit: pagination.limit,
offset,
sort: 'created_at:desc'
});

// Handle both old format (array) and new format (object with data, total, etc.)
const jobsData = result.data || result;
const total = result.total !== undefined ? result.total : jobsData.length;

if (append) {
setJobList(prev => [...prev, ...jobsData]);
setPagination(prev => ({ ...prev, offset, total }));
} else {
setJobList(jobsData);
setPagination({ total, limit: result.limit || pagination.limit, offset: result.offset || 0 });
}

setHasMore(offset + jobsData.length < total);
} catch (error) {
console.error('Failed to load jobs:', error);
} finally {
@@ -24,8 +172,13 @@ export default function JobList() {
}
};

const loadMore = () => {
if (!loading && hasMore) {
loadJobs(true);
}
};

// Keep selectedJob in sync with the job list when it refreshes
// This prevents the selected job from becoming stale when format selection or other actions trigger updates
useEffect(() => {
if (selectedJob && jobList.length > 0) {
const freshJob = jobList.find(j => j.id === selectedJob.id);
@@ -74,7 +227,7 @@ export default function JobList() {
return colors[status] || colors.pending;
};

if (loading) {
if (loading && jobList.length === 0) {
return (
<div className="flex justify-center items-center h-64">
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-orange-500"></div>
@@ -106,8 +259,10 @@ export default function JobList() {
</div>

<div className="space-y-2 text-sm text-gray-400 mb-4">
<p>Frames: {job.frame_start} - {job.frame_end}</p>
<p>Format: {job.output_format}</p>
{job.frame_start !== undefined && job.frame_end !== undefined && (
<p>Frames: {job.frame_start} - {job.frame_end}</p>
)}
{job.output_format && <p>Format: {job.output_format}</p>}
<p>Created: {new Date(job.created_at).toLocaleString()}</p>
</div>

@@ -126,7 +281,15 @@ export default function JobList() {

<div className="flex gap-2">
<button
onClick={() => setSelectedJob(job)}
onClick={() => {
// Fetch full job details when viewing
jobs.get(job.id).then(fullJob => {
setSelectedJob(fullJob);
}).catch(err => {
console.error('Failed to load job details:', err);
setSelectedJob(job); // Fallback to summary
});
}}
className="flex-1 px-4 py-2 bg-orange-600 text-white rounded-lg hover:bg-orange-500 transition-colors font-medium"
>
View Details
@@ -153,6 +316,18 @@ export default function JobList() {
))}
</div>

{hasMore && (
<div className="flex justify-center mt-6">
<button
onClick={loadMore}
disabled={loading}
className="px-6 py-2 bg-gray-700 text-gray-200 rounded-lg hover:bg-gray-600 transition-colors font-medium disabled:opacity-50"
>
{loading ? 'Loading...' : 'Load More'}
</button>
</div>
)}

{selectedJob && (
<JobDetails
job={selectedJob}
@@ -163,4 +338,3 @@ export default function JobList() {
</>
);
}
@@ -3,7 +3,7 @@ import { jobs } from '../utils/api';
import JobDetails from './JobDetails';

export default function JobSubmission({ onSuccess }) {
const [step, setStep] = useState(1); // 1 = upload & extract metadata, 2 = configure & submit
const [step, setStep] = useState(1); // 1 = upload & extract metadata, 2 = missing addons (if any), 3 = configure & submit
const [formData, setFormData] = useState({
name: '',
frame_start: 1,
@@ -11,6 +11,8 @@ export default function JobSubmission({ onSuccess }) {
output_format: 'PNG',
allow_parallel_runners: true,
render_settings: null, // Will contain engine settings
unhide_objects: false, // Unhide objects/collections tweak
enable_execution: false, // Enable auto-execution in Blender
});
const [showAdvancedSettings, setShowAdvancedSettings] = useState(false);
const [file, setFile] = useState(null);
@@ -25,6 +27,7 @@ export default function JobSubmission({ onSuccess }) {
const [isUploading, setIsUploading] = useState(false);
const [blendFiles, setBlendFiles] = useState([]); // For ZIP files with multiple blend files
const [selectedMainBlend, setSelectedMainBlend] = useState('');
const [confirmedMissingFiles, setConfirmedMissingFiles] = useState(false); // Confirmation for missing files

// Use refs to track cancellation state across re-renders
const isCancelledRef = useRef(false);
@@ -71,10 +74,15 @@ export default function JobSubmission({ onSuccess }) {
// Upload file to new endpoint (no job required)
const result = await jobs.uploadFileForJobCreation(selectedFile, (progress) => {
setUploadProgress(progress);
// After upload completes, show processing state
if (progress >= 100) {
setMetadataStatus('processing');
}
}, selectedMainBlend || undefined);

// Keep showing processing state until we have the result
setMetadataStatus('processing');
setUploadProgress(100);
setIsUploading(false);

// Store session ID for later use when creating the job
if (result.session_id) {
@@ -88,6 +96,9 @@ export default function JobSubmission({ onSuccess }) {
return;
}

// Upload and processing complete
setIsUploading(false);

// If metadata was extracted, use it
if (result.metadata_extracted && result.metadata) {
setMetadata(result.metadata);
@@ -141,25 +152,33 @@ export default function JobSubmission({ onSuccess }) {
return;
}

try {
setIsUploading(true);
setUploadProgress(0);
setMetadataStatus('extracting');
try {
setIsUploading(true);
setUploadProgress(0);
setMetadataStatus('extracting');

// Re-upload with selected main blend file
const result = await jobs.uploadFileForJobCreation(file, (progress) => {
setUploadProgress(progress);
}, selectedMainBlend);
// Re-upload with selected main blend file
const result = await jobs.uploadFileForJobCreation(file, (progress) => {
setUploadProgress(progress);
// After upload completes, show processing state
if (progress >= 100) {
setMetadataStatus('processing');
}
}, selectedMainBlend);

setUploadProgress(100);
setIsUploading(false);
setBlendFiles([]);
// Keep showing processing state until we have the result
setMetadataStatus('processing');
setUploadProgress(100);
setBlendFiles([]);

// Store session ID
if (result.session_id) {
setUploadSessionId(result.session_id);
}

// Upload and processing complete
setIsUploading(false);

// If metadata was extracted, use it
if (result.metadata_extracted && result.metadata) {
setMetadata(result.metadata);
@@ -202,17 +221,43 @@ export default function JobSubmission({ onSuccess }) {

const handleContinueToStep2 = () => {
if (metadataStatus === 'completed' || metadataStatus === 'error') {
setStep(2);
// Check if there are missing addons - if so, go to addon step, otherwise skip to config
const hasMissingAddons = metadata?.missing_files_info?.missing_addons &&
metadata.missing_files_info.missing_addons.length > 0;
if (hasMissingAddons) {
setStep(2); // Step 2 = missing addons
} else {
setStep(3); // Step 3 = configure & submit
}
}
};

const handleContinueToStep3 = () => {
setStep(3); // Continue from addons step to config step
};

const handleBackToStep1 = () => {
setStep(1);
};

const handleBackToStep2 = () => {
setStep(2);
};

const handleSubmit = async (e) => {
e.preventDefault();
setError('');

// Check if there are missing files/addons and require confirmation
const hasMissingFiles = metadata?.missing_files_info?.has_missing &&
metadata.missing_files_info.missing_addons &&
metadata.missing_files_info.missing_addons.length > 0;

if (hasMissingFiles && !confirmedMissingFiles) {
setError('Please confirm that you want to proceed with missing addons');
return;
}

setSubmitting(true);

try {
@@ -246,6 +291,8 @@ export default function JobSubmission({ onSuccess }) {
allow_parallel_runners: formData.allow_parallel_runners,
render_settings: renderSettings,
upload_session_id: uploadSessionId || undefined, // Pass session ID to move context archive
unhide_objects: formData.unhide_objects || undefined, // Pass unhide toggle
enable_execution: formData.enable_execution || undefined, // Pass enable execution toggle
});

// Fetch the full job details
@@ -269,6 +316,8 @@ export default function JobSubmission({ onSuccess }) {
output_format: 'PNG',
allow_parallel_runners: true,
render_settings: null,
unhide_objects: false,
enable_execution: false,
});
setShowAdvancedSettings(false);
setFile(null);
@@ -304,10 +353,21 @@ export default function JobSubmission({ onSuccess }) {
</div>
<span>Upload & Extract Metadata</span>
</div>
{metadata?.missing_files_info?.missing_addons && metadata.missing_files_info.missing_addons.length > 0 && (
<>
<div className="w-8 h-0.5 bg-gray-700"></div>
<div className={`flex items-center gap-2 ${step >= 2 ? 'text-orange-500 font-medium' : 'text-gray-500'}`}>
<div className={`w-6 h-6 rounded-full flex items-center justify-center ${step >= 2 ? 'bg-orange-600 text-white' : 'bg-gray-700'}`}>
{step > 2 ? '✓' : '2'}
</div>
<span>Missing Addons</span>
</div>
</>
)}
<div className="w-8 h-0.5 bg-gray-700"></div>
<div className={`flex items-center gap-2 ${step >= 2 ? 'text-orange-500 font-medium' : 'text-gray-500'}`}>
<div className={`w-6 h-6 rounded-full flex items-center justify-center ${step >= 2 ? 'bg-orange-600 text-white' : 'bg-gray-700'}`}>
2
<div className={`flex items-center gap-2 ${step >= 3 ? 'text-orange-500 font-medium' : 'text-gray-500'}`}>
<div className={`w-6 h-6 rounded-full flex items-center justify-center ${step >= 3 ? 'bg-orange-600 text-white' : 'bg-gray-700'}`}>
{step > 3 ? '✓' : (metadata?.missing_files_info?.missing_addons && metadata.missing_files_info.missing_addons.length > 0 ? '3' : '2')}
</div>
<span>Configure & Submit</span>
</div>
@@ -370,9 +430,9 @@ export default function JobSubmission({ onSuccess }) {
</button>
</div>
)}
{(isUploading || metadataStatus === 'extracting') && (
{(isUploading || metadataStatus === 'extracting' || metadataStatus === 'processing') && (
<div className="mt-2 p-3 bg-orange-400/20 border border-orange-400/50 rounded-lg text-orange-400 text-sm">
{isUploading ? (
{isUploading && uploadProgress < 100 ? (
<div className="space-y-2">
<div className="flex items-center justify-between text-xs">
<span>Uploading file...</span>
@@ -385,6 +445,22 @@ export default function JobSubmission({ onSuccess }) {
></div>
</div>
</div>
) : metadataStatus === 'processing' ? (
<div className="space-y-2">
<div className="flex items-center justify-between text-xs">
<span>Processing file and extracting metadata...</span>
<span>{Math.round(uploadProgress)}%</span>
</div>
<div className="w-full bg-gray-700 rounded-full h-2">
<div
className="bg-orange-500 h-2 rounded-full transition-all duration-300"
style={{ width: `${uploadProgress}%` }}
></div>
</div>
<div className="text-xs text-orange-400/80 mt-1">
This may take a moment for large files...
</div>
</div>
) : (
<div className="flex items-center gap-2">
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-orange-500"></div>
@@ -430,9 +506,9 @@ export default function JobSubmission({ onSuccess }) {
)}
</div>
</div>
) : (
// Step 2: Configure and submit
<form onSubmit={handleSubmit} className="space-y-6">
) : step === 2 ? (
// Step 2: Missing Addons (only shown if there are missing addons)
<div className="space-y-6">
<button
type="button"
onClick={handleBackToStep1}
@@ -440,6 +516,39 @@ export default function JobSubmission({ onSuccess }) {
>
← Back to Upload
</button>
<div className="p-4 bg-yellow-400/20 border border-yellow-400/50 rounded-lg">
<div className="text-yellow-400 font-semibold mb-3">Missing Addons Detected</div>
<div className="text-yellow-400/80 text-sm mb-4">
<p className="mb-2">The following addons are required by this blend file but are not available on the render servers:</p>
<ul className="list-disc list-inside space-y-1 ml-2">
{metadata?.missing_files_info?.missing_addons?.map((addon, idx) => (
<li key={idx}>{addon}</li>
))}
</ul>
<p className="mt-3 text-xs">
<strong>Note:</strong> The render may fail or produce unexpected results if these addons are required for rendering.
You can still proceed, but be aware that the output may not match your expectations.
</p>
</div>
<button
type="button"
onClick={handleContinueToStep3}
className="w-full px-4 py-2 bg-yellow-600 text-white rounded-lg hover:bg-yellow-500 transition-colors font-medium"
>
Continue to Configuration →
</button>
</div>
</div>
) : (
// Step 3: Configure and submit
<form onSubmit={handleSubmit} className="space-y-6">
<button
type="button"
onClick={metadata?.missing_files_info?.missing_addons && metadata.missing_files_info.missing_addons.length > 0 ? handleBackToStep2 : handleBackToStep1}
className="text-orange-500 hover:text-orange-400 font-medium text-sm flex items-center gap-1"
>
← Back
</button>
<div>
<label className="block text-sm font-medium text-gray-300 mb-2">
Job Name
@@ -516,21 +625,89 @@ export default function JobSubmission({ onSuccess }) {
</label>
</div>

<div className="p-4 bg-blue-400/20 border border-blue-400/50 rounded-lg">
<div className="flex items-center">
<input
type="checkbox"
id="unhide_objects"
checked={formData.unhide_objects}
onChange={(e) => setFormData({ ...formData, unhide_objects: e.target.checked })}
className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-900 rounded"
/>
<label htmlFor="unhide_objects" className="ml-2 block text-sm text-gray-300">
<span className="font-medium">Enable unhide tweaks</span>
<span className="text-xs text-gray-400 block mt-1">
Automatically unhide objects and collections that are hidden from render (useful for certain blend files)
</span>
</label>
</div>
</div>

<div className="p-4 bg-blue-400/20 border border-blue-400/50 rounded-lg">
<div className="flex items-center">
<input
type="checkbox"
id="enable_execution"
checked={formData.enable_execution}
onChange={(e) => setFormData({ ...formData, enable_execution: e.target.checked })}
className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-900 rounded"
/>
<label htmlFor="enable_execution" className="ml-2 block text-sm text-gray-300">
<span className="font-medium">Enable auto-execution</span>
<span className="text-xs text-gray-400 block mt-1">
Allow Blender to auto-execute startup scripts (disabled by default for security)
</span>
</label>
</div>
</div>

{metadata && metadataStatus === 'completed' && (
<div className="p-4 bg-green-400/20 border border-green-400/50 rounded-lg text-sm mb-4">
<div className="text-green-400 font-semibold mb-2">Metadata from blend file:</div>
<div className="text-green-400/80 text-xs space-y-1">
<div>Frames: {metadata.frame_start} - {metadata.frame_end}</div>
<div>Resolution: {metadata.render_settings?.resolution_x} x {metadata.render_settings?.resolution_y}</div>
<div>Engine: {metadata.render_settings?.engine}</div>
{metadata.render_settings?.engine_settings?.samples && (
<div>Samples: {metadata.render_settings.engine_settings.samples}</div>
)}
{metadata.render_settings?.engine_settings?.taa_render_samples && (
<div>EEVEE Samples: {metadata.render_settings.engine_settings.taa_render_samples}</div>
)}
<>
<div className="p-4 bg-green-400/20 border border-green-400/50 rounded-lg text-sm mb-4">
<div className="text-green-400 font-semibold mb-2">Metadata from blend file:</div>
<div className="text-green-400/80 text-xs space-y-1">
<div>Frames: {metadata.frame_start} - {metadata.frame_end}</div>
<div>Resolution: {metadata.render_settings?.resolution_x} x {metadata.render_settings?.resolution_y}</div>
<div>Engine: {metadata.render_settings?.engine}</div>
{metadata.render_settings?.engine_settings?.samples && (
<div>Samples: {metadata.render_settings.engine_settings.samples}</div>
)}
{metadata.render_settings?.engine_settings?.taa_render_samples && (
<div>EEVEE Samples: {metadata.render_settings.engine_settings.taa_render_samples}</div>
)}
</div>
</div>
</div>

{/* Missing Files/Addons Warning */}
{metadata.missing_files_info?.has_missing &&
metadata.missing_files_info.missing_addons &&
metadata.missing_files_info.missing_addons.length > 0 && (
<div className="p-4 bg-yellow-400/20 border border-yellow-400/50 rounded-lg text-sm mb-4">
<div className="text-yellow-400 font-semibold mb-2">⚠️ Missing Addons Detected</div>
<div className="text-yellow-400/80 text-xs mb-3">
<p className="mb-2">The following addons are required by this blend file but are not available:</p>
<ul className="list-disc list-inside space-y-1">
{metadata.missing_files_info.missing_addons.map((addon, idx) => (
<li key={idx}>{addon}</li>
))}
</ul>
<p className="mt-2 font-medium">Rendering may fail or produce incorrect results without these addons.</p>
</div>
<div className="flex items-center">
<input
type="checkbox"
id="confirm_missing_files"
checked={confirmedMissingFiles}
onChange={(e) => setConfirmedMissingFiles(e.target.checked)}
className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-900 rounded"
/>
<label htmlFor="confirm_missing_files" className="ml-2 block text-sm text-yellow-400">
I understand the risks and want to proceed anyway
</label>
</div>
</div>
)}
</>
)}

{/* Advanced Render Settings */}
@@ -886,11 +1063,16 @@ export default function JobSubmission({ onSuccess }) {
)}
<button
type="submit"
disabled={submitting || !file || isUploading}
disabled={submitting || !file || isUploading || (metadata?.missing_files_info?.has_missing && !confirmedMissingFiles)}
className="w-full px-6 py-3 bg-orange-600 text-white rounded-lg hover:bg-orange-500 transition-colors font-semibold disabled:opacity-50 disabled:cursor-not-allowed"
>
{submitting ? 'Creating Job...' : 'Create Job'}
</button>
{metadata?.missing_files_info?.has_missing && !confirmedMissingFiles && (
<p className="text-xs text-yellow-400 mt-2 text-center">
Please confirm that you want to proceed with missing addons
</p>
)}
</div>
</form>
)}
@@ -3,6 +3,73 @@ const API_BASE = '/api';
// Global auth error handler - will be set by useAuth hook
let onAuthError = null;

// Request debouncing and deduplication
const pendingRequests = new Map(); // key: endpoint+params, value: Promise
const requestQueue = new Map(); // key: endpoint+params, value: { resolve, reject, timestamp }
const DEBOUNCE_DELAY = 100; // 100ms debounce delay
const DEDUPE_WINDOW = 5000; // 5 seconds - same request within this window uses cached promise

// Generate cache key from endpoint and params
function getCacheKey(endpoint, options = {}) {
const params = new URLSearchParams();
Object.keys(options).sort().forEach(key => {
if (options[key] !== undefined && options[key] !== null) {
params.append(key, String(options[key]));
}
});
const query = params.toString();
return `${endpoint}${query ? '?' + query : ''}`;
}
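
// Illustration (hypothetical values): because option keys are sorted before serialization,
// getCacheKey('/jobs', { offset: 0, limit: 50 }) and getCacheKey('/jobs', { limit: 50, offset: 0 })
// both yield '/jobs?limit=50&offset=0', so equivalent calls share one dedupe entry.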

// Sentinel value to indicate a request was superseded (instead of rejecting)
// Export it so components can check for it
export const REQUEST_SUPERSEDED = Symbol('REQUEST_SUPERSEDED');

// Debounced request wrapper
function debounceRequest(key, requestFn, delay = DEBOUNCE_DELAY) {
return new Promise((resolve, reject) => {
// Check if there's a pending request for this key
if (pendingRequests.has(key)) {
const pending = pendingRequests.get(key);
// If request is very recent (within dedupe window), reuse it
const now = Date.now();
if (pending.timestamp && (now - pending.timestamp) < DEDUPE_WINDOW) {
pending.promise.then(resolve).catch(reject);
return;
}
}

// Clear any existing timeout for this key
if (requestQueue.has(key)) {
const queued = requestQueue.get(key);
clearTimeout(queued.timeout);
// Resolve with sentinel value instead of rejecting - this prevents errors from propagating
// The new request will handle the actual response
queued.resolve(REQUEST_SUPERSEDED);
}

// Queue new request
const timeout = setTimeout(() => {
requestQueue.delete(key);
const promise = requestFn();
const timestamp = Date.now();
pendingRequests.set(key, { promise, timestamp });

promise
.then(result => {
pendingRequests.delete(key);
resolve(result);
})
.catch(error => {
pendingRequests.delete(key);
reject(error);
});
}, delay);

requestQueue.set(key, { resolve, reject, timeout });
});
}
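
// Usage sketch (hypothetical caller): two identical requests fired inside the debounce
// window collapse onto a single network call, and the superseded one resolves with the
// REQUEST_SUPERSEDED sentinel instead of rejecting:
//
//   const key = getCacheKey('/jobs', { limit: 50 });
//   const a = debounceRequest(key, () => api.get('/jobs?limit=50'));
//   const b = debounceRequest(key, () => api.get('/jobs?limit=50')); // supersedes `a`
//   await a; // resolves to REQUEST_SUPERSEDED - caller should skip this update
//   await b; // resolves with the actual response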

export const setAuthErrorHandler = (handler) => {
onAuthError = handler;
};
@@ -174,12 +241,53 @@ export const auth = {
};

export const jobs = {
async list() {
return api.get('/jobs');
async list(options = {}) {
const key = getCacheKey('/jobs', options);
return debounceRequest(key, () => {
const params = new URLSearchParams();
if (options.limit) params.append('limit', options.limit.toString());
if (options.offset) params.append('offset', options.offset.toString());
if (options.status) params.append('status', options.status);
if (options.sort) params.append('sort', options.sort);
const query = params.toString();
return api.get(`/jobs${query ? '?' + query : ''}`);
});
},

async get(id) {
return api.get(`/jobs/${id}`);
async listSummary(options = {}) {
const key = getCacheKey('/jobs/summary', options);
return debounceRequest(key, () => {
const params = new URLSearchParams();
if (options.limit) params.append('limit', options.limit.toString());
if (options.offset) params.append('offset', options.offset.toString());
if (options.status) params.append('status', options.status);
if (options.sort) params.append('sort', options.sort);
const query = params.toString();
return api.get(`/jobs/summary${query ? '?' + query : ''}`);
});
},

async get(id, options = {}) {
const key = getCacheKey(`/jobs/${id}`, options);
return debounceRequest(key, async () => {
if (options.etag) {
// Include ETag in request headers for conditional requests
const headers = { 'If-None-Match': options.etag };
const response = await fetch(`${API_BASE}/jobs/${id}`, {
credentials: 'include',
headers,
});
if (response.status === 304) {
return null; // Not modified
}
if (!response.ok) {
const errorData = await response.json().catch(() => null);
throw new Error(errorData?.error || response.statusText);
}
return response.json();
}
return api.get(`/jobs/${id}`);
});
},
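
// Conditional-fetch sketch (assumes the caller kept the ETag from an earlier response;
// the `lastSeenEtag` variable here is hypothetical):
//
//   const job = await jobs.get(42, { etag: lastSeenEtag });
//   if (job === null) { /* 304 Not Modified - keep the cached copy */ }
//   else if (job !== REQUEST_SUPERSEDED) { /* fresh data - update state */ }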

async create(jobData) {
@@ -202,8 +310,27 @@ export const jobs = {
return api.uploadFile(`/jobs/upload`, file, onProgress, mainBlendFile);
},

async getFiles(jobId) {
return api.get(`/jobs/${jobId}/files`);
async getFiles(jobId, options = {}) {
const key = getCacheKey(`/jobs/${jobId}/files`, options);
return debounceRequest(key, () => {
const params = new URLSearchParams();
if (options.limit) params.append('limit', options.limit.toString());
if (options.offset) params.append('offset', options.offset.toString());
if (options.file_type) params.append('file_type', options.file_type);
if (options.extension) params.append('extension', options.extension);
const query = params.toString();
return api.get(`/jobs/${jobId}/files${query ? '?' + query : ''}`);
});
},

async getFilesCount(jobId, options = {}) {
const key = getCacheKey(`/jobs/${jobId}/files/count`, options);
return debounceRequest(key, () => {
const params = new URLSearchParams();
if (options.file_type) params.append('file_type', options.file_type);
const query = params.toString();
return api.get(`/jobs/${jobId}/files/count${query ? '?' + query : ''}`);
});
},

async getContextArchive(jobId) {
@@ -219,12 +346,21 @@ export const jobs = {
},

async getTaskLogs(jobId, taskId, options = {}) {
const params = new URLSearchParams();
if (options.stepName) params.append('step_name', options.stepName);
if (options.logLevel) params.append('log_level', options.logLevel);
if (options.limit) params.append('limit', options.limit.toString());
const query = params.toString();
return api.get(`/jobs/${jobId}/tasks/${taskId}/logs${query ? '?' + query : ''}`);
const key = getCacheKey(`/jobs/${jobId}/tasks/${taskId}/logs`, options);
return debounceRequest(key, async () => {
const params = new URLSearchParams();
if (options.stepName) params.append('step_name', options.stepName);
if (options.logLevel) params.append('log_level', options.logLevel);
if (options.limit) params.append('limit', options.limit.toString());
if (options.sinceId) params.append('since_id', options.sinceId.toString());
const query = params.toString();
const result = await api.get(`/jobs/${jobId}/tasks/${taskId}/logs${query ? '?' + query : ''}`);
// Handle both old format (array) and new format (object with logs, last_id, limit)
if (Array.isArray(result)) {
return { logs: result, last_id: result.length > 0 ? result[result.length - 1].id : 0, limit: options.limit || 100 };
}
return result;
});
},

async getTaskSteps(jobId, taskId) {
@@ -239,6 +375,20 @@ export const jobs = {
return new WebSocket(url);
},

streamJobsWebSocket() {
const wsProtocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
const wsHost = window.location.host;
const url = `${wsProtocol}//${wsHost}${API_BASE}/jobs/ws`;
return new WebSocket(url);
},

streamJobWebSocket(jobId) {
const wsProtocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
const wsHost = window.location.host;
const url = `${wsProtocol}//${wsHost}${API_BASE}/jobs/${jobId}/ws`;
return new WebSocket(url);
},

async retryTask(jobId, taskId) {
return api.post(`/jobs/${jobId}/tasks/${taskId}/retry`);
},
@@ -247,8 +397,50 @@ export const jobs = {
return api.get(`/jobs/${jobId}/metadata`);
},

async getTasks(jobId) {
return api.get(`/jobs/${jobId}/tasks`);
async getTasks(jobId, options = {}) {
const key = getCacheKey(`/jobs/${jobId}/tasks`, options);
return debounceRequest(key, () => {
const params = new URLSearchParams();
if (options.limit) params.append('limit', options.limit.toString());
if (options.offset) params.append('offset', options.offset.toString());
if (options.status) params.append('status', options.status);
if (options.frameStart) params.append('frame_start', options.frameStart.toString());
if (options.frameEnd) params.append('frame_end', options.frameEnd.toString());
if (options.sort) params.append('sort', options.sort);
const query = params.toString();
return api.get(`/jobs/${jobId}/tasks${query ? '?' + query : ''}`);
});
},

async getTasksSummary(jobId, options = {}) {
const key = getCacheKey(`/jobs/${jobId}/tasks/summary`, options);
return debounceRequest(key, () => {
const params = new URLSearchParams();
if (options.limit) params.append('limit', options.limit.toString());
if (options.offset) params.append('offset', options.offset.toString());
if (options.status) params.append('status', options.status);
if (options.sort) params.append('sort', options.sort);
const query = params.toString();
return api.get(`/jobs/${jobId}/tasks/summary${query ? '?' + query : ''}`);
});
},

async batchGetJobs(jobIds) {
// Sort jobIds for consistent cache key
const sortedIds = [...jobIds].sort((a, b) => a - b);
const key = getCacheKey('/jobs/batch', { job_ids: sortedIds.join(',') });
return debounceRequest(key, () => {
return api.post('/jobs/batch', { job_ids: jobIds });
});
},

async batchGetTasks(jobId, taskIds) {
// Sort taskIds for consistent cache key
const sortedIds = [...taskIds].sort((a, b) => a - b);
const key = getCacheKey(`/jobs/${jobId}/tasks/batch`, { task_ids: sortedIds.join(',') });
return debounceRequest(key, () => {
return api.post(`/jobs/${jobId}/tasks/batch`, { task_ids: taskIds });
});
},
};