import { useState, useEffect, useRef } from 'react';
import { jobs, REQUEST_SUPERSEDED } from '../utils/api';
import { wsManager } from '../utils/websocket';
import VideoPlayer from './VideoPlayer';
import FileExplorer from './FileExplorer';
import ErrorMessage from './ErrorMessage';
import LoadingSpinner from './LoadingSpinner';

export default function JobDetails({ job, onClose, onUpdate }) {
  const [jobDetails, setJobDetails] = useState(job);
  const [files, setFiles] = useState([]);
  const [contextFiles, setContextFiles] = useState([]);
  const [tasks, setTasks] = useState([]);
  const [loading, setLoading] = useState(true);
  // Store steps and logs per task: { taskId: { steps: [], logs: [] } }
  const [taskData, setTaskData] = useState({});
  // Track which tasks are expanded
  const [expandedTasks, setExpandedTasks] = useState(new Set());
  const [streaming, setStreaming] = useState(false);
  const [previewImage, setPreviewImage] = useState(null); // { url, fileName } or null
  const [previewVideo, setPreviewVideo] = useState(null); // { url, fileName } or null

  const listenerIdRef = useRef(null); // Listener ID for shared WebSocket
  const subscribedChannelsRef = useRef(new Set()); // Track confirmed subscribed channels
  const pendingSubscriptionsRef = useRef(new Set()); // Track pending subscriptions (waiting for confirmation)
  const logContainerRefs = useRef({}); // Refs for each task's log container
  const shouldAutoScrollRefs = useRef({}); // Auto-scroll state per task
  const abortControllerRef = useRef(null); // AbortController for HTTP requests

  // Sync job prop to state when it changes
  useEffect(() => {
    if (job) {
      setJobDetails(job);
    }
  }, [job?.id, job?.status, job?.progress]);

  useEffect(() => {
    // Guard against undefined job or job.id
    if (!job || !job.id) {
      console.warn('JobDetails: job or job.id is undefined, skipping initialization');
      return;
    }

    // Create new AbortController for this effect
    abortControllerRef.current = new AbortController();

    loadDetails();

    // Use shared WebSocket manager for real-time updates
    listenerIdRef.current = wsManager.subscribe(`jobdetails_${job.id}`, {
      open: () => {
        console.log('JobDetails: Shared WebSocket connected for job', job.id);
        // Subscribe to job channel
        subscribe(`job:${job.id}`);
      },
      message: (data) => {
        handleWebSocketMessage(data);
      },
      error: (error) => {
        console.error('JobDetails: Shared WebSocket error:', error);
      },
      close: (event) => {
        console.log('JobDetails: Shared WebSocket closed:', event);
        subscribedChannelsRef.current.clear();
        pendingSubscriptionsRef.current.clear();
      }
    });

    // Ensure connection is established
    wsManager.connect();

    return () => {
      // Cancel any pending HTTP requests
      if (abortControllerRef.current) {
        abortControllerRef.current.abort();
        abortControllerRef.current = null;
      }
      // Unsubscribe from all channels
      unsubscribeAll();
      if (listenerIdRef.current) {
        wsManager.unsubscribe(listenerIdRef.current);
        listenerIdRef.current = null;
      }
    };
  }, [job?.id]);

  useEffect(() => {
    // Update log subscriptions based on expanded tasks
    updateLogSubscriptions();
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [expandedTasks, tasks.length, jobDetails.status]); // Use tasks.length instead of tasks to avoid unnecessary re-runs

  // Auto-scroll logs to bottom when new logs arrive
  useEffect(() => {
    // Use requestAnimationFrame to ensure DOM has updated
    requestAnimationFrame(() => {
      Object.keys(logContainerRefs.current).forEach(key => {
        const ref = logContainerRefs.current[key];
        if (!ref) return;
        // Initialize auto-scroll to true if not set
        if (shouldAutoScrollRefs.current[key] === undefined) {
          shouldAutoScrollRefs.current[key] = true;
        }
        // Always auto-scroll unless user has manually scrolled up
        // shouldAutoScrollRefs.current[key] is false only if user scrolled up manually
        if (shouldAutoScrollRefs.current[key] !== false) {
          // Scroll to bottom
          ref.scrollTop = ref.scrollHeight;
        }
      });
    });
  }, [taskData]);
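  // loadDetails: initial fetch for the panel. Pulls job metadata, the first page
  // of files, and lightweight task summaries in parallel; the context archive
  // listing and per-task logs/steps are fetched lazily afterwards.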
  const loadDetails = async () => {
    // Guard against undefined job or job.id
    if (!job || !job.id) {
      console.warn('JobDetails: Cannot load details - job or job.id is undefined');
      return;
    }
    try {
      setLoading(true);
      // Use summary endpoint for tasks initially - much faster
      const signal = abortControllerRef.current?.signal;
      const [details, fileList, taskListResult] = await Promise.all([
        jobs.get(job.id, { signal }),
        jobs.getFiles(job.id, { limit: 50, signal }), // Only load first page of files
        jobs.getTasksSummary(job.id, { sort: 'frame:asc', signal }), // Get all tasks
      ]);

      // Check if request was aborted
      if (signal?.aborted) {
        return;
      }

      setJobDetails(details);

      // Handle paginated file response - check for superseded sentinel
      if (fileList === REQUEST_SUPERSEDED) {
        return; // Request was superseded, skip this update
      }
      const fileData = fileList?.data || fileList;
      setFiles(Array.isArray(fileData) ? fileData : []);

      // Handle paginated task summary response - check for superseded sentinel
      if (taskListResult === REQUEST_SUPERSEDED) {
        return; // Request was superseded, skip this update
      }
      // Keep a distinct name here so the taskData state (cached logs/steps) used below is not shadowed
      const taskSummaryData = taskListResult?.data || taskListResult;
      const taskSummaries = Array.isArray(taskSummaryData) ? taskSummaryData : [];

      // Convert summaries to task-like objects for display
      const tasksForDisplay = taskSummaries.map(summary => ({
        id: summary.id,
        job_id: job.id,
        frame: summary.frame,
        status: summary.status,
        task_type: summary.task_type,
        runner_id: summary.runner_id,
        // These will be loaded on expand
        current_step: null,
        retry_count: 0,
        max_retries: 3,
        created_at: new Date().toISOString(),
      }));
      setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []);

      // Fetch context archive contents separately (may not exist for old jobs)
      try {
        const contextList = await jobs.getContextArchive(job.id, { signal });
        if (signal?.aborted) return;
        setContextFiles(contextList || []);
      } catch (error) {
        if (signal?.aborted) return;
        // Context archive may not exist for old jobs
        setContextFiles([]);
      }

      // Only load task data (logs/steps) for expanded tasks
      // Don't auto-load for all tasks - wait for user to expand
      if (details.status === 'running') {
        // Only load data for tasks that are expanded
        tasksForDisplay.forEach(task => {
          if (expandedTasks.has(task.id)) {
            const existingData = taskData[task.id];
            // Only fetch logs via HTTP if we don't have any logs yet
            if (!existingData || !existingData.logs || existingData.logs.length === 0) {
              loadTaskData(task.id);
            } else if (!existingData.steps || existingData.steps.length === 0) {
              loadTaskStepsOnly(task.id);
            }
          }
        });
      }
    } catch (error) {
      console.error('Failed to load job details:', error);
    } finally {
      setLoading(false);
    }
  };

  const handleDownload = (fileId, fileName) => {
    window.open(jobs.downloadFile(job.id, fileId), '_blank');
  };

  const loadTaskData = async (taskId) => {
    try {
      console.log(`Loading task data for task ${taskId}...`);
      const signal = abortControllerRef.current?.signal;
      const [logsResult, steps] = await Promise.all([
        jobs.getTaskLogs(job.id, taskId, { limit: 1000, signal }), // Increased limit for completed tasks
        jobs.getTaskSteps(job.id, taskId, { signal }),
      ]);

      // Check if request was aborted
      if (signal?.aborted) {
        return;
      }
      // Check for superseded sentinel
      if (logsResult === REQUEST_SUPERSEDED || steps === REQUEST_SUPERSEDED) {
        return; // Request was superseded, skip this update
      }

      console.log(`Task ${taskId} logs result:`, logsResult);
      // Handle new format with logs, last_id, limit
      const logs = logsResult.logs || logsResult;
      const lastId = logsResult.last_id;
      console.log(`Task ${taskId} - loaded ${Array.isArray(logs) ? logs.length : 0} logs, ${Array.isArray(steps) ? steps.length : 0} steps`);

      setTaskData(prev => {
        const current = prev[taskId] || { steps: [], logs: [], lastId: 0 };
        // Merge logs instead of replacing - this preserves WebSocket-streamed logs
        // Deduplicate by log ID
        const existingLogIds = new Set((current.logs || []).map(l => l.id));
        const newLogs = (Array.isArray(logs) ? logs : []).filter(l => !existingLogIds.has(l.id));
        const mergedLogs = [...(current.logs || []), ...newLogs].sort((a, b) => a.id - b.id);
        return {
          ...prev,
          [taskId]: {
            steps: steps || current.steps,
            logs: mergedLogs,
            lastId: lastId || current.lastId
          }
        };
      });
    } catch (error) {
      console.error('Failed to load task data:', error);
    }
  };

  const loadTaskStepsOnly = async (taskId) => {
    try {
      const signal = abortControllerRef.current?.signal;
      const steps = await jobs.getTaskSteps(job.id, taskId, { signal });

      // Check if request was aborted
      if (signal?.aborted) {
        return;
      }
      // Check for superseded sentinel
      if (steps === REQUEST_SUPERSEDED) {
        return; // Request was superseded, skip this update
      }

      setTaskData(prev => {
        const current = prev[taskId] || { steps: [], logs: [] };
        return {
          ...prev,
          [taskId]: {
            steps: steps || current.steps,
            logs: current.logs || [] // Preserve existing logs
          }
        };
      });
    } catch (error) {
      console.error('Failed to load task steps:', error);
    }
  };
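  // Channel bookkeeping. Two sets are tracked per channel name:
  // pendingSubscriptionsRef (subscribe sent, awaiting the 'subscribed' ack) and
  // subscribedChannelsRef (treated as active). Channels used by this component
  // are `job:{jobId}` for job/task/file/step events and `logs:{jobId}:{taskId}`
  // for per-task log streams.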
  const subscribe = (channel) => {
    // Use wsManager's channel subscription (handles reconnect automatically)
    wsManager.subscribeToChannel(channel);
    subscribedChannelsRef.current.add(channel);
    pendingSubscriptionsRef.current.add(channel);
  };

  const unsubscribe = (channel) => {
    // Use wsManager's channel unsubscription
    wsManager.unsubscribeFromChannel(channel);
    subscribedChannelsRef.current.delete(channel);
    pendingSubscriptionsRef.current.delete(channel);
  };

  const unsubscribeAll = () => {
    subscribedChannelsRef.current.forEach(channel => {
      unsubscribe(channel);
    });
  };

  const updateLogSubscriptions = () => {
    // Guard against undefined job or job.id
    if (!job || !job.id) {
      return;
    }

    // Determine which log channels should be subscribed
    const shouldSubscribe = new Set();
    const isRunning = jobDetails.status === 'running' || jobDetails.status === 'pending';

    // Subscribe to logs when task is expanded (not when step is expanded)
    if (isRunning) {
      expandedTasks.forEach(taskId => {
        const channel = `logs:${job.id}:${taskId}`;
        shouldSubscribe.add(channel);
      });
    }

    // Subscribe to new channels
    shouldSubscribe.forEach(channel => {
      if (!subscribedChannelsRef.current.has(channel)) {
        subscribe(channel);
      }
    });

    // Unsubscribe from channels that shouldn't be subscribed
    subscribedChannelsRef.current.forEach(channel => {
      if (channel.startsWith('logs:') && !shouldSubscribe.has(channel)) {
        unsubscribe(channel);
      }
    });
  };
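  // handleWebSocketMessage: single dispatcher for everything arriving on the
  // shared socket. Rough message shapes, as inferred from the handlers below:
  //   { type: 'subscribed' | 'subscription_error', channel, error? }
  //   { type: 'job_update', channel: 'job:{id}', data: { status, progress, ... } }
  //   { type: 'task_update' | 'task_reset' | 'task_added' | 'tasks_added' |
  //     'file_added' | 'step_update', task_id?, data: { ... } }
  //   { type: 'log', channel: 'logs:{jobId}:{taskId}',
  //     data: { id, task_id, step_name, level, message, created_at } }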
  const handleWebSocketMessage = (data) => {
    try {
      console.log('JobDetails: Client WebSocket message received:', data.type, data.channel, data);

      // Handle subscription responses - update both local refs and wsManager
      if (data.type === 'subscribed' && data.channel) {
        pendingSubscriptionsRef.current.delete(data.channel);
        subscribedChannelsRef.current.add(data.channel);
        wsManager.confirmSubscription(data.channel);
        console.log('Successfully subscribed to channel:', data.channel, 'Total subscriptions:', subscribedChannelsRef.current.size);
      } else if (data.type === 'subscription_error' && data.channel) {
        pendingSubscriptionsRef.current.delete(data.channel);
        subscribedChannelsRef.current.delete(data.channel);
        wsManager.failSubscription(data.channel);
        console.error('Subscription failed for channel:', data.channel, data.error);
        if (job && job.id && data.channel === `job:${job.id}`) {
          console.error('Failed to subscribe to job channel - job may not exist or access denied');
        }
      }

      // Handle job channel messages
      // Check both explicit channel and job_id match (for backwards compatibility)
      // Guard against undefined job.id
      if (!job || !job.id) {
        return;
      }
      const isJobChannel = data.channel === `job:${job.id}` || (data.job_id === job.id && !data.channel);

      if (isJobChannel) {
        console.log('Job channel message received:', data.type, data);

        if (data.type === 'job_update' && data.data) {
          // Update job details
          console.log('Updating job details:', data.data);
          setJobDetails(prev => {
            const updated = { ...prev, ...data.data };
            console.log('Job details updated:', {
              old_progress: prev.progress,
              new_progress: updated.progress,
              old_status: prev.status,
              new_status: updated.status
            });
            // Notify parent component of update
            if (onUpdate) {
              onUpdate(data.job_id || job.id, updated);
            }
            return updated;
          });
        } else if (data.type === 'task_update') {
          // Handle task_update - data.data contains the update fields
          const taskId = data.task_id || (data.data && (data.data.id || data.data.task_id));
          console.log('Task update received:', { task_id: taskId, data: data.data, full_message: data });
          if (!taskId) {
            console.warn('task_update message missing task_id:', data);
            return;
          }
          if (!data.data) {
            console.warn('task_update message missing data:', data);
            return;
          }

          // Update task in list
          setTasks(prev => {
            // Ensure prev is always an array
            const prevArray = Array.isArray(prev) ? prev : [];
            const index = prevArray.findIndex(t => t.id === taskId);
            if (index >= 0) {
              // Task exists - update it
              const updated = [...prevArray];
              const oldTask = updated[index];
              // Create a completely new task object to ensure React detects the change
              const newTask = {
                ...oldTask,
                // Explicitly update each field from data.data to ensure changes are detected
                status: data.data.status !== undefined ? data.data.status : oldTask.status,
                runner_id: data.data.runner_id !== undefined ? data.data.runner_id : oldTask.runner_id,
                started_at: data.data.started_at !== undefined ? data.data.started_at : oldTask.started_at,
                completed_at: data.data.completed_at !== undefined ? data.data.completed_at : oldTask.completed_at,
                error_message: data.data.error_message !== undefined ? data.data.error_message : oldTask.error_message,
                output_path: data.data.output_path !== undefined ? data.data.output_path : oldTask.output_path,
                current_step: data.data.current_step !== undefined ? data.data.current_step : oldTask.current_step,
                // Merge any other fields
                ...Object.keys(data.data).reduce((acc, key) => {
                  if (!['status', 'runner_id', 'started_at', 'completed_at', 'error_message', 'output_path', 'current_step'].includes(key)) {
                    acc[key] = data.data[key];
                  }
                  return acc;
                }, {})
              };
              updated[index] = newTask;
              console.log('Updated task at index', index, {
                task_id: taskId,
                old_status: oldTask.status,
                new_status: newTask.status,
                old_runner_id: oldTask.runner_id,
                new_runner_id: newTask.runner_id,
                update_data: data.data,
                full_new_task: newTask
              });
              return updated;
            }

            // Task not found - check if data contains full task info (from initial state)
            // Check both 'id' and 'task_id' fields
            const taskIdFromData = data.data.id || data.data.task_id;
            if (data.data && typeof data.data === 'object' && taskIdFromData && taskIdFromData === taskId) {
              // This is a full task object from initial state - add it
              console.log('Adding new task from initial state:', data.data);
              return [...prevArray, { ...data.data, id: taskIdFromData }];
            }

            // If task not found and it's a partial update, reload tasks to get the full list
            console.log('Task not found in list, reloading tasks...');
            setTimeout(() => {
              const reloadTasks = async () => {
                try {
                  const signal = abortControllerRef.current?.signal;
                  const taskListResult = await jobs.getTasksSummary(job.id, { sort: 'frame:asc', signal });
                  // Check if request was aborted
                  if (signal?.aborted) {
                    return;
                  }
                  // Check for superseded sentinel
                  if (taskListResult === REQUEST_SUPERSEDED) {
                    return; // Request was superseded, skip this update
                  }
                  const taskData = taskListResult.data || taskListResult;
                  const taskSummaries = Array.isArray(taskData) ? taskData : [];
                  const tasksForDisplay = taskSummaries.map(summary => ({
                    id: summary.id,
                    job_id: job.id,
                    frame: summary.frame,
                    status: summary.status,
                    task_type: summary.task_type,
                    runner_id: summary.runner_id,
                    current_step: summary.current_step || null,
                    retry_count: summary.retry_count || 0,
                    max_retries: summary.max_retries || 3,
                    created_at: summary.created_at || new Date().toISOString(),
                    started_at: summary.started_at,
                    completed_at: summary.completed_at,
                    error_message: summary.error_message,
                    output_path: summary.output_path,
                  }));
                  setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []);
                } catch (error) {
                  console.error('Failed to reload tasks:', error);
                }
              };
              reloadTasks();
            }, 100);
            return prevArray;
          });
        } else if (data.type === 'task_reset') {
          // Handle task_reset - task was reset to pending, steps and logs were cleared
          const taskId = data.task_id || (data.data && (data.data.id || data.data.task_id));
          console.log('Task reset received:', { task_id: taskId, data: data.data });
          if (!taskId) {
            console.warn('task_reset message missing task_id:', data);
            return;
          }

          // Update task in list
          setTasks(prev => {
            const prevArray = Array.isArray(prev) ? prev : [];
            const index = prevArray.findIndex(t => t.id === taskId);
            if (index >= 0) {
              const updated = [...prevArray];
              const oldTask = updated[index];
              const newTask = {
                ...oldTask,
                status: data.data?.status || 'pending',
                runner_id: null,
                current_step: null,
                started_at: null,
                error_message: data.data?.error_message || null,
                retry_count: data.data?.retry_count !== undefined ? data.data.retry_count : oldTask.retry_count,
              };
              updated[index] = newTask;
              console.log('Reset task at index', index, { task_id: taskId, new_task: newTask });
              return updated;
            }
            return prevArray;
          });

          // Clear steps and logs for this task if flags indicate they were cleared
          if (data.data?.steps_cleared || data.data?.logs_cleared) {
            setTaskData(prev => {
              const current = prev[taskId];
              if (!current) return prev;
              return {
                ...prev,
                [taskId]: {
                  steps: data.data?.steps_cleared ? [] : current.steps,
                  logs: data.data?.logs_cleared ? [] : current.logs,
                  lastId: 0,
                }
              };
            });
          }
        } else if (data.type === 'task_added' && data.data) {
          // New task was added - reload task summaries to get the new task
          console.log('task_added message received, reloading tasks...', data);
          const reloadTasks = async () => {
            try {
              const signal = abortControllerRef.current?.signal;
              const taskListResult = await jobs.getTasksSummary(job.id, { limit: 100, sort: 'frame:asc', signal });
              // Check if request was aborted
              if (signal?.aborted) {
                return;
              }
              // Check for superseded sentinel
              if (taskListResult === REQUEST_SUPERSEDED) {
                return; // Request was superseded, skip this update
              }
              const taskData = taskListResult.data || taskListResult;
              const taskSummaries = Array.isArray(taskData) ? taskData : [];
              const tasksForDisplay = taskSummaries.map(summary => ({
                id: summary.id,
                job_id: job.id,
                frame: summary.frame,
                status: summary.status,
                task_type: summary.task_type,
                runner_id: summary.runner_id,
                current_step: null,
                retry_count: 0,
                max_retries: 3,
                created_at: new Date().toISOString(),
              }));
              setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []);
            } catch (error) {
              console.error('Failed to reload tasks:', error);
              // Fallback to full reload
              loadDetails();
            }
          };
          reloadTasks();
        } else if (data.type === 'tasks_added' && data.data) {
          // Multiple new tasks were added - reload task summaries
          console.log('tasks_added message received, reloading tasks...', data);
          const reloadTasks = async () => {
            try {
              const signal = abortControllerRef.current?.signal;
              const taskListResult = await jobs.getTasksSummary(job.id, { limit: 100, sort: 'frame:asc', signal });
              // Check if request was aborted
              if (signal?.aborted) {
                return;
              }
              // Check for superseded sentinel
              if (taskListResult === REQUEST_SUPERSEDED) {
                return; // Request was superseded, skip this update
              }
              const taskData = taskListResult.data || taskListResult;
              const taskSummaries = Array.isArray(taskData) ? taskData : [];
              const tasksForDisplay = taskSummaries.map(summary => ({
                id: summary.id,
                job_id: job.id,
                frame: summary.frame,
                status: summary.status,
                task_type: summary.task_type,
                runner_id: summary.runner_id,
                current_step: null,
                retry_count: 0,
                max_retries: 3,
                created_at: new Date().toISOString(),
              }));
              setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []);
            } catch (error) {
              console.error('Failed to reload tasks:', error);
              // Fallback to full reload
              loadDetails();
            }
          };
          reloadTasks();
        } else if (data.type === 'file_added' && data.data) {
          // New file was added - reload file list
          const reloadFiles = async () => {
            try {
              const fileList = await jobs.getFiles(job.id, { limit: 50 });
              // Check for superseded sentinel
              if (fileList === REQUEST_SUPERSEDED) {
                return; // Request was superseded, skip this update
              }
              const fileData = fileList.data || fileList;
              setFiles(Array.isArray(fileData) ? fileData : []);
            } catch (error) {
              console.error('Failed to reload files:', error);
            }
          };
          reloadFiles();
        } else if (data.type === 'step_update' && data.data && data.task_id) {
          // Step was created or updated - update task data
          console.log('step_update message received:', data);
          setTaskData(prev => {
            const taskId = data.task_id;
            const current = prev[taskId] || { steps: [], logs: [] };
            const stepData = data.data;

            // Find if step already exists
            const existingSteps = current.steps || [];
            const stepIndex = existingSteps.findIndex(s => s.step_name === stepData.step_name);

            let updatedSteps;
            if (stepIndex >= 0) {
              // Update existing step
              updatedSteps = [...existingSteps];
              updatedSteps[stepIndex] = {
                ...updatedSteps[stepIndex],
                ...stepData,
                id: stepData.step_id || updatedSteps[stepIndex].id,
              };
            } else {
              // Add new step
              updatedSteps = [...existingSteps, {
                id: stepData.step_id,
                step_name: stepData.step_name,
                status: stepData.status,
                duration_ms: stepData.duration_ms,
                error_message: stepData.error_message,
              }];
            }

            return {
              ...prev,
              [taskId]: {
                ...current,
                steps: updatedSteps,
              }
            };
          });
        }
      } else if (data.channel && data.channel.startsWith('logs:')) {
        // Handle log channel messages
        if (data.type === 'log' && data.data) {
          const log = data.data;
          // Get task_id from log data or top-level message
          const taskId = log.task_id || data.task_id;
          if (!taskId) {
            console.warn('Log message missing task_id:', data);
            return;
          }
          console.log('Received log for task:', taskId, log);

          setTaskData(prev => {
            const current = prev[taskId] || { steps: [], logs: [] };

            // If log has a step_name, ensure the step exists in the steps array
            let updatedSteps = current.steps || [];
            if (log.step_name) {
              const stepExists = updatedSteps.some(s => s.step_name === log.step_name);
              if (!stepExists) {
                // Create placeholder step for logs that arrive before step_update
                console.log('Creating placeholder step for:', log.step_name, 'in task:', taskId);
                updatedSteps = [...updatedSteps, {
                  id: null, // Will be updated when step_update arrives
                  step_name: log.step_name,
                  status: 'running', // Default to running since we're receiving logs
                  duration_ms: null,
                  error_message: null,
                }];
              }
            }

            // Check if log already exists (avoid duplicates)
            if (!current.logs.find(l => l.id === log.id)) {
              return {
                ...prev,
                [taskId]: {
                  ...current,
                  steps: updatedSteps,
                  logs: [...current.logs, log]
                }
              };
            }
            // Even if log is duplicate, update steps if needed
            return {
              ...prev,
              [taskId]: {
                ...current,
                steps: updatedSteps,
              }
            };
          });
        }
      } else if (data.type === 'connected') {
        // Connection established
      }
    } catch (error) {
      console.error('Failed to parse WebSocket message:', error);
    }
  };

  // startLogStream is no longer needed - subscriptions are managed by updateLogSubscriptions
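  // toggleTask: expand or collapse one task row. On expand, a summary row is
  // upgraded to the full task record when needed, then logs/steps are loaded
  // once; collapsing keeps cached data so re-expanding is cheap.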
  const toggleTask = async (taskId) => {
    const newExpanded = new Set(expandedTasks);
    if (newExpanded.has(taskId)) {
      newExpanded.delete(taskId);
    } else {
      newExpanded.add(taskId);

      // Load full task details if we only have summary
      const tasksArray = Array.isArray(tasks) ? tasks : [];
      const currentTask = tasksArray.find(t => t.id === taskId);
      if (currentTask && !currentTask.created_at) {
        // This is a summary - fetch full task details
        try {
          const signal = abortControllerRef.current?.signal;
          const fullTasks = await jobs.getTasks(job.id, {
            limit: 1,
            signal,
            // We can't filter by task ID, so we'll get all and find the one we need
          });
          // Check if request was aborted
          if (signal?.aborted) {
            return;
          }
          const taskData = fullTasks.data || fullTasks;
          const fullTask = Array.isArray(taskData) ? taskData.find(t => t.id === taskId) : null;
          if (fullTask) {
            setTasks(prev => {
              const prevArray = Array.isArray(prev) ? prev : [];
              return prevArray.map(t => t.id === taskId ? fullTask : t);
            });
          }
        } catch (err) {
          console.error('Failed to load full task details:', err);
        }
      }

      // Always load logs/steps when expanding a task to ensure we have the latest data
      // This is especially important for completed tasks that weren't loaded before
      const existingData = taskData[taskId];
      const hasLogs = existingData && existingData.logs && existingData.logs.length > 0;
      const hasSteps = existingData && existingData.steps && existingData.steps.length > 0;
      if (!hasLogs || !hasSteps) {
        console.log(`Loading task data for task ${taskId} (logs: ${hasLogs}, steps: ${hasSteps})`);
        await loadTaskData(taskId);
      } else {
        console.log(`Task ${taskId} already has ${existingData.logs.length} logs and ${existingData.steps.length} steps, skipping load`);
      }
    }
    setExpandedTasks(newExpanded);
  };

  const toggleAutoScroll = (taskId, containerName) => {
    const key = `${taskId}-${containerName}`;
    // Toggle auto-scroll state (default to true if undefined)
    const currentState = shouldAutoScrollRefs.current[key] !== false;
    shouldAutoScrollRefs.current[key] = !currentState;
    // Force re-render to update button state
    // We don't have expandedSteps anymore, so just trigger a re-render by updating a dummy state
    setExpandedTasks(new Set(expandedTasks));
  };

  const handleLogWheel = (taskId, containerName) => {
    const key = `${taskId}-${containerName}`;
    // Turn off auto-scroll when user scrolls with wheel
    if (shouldAutoScrollRefs.current[key] !== false) {
      shouldAutoScrollRefs.current[key] = false;
      // Force re-render to update button state
      setExpandedTasks(new Set(expandedTasks));
    }
  };

  const handleLogClick = (taskId, containerName, e) => {
    // Pause on left or right click
    if (e.button === 0 || e.button === 2) {
      const key = `${taskId}-${containerName}`;
      if (shouldAutoScrollRefs.current[key] !== false) {
        shouldAutoScrollRefs.current[key] = false;
        // Force re-render to update button state
        setExpandedTasks(new Set(expandedTasks));
      }
    }
  };
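  // Presentation helpers: Tailwind classes for log levels and task statuses,
  // and a status glyph for steps.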
  const getLogLevelColor = (level) => {
    switch (level) {
      case 'ERROR': return 'text-red-400';
      case 'WARN': return 'text-yellow-400';
      case 'DEBUG': return 'text-gray-500';
      default: return 'text-gray-200';
    }
  };

  const getStepStatusIcon = (status) => {
    switch (status) {
      case 'completed': return '✓';
      case 'failed': return '✗';
      case 'running': return '⏳';
      case 'skipped': return '⏸';
      default: return '○';
    }
  };

  const getTaskStatusColor = (status) => {
    const colors = {
      pending: 'bg-yellow-400/20 text-yellow-400',
      running: 'bg-orange-400/20 text-orange-400',
      completed: 'bg-green-400/20 text-green-400',
      failed: 'bg-red-400/20 text-red-400',
    };
    return colors[status] || 'bg-gray-500/20 text-gray-400';
  };

  const handleDelete = async () => {
    if (!confirm('Are you sure you want to permanently delete this job? This action cannot be undone.')) return;
    try {
      await jobs.delete(jobDetails.id);
      if (onUpdate) {
        onUpdate();
      }
      onClose();
    } catch (error) {
      alert('Failed to delete job: ' + error.message);
    }
  };

  const outputFiles = files.filter((f) => f.file_type === 'output');
  const inputFiles = files.filter((f) => f.file_type === 'input');

  // Helper to check if a file is a browser-supported image (or EXR which we convert server-side)
  const isImageFile = (fileName) => {
    // Browser-supported image formats + EXR (converted server-side)
    const imageExtensions = [
      '.png', '.jpg', '.jpeg', '.gif', '.webp', '.bmp', '.svg', '.ico',
      '.avif', '.apng', '.jfif', '.pjpeg', '.pjp',
      '.exr' // EXR files are converted to PNG server-side
    ];
    const lowerName = fileName.toLowerCase();
    return imageExtensions.some(ext => lowerName.endsWith(ext));
  };

  // Helper to check if a file is an EXR file
  const isEXRFile = (fileName) => {
    return fileName.toLowerCase().endsWith('.exr');
  };

  return (
    <>
      {/* Image Preview Modal */}
      {previewImage && (
        <div onClick={() => setPreviewImage(null)}>
          <div onClick={(e) => e.stopPropagation()}>
            <div>{previewImage.fileName}</div>
            <img
              src={previewImage.url}
              alt={previewImage.fileName}
              onError={(e) => {
                e.target.style.display = 'none';
                const errorDiv = e.target.nextSibling;
                if (errorDiv) {
                  errorDiv.style.display = 'block';
                }
              }}
            />
            <div style={{ display: 'none' }}>Failed to load image preview</div>
          </div>
        </div>
      )}

      {/* Video Preview Modal */}
      {previewVideo && (
        <div onClick={() => setPreviewVideo(null)}>
          <div onClick={(e) => e.stopPropagation()}>
            <div>{previewVideo.fileName}</div>
            <VideoPlayer src={previewVideo.url} /> {/* prop name assumed */}
          </div>
        </div>
      )}

      {/* Job Details Panel */}
      <div>
        <div>
          <h2>{jobDetails.name}</h2>
          <div> {/* header controls; button labels assumed */}
            {(jobDetails.status === 'completed' || jobDetails.status === 'failed' || jobDetails.status === 'cancelled') && (
              <button onClick={handleDelete}>Delete</button>
            )}
            <button onClick={onClose}>✕</button>
          </div>
        </div>

        {loading && <LoadingSpinner />}

        {!loading && (
          <>
            <div>
              <div>
                <div>Status</div>
                <div>{jobDetails.status}</div>
              </div>
              <div>
                <div>Progress</div>
                <div>{(jobDetails.progress || 0).toFixed(1)}%</div>
              </div>
              <div>
                <div>Frame Range</div>
                <div>{jobDetails.frame_start} - {jobDetails.frame_end}</div>
              </div>
              <div>
                <div>Output Format</div>
                <div>{jobDetails.output_format}</div>
              </div>
            </div>
            {contextFiles.length > 0 && (
              <div>
                <h3>Context Archive</h3>
                <FileExplorer
                  files={contextFiles.map(f => ({
                    id: 0, // Context files don't have IDs
                    file_name: f.path || f.name || '',
                    file_size: f.size || 0,
                    file_type: 'input'
                  }))}
                  onDownload={null} // Context files can't be downloaded individually
                  isImageFile={isImageFile}
                />
              </div>
            )}

            {outputFiles.length > 0 && (
              <div>
                <h3>Output Files</h3>
                <FileExplorer
                  files={outputFiles}
                  onDownload={handleDownload}
                  onImagePreview={(file) => { /* prop name assumed */
                    // Use EXR preview endpoint for EXR files, regular download for others
                    const imageUrl = isEXRFile(file.file_name)
                      ? jobs.previewEXR(job.id, file.id)
                      : jobs.downloadFile(job.id, file.id);
                    setPreviewImage({ url: imageUrl, fileName: file.file_name });
                  }}
                  onVideoPreview={(file) => {
                    setPreviewVideo({ url: jobs.getVideoUrl(job.id), fileName: file.file_name });
                  }}
                  isImageFile={isImageFile}
                />
              </div>
            )}
            <div>
              <h3>
                Tasks {streaming && (<span>streaming</span>)}
              </h3>
              <div>
                {tasks.length > 0 ? (
                  tasks.map((task) => {
                    const isExpanded = expandedTasks.has(task.id);
                    const taskInfo = taskData[task.id] || { steps: [], logs: [] };
                    const { steps, logs } = taskInfo;
                    // Sort all logs chronologically (no grouping by step_name)
                    const sortedLogs = [...logs].sort((a, b) => new Date(a.created_at) - new Date(b.created_at));
                    return (
                      <div key={task.id}>
                        {/* Task Header */}
                        <div
                          onClick={() => toggleTask(task.id)}
                          className="flex items-center justify-between p-3 bg-gray-800 rounded-t-lg cursor-pointer hover:bg-gray-750 transition-colors"
                        >
                          <div>
                            <span>{isExpanded ? '▼' : '▶'}</span>
                            <span className={getTaskStatusColor(task.status)}>{task.status}</span>
                            <span>
                              {task.task_type === 'encode'
                                ? `Encode (${jobDetails.frame_start} - ${jobDetails.frame_end})`
                                : `Frame ${task.frame}`}
                            </span>
                            {task.task_type && task.task_type !== 'render' && task.task_type !== 'encode' && (
                              <span>({task.task_type})</span>
                            )}
                          </div>
                          <span>{task.runner_id && `Runner ${task.runner_id}`}</span>
                        </div>

                        {/* Task Content (Continuous Log Stream) */}
                        {isExpanded && (
                          <div>
                            {/* Header with auto-scroll */}
                            <button onClick={() => toggleAutoScroll(task.id, 'logs')}>
                              Auto-scroll {/* control label assumed */}
                            </button>

                            {/* Logs */}
                            <div
                              ref={(el) => {
                                if (el) {
                                  logContainerRefs.current[`${task.id}-logs`] = el;
                                  // Initialize auto-scroll to true (follow logs) when ref is first set
                                  if (shouldAutoScrollRefs.current[`${task.id}-logs`] === undefined) {
                                    shouldAutoScrollRefs.current[`${task.id}-logs`] = true;
                                  }
                                }
                              }}
                              onWheel={() => handleLogWheel(task.id, 'logs')}
                              onMouseDown={(e) => handleLogClick(task.id, 'logs', e)}
                              onContextMenu={(e) => handleLogClick(task.id, 'logs', e)}
                              className="bg-black text-green-400 font-mono text-sm p-3 rounded max-h-96 overflow-y-auto"
                            >
                              {sortedLogs.length === 0 ? (
                                <div>No logs yet...</div>
                              ) : (
                                sortedLogs.map((log) => (
                                  <div key={log.id} className={getLogLevelColor(log.level)}>
                                    [{new Date(log.created_at).toLocaleTimeString()}] {log.message}
                                  </div>
                                ))
                              )}
                            </div>
                          </div>
                        )}
                      </div>
                    );
                  })
                ) : (
                  <div>No tasks yet...</div>
                )}
              </div>
            </div>
          </>
        )}
      </div>
    </>
  );
}