Enhance logging and context handling in job management. Introduce configurable logger initialization in the manager and runner commands. Switch job context archives from tar.gz to plain tar and generate ETags for improved caching. Refactor API endpoints to support the new context file structure and harden error handling in job submissions. Add unhide-objects and auto-execution options to job creation requests.
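
The diff below leans on a REQUEST_SUPERSEDED sentinel exported from ../utils/api, a module not shown in this commit. As a minimal sketch only of the pattern the checks imply — each endpoint tracks its most recent request and resolves stale responses to a shared sentinel instead of data — the function and variable names here are assumptions, not code from this commit:

// Sketch: resolve out-of-order responses to a sentinel the UI can ignore.
export const REQUEST_SUPERSEDED = Symbol('request-superseded');

const latestSeq = new Map(); // endpoint key -> most recent request sequence

export async function fetchLatest(key, url) {
  const seq = (latestSeq.get(key) || 0) + 1;
  latestSeq.set(key, seq);
  const res = await fetch(url);
  // A newer request for this key started while we were in flight.
  if (latestSeq.get(key) !== seq) return REQUEST_SUPERSEDED;
  return res.json();
}

Callers then compare the result against the sentinel before touching state, which is exactly what the component code below does after each fetch.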
@@ -1,5 +1,5 @@
import { useState, useEffect, useRef } from 'react';
import { jobs } from '../utils/api';
import { jobs, REQUEST_SUPERSEDED } from '../utils/api';
import VideoPlayer from './VideoPlayer';
import FileExplorer from './FileExplorer';

@@ -18,19 +18,53 @@ export default function JobDetails({ job, onClose, onUpdate }) {
  const [streaming, setStreaming] = useState(false);
  const [previewImage, setPreviewImage] = useState(null); // { url, fileName } or null
  const wsRef = useRef(null);
  const jobWsRef = useRef(null); // Separate ref for job WebSocket
  const logContainerRefs = useRef({}); // Refs for each step's log container
  const shouldAutoScrollRefs = useRef({}); // Auto-scroll state per step

  useEffect(() => {
    loadDetails();
    const interval = setInterval(loadDetails, 2000);
    return () => {
      clearInterval(interval);
      if (wsRef.current) {
        wsRef.current.close();
    // Use WebSocket for real-time updates instead of polling
    if (jobDetails.status === 'running' || jobDetails.status === 'pending' || !jobDetails.status) {
      connectJobWebSocket();
      return () => {
        if (jobWsRef.current) {
          try {
            jobWsRef.current.close();
          } catch (e) {
            // Ignore errors when closing
          }
          jobWsRef.current = null;
        }
        if (wsRef.current) {
          try {
            wsRef.current.close();
          } catch (e) {
            // Ignore errors when closing
          }
          wsRef.current = null;
        }
      };
    } else {
      // Job is completed/failed/cancelled - close WebSocket
      if (jobWsRef.current) {
        try {
          jobWsRef.current.close();
        } catch (e) {
          // Ignore errors when closing
        }
        jobWsRef.current = null;
      }
    };
  }, [job.id]);
      if (wsRef.current) {
        try {
          wsRef.current.close();
        } catch (e) {
          // Ignore errors when closing
        }
        wsRef.current = null;
      }
    }
  }, [job.id, jobDetails.status]);

  useEffect(() => {
    // Load logs and steps for all running tasks
@@ -83,14 +117,45 @@ export default function JobDetails({ job, onClose, onUpdate }) {

  const loadDetails = async () => {
    try {
      const [details, fileList, taskList] = await Promise.all([
      setLoading(true);
      // Use summary endpoint for tasks initially - much faster
      const [details, fileList, taskListResult] = await Promise.all([
        jobs.get(job.id),
        jobs.getFiles(job.id),
        jobs.getTasks(job.id),
        jobs.getFiles(job.id, { limit: 50 }), // Only load first page of files
        jobs.getTasksSummary(job.id, { limit: 100, sort: 'frame_start:asc' }), // Use summary endpoint
      ]);
      setJobDetails(details);
      setFiles(fileList);
      setTasks(taskList);

      // Handle paginated file response - check for superseded sentinel
      if (fileList === REQUEST_SUPERSEDED) {
        return; // Request was superseded, skip this update
      }
      const fileData = fileList?.data || fileList;
      setFiles(Array.isArray(fileData) ? fileData : []);

      // Handle paginated task summary response - check for superseded sentinel
      if (taskListResult === REQUEST_SUPERSEDED) {
        return; // Request was superseded, skip this update
      }
      const taskData = taskListResult?.data || taskListResult;
      const taskSummaries = Array.isArray(taskData) ? taskData : [];

      // Convert summaries to task-like objects for display
      const tasksForDisplay = taskSummaries.map(summary => ({
        id: summary.id,
        job_id: job.id,
        frame_start: summary.frame_start,
        frame_end: summary.frame_end,
        status: summary.status,
        task_type: summary.task_type,
        runner_id: summary.runner_id,
        // These will be loaded on expand
        current_step: null,
        retry_count: 0,
        max_retries: 3,
        created_at: new Date().toISOString(),
      }));
      setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []);

      // Fetch context archive contents separately (may not exist for old jobs)
      try {
@@ -101,26 +166,27 @@ export default function JobDetails({ job, onClose, onUpdate }) {
        setContextFiles([]);
      }

      // Only load task data (logs/steps) for tasks that don't have data yet
      // This prevents overwriting logs that are being streamed via WebSocket
      // Once we have logs for a task, we rely on WebSocket for new logs
      // Only load task data (logs/steps) for expanded tasks
      // Don't auto-load for all tasks - wait for user to expand
      if (details.status === 'running') {
        taskList.forEach(task => {
          const existingData = taskData[task.id];
          // Only fetch logs via HTTP if we don't have any logs yet
          // Once we have logs, WebSocket will handle new ones
          if (!existingData || !existingData.logs || existingData.logs.length === 0) {
            loadTaskData(task.id);
          } else if (!existingData.steps || existingData.steps.length === 0) {
            // If we have logs but no steps, fetch steps only
            loadTaskStepsOnly(task.id);
        // Only load data for tasks that are expanded
        tasksForDisplay.forEach(task => {
          if (expandedTasks.has(task.id)) {
            const existingData = taskData[task.id];
            // Only fetch logs via HTTP if we don't have any logs yet
            if (!existingData || !existingData.logs || existingData.logs.length === 0) {
              loadTaskData(task.id);
            } else if (!existingData.steps || existingData.steps.length === 0) {
              loadTaskStepsOnly(task.id);
            }
          }
        });
      }

      // Check if there's an MP4 output file
      const mp4File = fileList.find(
        (f) => f.file_type === 'output' && f.file_name.endsWith('.mp4')
      const fileArray = Array.isArray(fileData) ? fileData : [];
      const mp4File = fileArray.find(
        (f) => f.file_type === 'output' && f.file_name && f.file_name.endsWith('.mp4')
      );
      if (mp4File) {
        setVideoUrl(jobs.getVideoUrl(job.id));
@@ -138,23 +204,39 @@ export default function JobDetails({ job, onClose, onUpdate }) {

  const loadTaskData = async (taskId) => {
    try {
      const [logs, steps] = await Promise.all([
        jobs.getTaskLogs(job.id, taskId),
      console.log(`Loading task data for task ${taskId}...`);
      const [logsResult, steps] = await Promise.all([
        jobs.getTaskLogs(job.id, taskId, { limit: 1000 }), // Increased limit for completed tasks
        jobs.getTaskSteps(job.id, taskId),
      ]);

      // Check for superseded sentinel
      if (logsResult === REQUEST_SUPERSEDED || steps === REQUEST_SUPERSEDED) {
        return; // Request was superseded, skip this update
      }

      console.log(`Task ${taskId} logs result:`, logsResult);

      // Handle new format with logs, last_id, limit
      const logs = logsResult.logs || logsResult;
      const lastId = logsResult.last_id;

      console.log(`Task ${taskId} - loaded ${Array.isArray(logs) ? logs.length : 0} logs, ${Array.isArray(steps) ? steps.length : 0} steps`);

      setTaskData(prev => {
        const current = prev[taskId] || { steps: [], logs: [] };
        const current = prev[taskId] || { steps: [], logs: [], lastId: 0 };
        // Merge logs instead of replacing - this preserves WebSocket-streamed logs
        // Deduplicate by log ID
        const existingLogIds = new Set((current.logs || []).map(l => l.id));
        const newLogs = (logs || []).filter(l => !existingLogIds.has(l.id));
        const newLogs = (Array.isArray(logs) ? logs : []).filter(l => !existingLogIds.has(l.id));
        const mergedLogs = [...(current.logs || []), ...newLogs].sort((a, b) => a.id - b.id);

        return {
          ...prev,
          [taskId]: {
            steps: steps || current.steps, // Steps can be replaced (they don't change often)
            logs: mergedLogs
            steps: steps || current.steps,
            logs: mergedLogs,
            lastId: lastId || current.lastId
          }
        };
      });
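      // The lastId kept here tracks the highest log ID fetched for the task.
      // Presumably it lets a later fetch request only newer entries; the
      // server-side parameter for that is not shown in this diff.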
@@ -166,6 +248,10 @@ export default function JobDetails({ job, onClose, onUpdate }) {
  const loadTaskStepsOnly = async (taskId) => {
    try {
      const steps = await jobs.getTaskSteps(job.id, taskId);
      // Check for superseded sentinel
      if (steps === REQUEST_SUPERSEDED) {
        return; // Request was superseded, skip this update
      }
      setTaskData(prev => {
        const current = prev[taskId] || { steps: [], logs: [] };
        return {
@@ -181,9 +267,276 @@ export default function JobDetails({ job, onClose, onUpdate }) {
    }
  };

  const connectJobWebSocket = () => {
    try {
      // Close existing connection if any
      if (jobWsRef.current) {
        try {
          jobWsRef.current.close();
        } catch (e) {
          // Ignore errors when closing
        }
        jobWsRef.current = null;
      }

      const ws = jobs.streamJobWebSocket(job.id);
      jobWsRef.current = ws; // Store reference

      ws.onopen = () => {
        console.log('Job WebSocket connected for job', job.id);
      };

      ws.onmessage = (event) => {
        try {
          const data = JSON.parse(event.data);
          console.log('Job WebSocket message received:', data.type, data);

          if (data.type === 'job_update' && data.data) {
            // Update job details
            setJobDetails(prev => ({ ...prev, ...data.data }));
          } else if (data.type === 'task_update' && data.data) {
            // Update task in list
            setTasks(prev => {
              // Ensure prev is always an array
              const prevArray = Array.isArray(prev) ? prev : [];
              if (!data.task_id) {
                console.warn('task_update message missing task_id:', data);
                return prevArray;
              }
              const index = prevArray.findIndex(t => t.id === data.task_id);
              if (index >= 0) {
                const updated = [...prevArray];
                updated[index] = { ...updated[index], ...data.data };
                return updated;
              }
              // If task not found, it might be a new task - reload to be safe
              if (data.data && (data.data.status === 'running' || data.data.status === 'pending')) {
                setTimeout(() => {
                  const reloadTasks = async () => {
                    try {
                      const taskListResult = await jobs.getTasksSummary(job.id, { limit: 100, sort: 'frame_start:asc' });
                      // Check for superseded sentinel
                      if (taskListResult === REQUEST_SUPERSEDED) {
                        return; // Request was superseded, skip this update
                      }
                      const taskData = taskListResult.data || taskListResult;
                      const taskSummaries = Array.isArray(taskData) ? taskData : [];
                      const tasksForDisplay = taskSummaries.map(summary => ({
                        id: summary.id,
                        job_id: job.id,
                        frame_start: summary.frame_start,
                        frame_end: summary.frame_end,
                        status: summary.status,
                        task_type: summary.task_type,
                        runner_id: summary.runner_id,
                        current_step: null,
                        retry_count: 0,
                        max_retries: 3,
                        created_at: new Date().toISOString(),
                      }));
                      setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []);
                    } catch (error) {
                      console.error('Failed to reload tasks:', error);
                    }
                  };
                  reloadTasks();
                }, 100);
              }
              return prevArray;
            });
          } else if (data.type === 'task_added' && data.data) {
            // New task was added - reload task summaries to get the new task
            console.log('task_added message received, reloading tasks...', data);
            const reloadTasks = async () => {
              try {
                const taskListResult = await jobs.getTasksSummary(job.id, { limit: 100, sort: 'frame_start:asc' });
                // Check for superseded sentinel
                if (taskListResult === REQUEST_SUPERSEDED) {
                  return; // Request was superseded, skip this update
                }
                const taskData = taskListResult.data || taskListResult;
                const taskSummaries = Array.isArray(taskData) ? taskData : [];
                const tasksForDisplay = taskSummaries.map(summary => ({
                  id: summary.id,
                  job_id: job.id,
                  frame_start: summary.frame_start,
                  frame_end: summary.frame_end,
                  status: summary.status,
                  task_type: summary.task_type,
                  runner_id: summary.runner_id,
                  current_step: null,
                  retry_count: 0,
                  max_retries: 3,
                  created_at: new Date().toISOString(),
                }));
                setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []);
              } catch (error) {
                console.error('Failed to reload tasks:', error);
                // Fallback to full reload
                loadDetails();
              }
            };
            reloadTasks();
          } else if (data.type === 'tasks_added' && data.data) {
            // Multiple new tasks were added - reload task summaries
            console.log('tasks_added message received, reloading tasks...', data);
            const reloadTasks = async () => {
              try {
                const taskListResult = await jobs.getTasksSummary(job.id, { limit: 100, sort: 'frame_start:asc' });
                // Check for superseded sentinel
                if (taskListResult === REQUEST_SUPERSEDED) {
                  return; // Request was superseded, skip this update
                }
                const taskData = taskListResult.data || taskListResult;
                const taskSummaries = Array.isArray(taskData) ? taskData : [];
                const tasksForDisplay = taskSummaries.map(summary => ({
                  id: summary.id,
                  job_id: job.id,
                  frame_start: summary.frame_start,
                  frame_end: summary.frame_end,
                  status: summary.status,
                  task_type: summary.task_type,
                  runner_id: summary.runner_id,
                  current_step: null,
                  retry_count: 0,
                  max_retries: 3,
                  created_at: new Date().toISOString(),
                }));
                setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []);
              } catch (error) {
                console.error('Failed to reload tasks:', error);
                // Fallback to full reload
                loadDetails();
              }
            };
            reloadTasks();
          } else if (data.type === 'file_added' && data.data) {
            // New file was added - reload file list
            const reloadFiles = async () => {
              try {
                const fileList = await jobs.getFiles(job.id, { limit: 50 });
                // Check for superseded sentinel
                if (fileList === REQUEST_SUPERSEDED) {
                  return; // Request was superseded, skip this update
                }
                const fileData = fileList.data || fileList;
                setFiles(Array.isArray(fileData) ? fileData : []);
              } catch (error) {
                console.error('Failed to reload files:', error);
              }
            };
            reloadFiles();
          } else if (data.type === 'step_update' && data.data && data.task_id) {
            // Step was created or updated - update task data
            console.log('step_update message received:', data);
            setTaskData(prev => {
              const taskId = data.task_id;
              const current = prev[taskId] || { steps: [], logs: [] };
              const stepData = data.data;

              // Find if step already exists
              const existingSteps = current.steps || [];
              const stepIndex = existingSteps.findIndex(s => s.step_name === stepData.step_name);

              let updatedSteps;
              if (stepIndex >= 0) {
                // Update existing step
                updatedSteps = [...existingSteps];
                updatedSteps[stepIndex] = {
                  ...updatedSteps[stepIndex],
                  ...stepData,
                  id: stepData.step_id || updatedSteps[stepIndex].id,
                };
              } else {
                // Add new step
                updatedSteps = [...existingSteps, {
                  id: stepData.step_id,
                  step_name: stepData.step_name,
                  status: stepData.status,
                  duration_ms: stepData.duration_ms,
                  error_message: stepData.error_message,
                }];
              }

              return {
                ...prev,
                [taskId]: {
                  ...current,
                  steps: updatedSteps,
                }
              };
            });
          } else if (data.type === 'connected') {
            // Connection established
          }
        } catch (error) {
          console.error('Failed to parse WebSocket message:', error);
        }
      };

      ws.onerror = (error) => {
        console.error('Job WebSocket error:', {
          error,
          readyState: ws.readyState,
          url: ws.url,
          jobId: job.id,
          status: jobDetails.status
        });
        // WebSocket errors don't provide much detail, but we can check readyState
        if (ws.readyState === WebSocket.CLOSED || ws.readyState === WebSocket.CLOSING) {
          console.warn('Job WebSocket is closed or closing, will attempt reconnect');
        }
      };

      ws.onclose = (event) => {
        console.log('Job WebSocket closed:', {
          code: event.code,
          reason: event.reason,
          wasClean: event.wasClean,
          jobId: job.id,
          status: jobDetails.status
        });
        jobWsRef.current = null;

        // Code 1006 = Abnormal Closure (connection lost without close frame)
        // Code 1000 = Normal Closure
        // Code 1001 = Going Away (server restart, etc.)
        // We should reconnect for abnormal closures (1006) or unexpected closes
        const shouldReconnect = !event.wasClean || event.code === 1006 || event.code === 1001;

        // Get current status from state to avoid stale closure
        const currentStatus = jobDetails.status;
        const isActiveJob = currentStatus === 'running' || currentStatus === 'pending';

        if (shouldReconnect && isActiveJob) {
          console.log(`Attempting to reconnect job WebSocket in 2 seconds... (code: ${event.code})`);
          setTimeout(() => {
            // Check status again before reconnecting (might have changed)
            // Use a ref or check the current state directly
            if (!jobWsRef.current || jobWsRef.current.readyState === WebSocket.CLOSED) {
              // Re-check if job is still active by reading current state
              // We'll check this in connectJobWebSocket if needed
              connectJobWebSocket();
            }
          }, 2000);
        } else if (!isActiveJob) {
          console.log('Job is no longer active, not reconnecting WebSocket');
        }
      };
    } catch (error) {
      console.error('Failed to connect job WebSocket:', error);
    }
  };
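  // `jobs.streamJobWebSocket` lives in ../utils/api, which is not part of this
  // diff. A minimal sketch of what it presumably does (the endpoint path and
  // protocol selection are assumptions, not code from this commit):
  //
  //   streamJobWebSocket(jobId) {
  //     const proto = window.location.protocol === 'https:' ? 'wss' : 'ws';
  //     return new WebSocket(`${proto}://${window.location.host}/api/jobs/${jobId}/stream`);
  //   }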

  const startLogStream = (taskIds) => {
    if (taskIds.length === 0 || streaming) return;

    // Don't start streaming if job is no longer running
    if (jobDetails.status !== 'running' && jobDetails.status !== 'pending') {
      console.log('Job is not running, skipping log stream');
      return;
    }

    setStreaming(true);
    // For now, stream the first task's logs (WebSocket supports one task at a time)
    // In the future, we could have multiple WebSocket connections
@@ -219,17 +572,44 @@ export default function JobDetails({ job, onClose, onUpdate }) {
      }
    };

    ws.onopen = () => {
      console.log('Log WebSocket connected for task', primaryTaskId);
    };

    ws.onerror = (error) => {
      console.error('WebSocket error:', error);
      console.error('Log WebSocket error:', {
        error,
        readyState: ws.readyState,
        url: ws.url,
        taskId: primaryTaskId,
        jobId: job.id
      });
      setStreaming(false);
    };

    ws.onclose = () => {
    ws.onclose = (event) => {
      console.log('Log WebSocket closed:', {
        code: event.code,
        reason: event.reason,
        wasClean: event.wasClean,
        taskId: primaryTaskId,
        jobId: job.id
      });
      setStreaming(false);
      // Auto-reconnect if job is still running
      if (jobDetails.status === 'running' && taskIds.length > 0) {
      wsRef.current = null;

      // Code 1006 = Abnormal Closure (connection lost without close frame)
      // Code 1000 = Normal Closure
      // Code 1001 = Going Away (server restart, etc.)
      const shouldReconnect = !event.wasClean || event.code === 1006 || event.code === 1001;

      // Auto-reconnect if job is still running and close was unexpected
      if (shouldReconnect && jobDetails.status === 'running' && taskIds.length > 0) {
        console.log(`Attempting to reconnect log WebSocket in 2 seconds... (code: ${event.code})`);
        setTimeout(() => {
          if (jobDetails.status === 'running') {
          // Check status again before reconnecting (might have changed)
          // The startLogStream function will check if job is still running
          if (jobDetails.status === 'running' && taskIds.length > 0) {
            startLogStream(taskIds);
          }
        }, 2000);
@@ -243,9 +623,39 @@ export default function JobDetails({ job, onClose, onUpdate }) {
      newExpanded.delete(taskId);
    } else {
      newExpanded.add(taskId);
      // Load data if not already loaded
      if (!taskData[taskId]) {
      // Load full task details if we only have summary
      const tasksArray = Array.isArray(tasks) ? tasks : [];
      const currentTask = tasksArray.find(t => t.id === taskId);
      if (currentTask && !currentTask.created_at) {
        // This is a summary - fetch full task details
        try {
          const fullTasks = await jobs.getTasks(job.id, {
            limit: 1,
            // We can't filter by task ID, so we'll get all and find the one we need
          });
          const taskData = fullTasks.data || fullTasks;
          const fullTask = Array.isArray(taskData) ? taskData.find(t => t.id === taskId) : null;
          if (fullTask) {
            setTasks(prev => {
              const prevArray = Array.isArray(prev) ? prev : [];
              return prevArray.map(t => t.id === taskId ? fullTask : t);
            });
          }
        } catch (err) {
          console.error('Failed to load full task details:', err);
        }
      }
      // Always load logs/steps when expanding a task to ensure we have the latest data
      // This is especially important for completed tasks that weren't loaded before
      const existingData = taskData[taskId];
      const hasLogs = existingData && existingData.logs && existingData.logs.length > 0;
      const hasSteps = existingData && existingData.steps && existingData.steps.length > 0;

      if (!hasLogs || !hasSteps) {
        console.log(`Loading task data for task ${taskId} (logs: ${hasLogs}, steps: ${hasSteps})`);
        await loadTaskData(taskId);
      } else {
        console.log(`Task ${taskId} already has ${existingData.logs.length} logs and ${existingData.steps.length} steps, skipping load`);
      }
    }
    setExpandedTasks(newExpanded);
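
The commit message also mentions switching the job context archive from tar.gz to plain tar and generating an ETag for caching; that server-side change is not part of this file's diff. As a rough illustration only, a strong ETag for a context archive can be derived by hashing the archive bytes (the helper name and file-reading approach here are assumptions, not code from this commit):

const { createHash } = require('crypto');
const { readFile } = require('fs/promises');

// Hypothetical helper: hash the context tar to produce a stable ETag so
// clients can revalidate with If-None-Match instead of re-downloading.
async function contextETag(tarPath) {
  const bytes = await readFile(tarPath);
  return '"' + createHash('sha256').update(bytes).digest('hex') + '"';
}

Uncompressed tar makes this kind of hashing deterministic across rebuilds in a way gzip output (which embeds timestamps) is not, which is presumably part of the motivation for the format change.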