Implement context archive handling and metadata extraction for render jobs. Add a startup check for Blender availability, create per-job context archives (context.tar.gz), and extract metadata from .blend files via a headless Blender run. Update job creation and retrieval to support the new metadata structure and context file management. Enhance client-side components to display context files and use the new context API endpoints.

2025-11-24 10:02:13 -06:00
parent f9ff4d0138
commit a029714e08
13 changed files with 3887 additions and 856 deletions
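
The new runner route below exposes each job's inputs as a single context.tar.gz. A rough sketch of how a runner might consume it follows; the full URL prefix, the bearer-token header, and the downloadContext helper are assumptions for illustration, since only the relative route and the tar.gz format appear in the diffs. A Go client can stream the response straight through gzip and tar readers:

package main

import (
	"archive/tar"
	"compress/gzip"
	"fmt"
	"io"
	"net/http"
	"os"
	"path/filepath"
	"strings"
)

// downloadContext fetches a job's context.tar.gz from the manager and unpacks it into destDir.
func downloadContext(baseURL, token string, jobID int64, destDir string) error {
	url := fmt.Sprintf("%s/jobs/%d/context.tar.gz", baseURL, jobID)
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "Bearer "+token) // auth scheme is an assumption

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status: %s", resp.Status)
	}

	gzr, err := gzip.NewReader(resp.Body)
	if err != nil {
		return err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return err
		}
		target := filepath.Join(destDir, hdr.Name)
		// Reject entries that would escape destDir, mirroring the server-side check.
		if !strings.HasPrefix(filepath.Clean(target), filepath.Clean(destDir)+string(os.PathSeparator)) {
			return fmt.Errorf("invalid path in archive: %s", hdr.Name)
		}
		if hdr.Typeflag != tar.TypeReg {
			continue
		}
		if err := os.MkdirAll(filepath.Dir(target), 0755); err != nil {
			return err
		}
		out, err := os.Create(target)
		if err != nil {
			return err
		}
		if _, err := io.Copy(out, tr); err != nil {
			out.Close()
			return err
		}
		out.Close()
	}
}

func main() {
	// Placeholder values for illustration only.
	if err := downloadContext("http://localhost:8080/api/runner", "runner-token", 42, "./work"); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}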

View File

@@ -6,6 +6,7 @@ import (
"log"
"net/http"
"os"
"os/exec"
"jiggablend/internal/api"
"jiggablend/internal/auth"
@@ -40,6 +41,14 @@ func main() {
log.Fatalf("Failed to initialize storage: %v", err)
}
// Check if Blender is available (required for metadata extraction)
if err := checkBlenderAvailable(); err != nil {
log.Fatalf("Blender is not available: %v\n"+
"The manager requires Blender to be installed and in PATH for metadata extraction.\n"+
"Please install Blender and ensure it's accessible via the 'blender' command.", err)
}
log.Printf("Blender is available")
// Create API server
server, err := api.NewServer(db, authHandler, storageHandler)
if err != nil {
@@ -76,3 +85,14 @@ func getEnv(key, defaultValue string) string {
}
return defaultValue
}
// checkBlenderAvailable checks if Blender is available by running `blender --version`
func checkBlenderAvailable() error {
cmd := exec.Command("blender", "--version")
output, err := cmd.CombinedOutput()
if err != nil {
return fmt.Errorf("failed to run 'blender --version': %w (output: %s)", err, string(output))
}
// If we got here, Blender is available
return nil
}

File diff suppressed because it is too large

View File

@@ -1,11 +1,21 @@
package api
import (
"archive/tar"
"bufio"
"bytes"
"compress/gzip"
"database/sql"
"encoding/json"
"errors"
"fmt"
"io"
"log"
"net/http"
"os"
"os/exec"
"path/filepath"
"strings"
"jiggablend/pkg/types"
)
@@ -156,3 +166,375 @@ func (s *Server) handleGetJobMetadata(w http.ResponseWriter, r *http.Request) {
s.respondJSON(w, http.StatusOK, metadata)
}
// extractMetadataFromContext extracts metadata from the blend file in a context archive
// Returns the extracted metadata or an error
func (s *Server) extractMetadataFromContext(jobID int64) (*types.BlendMetadata, error) {
contextPath := filepath.Join(s.storage.JobPath(jobID), "context.tar.gz")
// Check if context exists
if _, err := os.Stat(contextPath); err != nil {
return nil, fmt.Errorf("context archive not found: %w", err)
}
// Create temporary directory for extraction
tmpDir, err := os.MkdirTemp("", fmt.Sprintf("fuego-metadata-%d-*", jobID))
if err != nil {
return nil, fmt.Errorf("failed to create temporary directory: %w", err)
}
defer os.RemoveAll(tmpDir)
// Extract context archive
if err := s.extractTarGz(contextPath, tmpDir); err != nil {
return nil, fmt.Errorf("failed to extract context: %w", err)
}
// Find .blend file in extracted contents
blendFile := ""
err = filepath.Walk(tmpDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".blend") {
// Blender save files (.blend1, .blend2, etc.) end in digits, so they already fail
// the ".blend" suffix check above; anything that reaches here is a real blend file.
blendFile = path
return filepath.SkipAll // Stop walking once we find a blend file
}
return nil
})
if err != nil {
return nil, fmt.Errorf("failed to find blend file: %w", err)
}
if blendFile == "" {
return nil, fmt.Errorf("no .blend file found in context")
}
// Create Python script to extract metadata
scriptPath := filepath.Join(tmpDir, "extract_metadata.py")
scriptContent := `import bpy
import json
import sys
# Make all file paths relative to the blend file location FIRST
# This must be done immediately after file load, before any other operations
# to prevent Blender from trying to access external files with absolute paths
try:
bpy.ops.file.make_paths_relative()
print("Made all file paths relative to blend file")
except Exception as e:
print(f"Warning: Could not make paths relative: {e}")
# Check for missing addons that the blend file requires
# Blender marks missing addons with "_missing" suffix in preferences
missing_files_info = {
"checked": False,
"has_missing": False,
"missing_files": [],
"missing_addons": []
}
try:
missing = []
for mod in bpy.context.preferences.addons:
if mod.module.endswith("_missing"):
missing.append(mod.module.rsplit("_", 1)[0])
missing_files_info["checked"] = True
if missing:
missing_files_info["has_missing"] = True
missing_files_info["missing_addons"] = missing
print("Missing add-ons required by this .blend:")
for name in missing:
print(" -", name)
else:
print("No missing add-ons detected file is headless-safe")
except Exception as e:
print(f"Warning: Could not check for missing addons: {e}")
missing_files_info["error"] = str(e)
# Get scene
scene = bpy.context.scene
# Extract frame range from scene settings
frame_start = scene.frame_start
frame_end = scene.frame_end
# Also check for actual animation range (keyframes)
# Find the earliest and latest keyframes across all objects
animation_start = None
animation_end = None
for obj in scene.objects:
if obj.animation_data and obj.animation_data.action:
action = obj.animation_data.action
if action.fcurves:
for fcurve in action.fcurves:
if fcurve.keyframe_points:
for keyframe in fcurve.keyframe_points:
frame = int(keyframe.co[0])
if animation_start is None or frame < animation_start:
animation_start = frame
if animation_end is None or frame > animation_end:
animation_end = frame
# Use animation range if available, otherwise use scene frame range
# If scene range seems wrong (start == end), prefer animation range
if animation_start is not None and animation_end is not None:
if frame_start == frame_end or (animation_start < frame_start or animation_end > frame_end):
# Use animation range if scene range is invalid or animation extends beyond it
frame_start = animation_start
frame_end = animation_end
# Extract render settings
render = scene.render
resolution_x = render.resolution_x
resolution_y = render.resolution_y
engine = scene.render.engine.upper()
# Determine output format from file format
output_format = render.image_settings.file_format
# Extract engine-specific settings
engine_settings = {}
if engine == 'CYCLES':
cycles = scene.cycles
engine_settings = {
"samples": getattr(cycles, 'samples', 128),
"use_denoising": getattr(cycles, 'use_denoising', False),
"denoising_radius": getattr(cycles, 'denoising_radius', 0),
"denoising_strength": getattr(cycles, 'denoising_strength', 0.0),
"device": getattr(cycles, 'device', 'CPU'),
"use_adaptive_sampling": getattr(cycles, 'use_adaptive_sampling', False),
"adaptive_threshold": getattr(cycles, 'adaptive_threshold', 0.01) if getattr(cycles, 'use_adaptive_sampling', False) else 0.01,
"use_fast_gi": getattr(cycles, 'use_fast_gi', False),
"light_tree": getattr(cycles, 'use_light_tree', False),
"use_light_linking": getattr(cycles, 'use_light_linking', False),
"caustics_reflective": getattr(cycles, 'caustics_reflective', False),
"caustics_refractive": getattr(cycles, 'caustics_refractive', False),
"blur_glossy": getattr(cycles, 'blur_glossy', 0.0),
"max_bounces": getattr(cycles, 'max_bounces', 12),
"diffuse_bounces": getattr(cycles, 'diffuse_bounces', 4),
"glossy_bounces": getattr(cycles, 'glossy_bounces', 4),
"transmission_bounces": getattr(cycles, 'transmission_bounces', 12),
"volume_bounces": getattr(cycles, 'volume_bounces', 0),
"transparent_max_bounces": getattr(cycles, 'transparent_max_bounces', 8),
"film_transparent": getattr(cycles, 'film_transparent', False),
"use_layer_samples": getattr(cycles, 'use_layer_samples', False),
}
# scene.render.engine reports EEVEE as 'BLENDER_EEVEE' / 'BLENDER_EEVEE_NEXT'
elif engine in ('EEVEE', 'EEVEE_NEXT', 'BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
eevee = scene.eevee
engine_settings = {
"taa_render_samples": getattr(eevee, 'taa_render_samples', 64),
"use_bloom": getattr(eevee, 'use_bloom', False),
"bloom_threshold": getattr(eevee, 'bloom_threshold', 0.8),
"bloom_intensity": getattr(eevee, 'bloom_intensity', 0.05),
"bloom_radius": getattr(eevee, 'bloom_radius', 6.5),
"use_ssr": getattr(eevee, 'use_ssr', True),
"use_ssr_refraction": getattr(eevee, 'use_ssr_refraction', False),
"ssr_quality": getattr(eevee, 'ssr_quality', 'MEDIUM'),
"use_ssao": getattr(eevee, 'use_ssao', True),
"ssao_quality": getattr(eevee, 'ssao_quality', 'MEDIUM'),
"ssao_distance": getattr(eevee, 'ssao_distance', 0.2),
"ssao_factor": getattr(eevee, 'ssao_factor', 1.0),
"use_soft_shadows": getattr(eevee, 'use_soft_shadows', True),
"use_shadow_high_bitdepth": getattr(eevee, 'use_shadow_high_bitdepth', True),
"use_volumetric": getattr(eevee, 'use_volumetric', False),
"volumetric_tile_size": getattr(eevee, 'volumetric_tile_size', '8'),
"volumetric_samples": getattr(eevee, 'volumetric_samples', 64),
"volumetric_start": getattr(eevee, 'volumetric_start', 0.0),
"volumetric_end": getattr(eevee, 'volumetric_end', 100.0),
"use_volumetric_lights": getattr(eevee, 'use_volumetric_lights', True),
"use_volumetric_shadows": getattr(eevee, 'use_volumetric_shadows', True),
"use_gtao": getattr(eevee, 'use_gtao', False),
"gtao_quality": getattr(eevee, 'gtao_quality', 'MEDIUM'),
"use_overscan": getattr(eevee, 'use_overscan', False),
}
else:
# For other engines, extract basic samples if available
engine_settings = {
"samples": getattr(scene, 'samples', 128) if hasattr(scene, 'samples') else 128
}
# Extract scene info
camera_count = len([obj for obj in scene.objects if obj.type == 'CAMERA'])
object_count = len(scene.objects)
material_count = len(bpy.data.materials)
# Build metadata dictionary
metadata = {
"frame_start": frame_start,
"frame_end": frame_end,
"render_settings": {
"resolution_x": resolution_x,
"resolution_y": resolution_y,
"output_format": output_format,
"engine": engine.lower(),
"engine_settings": engine_settings
},
"scene_info": {
"camera_count": camera_count,
"object_count": object_count,
"material_count": material_count
},
"missing_files_info": missing_files_info
}
# Output as JSON
print(json.dumps(metadata))
sys.stdout.flush()
`
if err := os.WriteFile(scriptPath, []byte(scriptContent), 0644); err != nil {
return nil, fmt.Errorf("failed to create extraction script: %w", err)
}
// Execute Blender with Python script
cmd := exec.Command("blender", "-b", blendFile, "--python", scriptPath)
cmd.Dir = tmpDir
// Capture stdout and stderr
stdoutPipe, err := cmd.StdoutPipe()
if err != nil {
return nil, fmt.Errorf("failed to create stdout pipe: %w", err)
}
stderrPipe, err := cmd.StderrPipe()
if err != nil {
return nil, fmt.Errorf("failed to create stderr pipe: %w", err)
}
// Buffer to collect stdout for JSON parsing
var stdoutBuffer bytes.Buffer
// Start the command
if err := cmd.Start(); err != nil {
return nil, fmt.Errorf("failed to start blender: %w", err)
}
// Stream stdout and collect for JSON parsing
stdoutDone := make(chan bool)
go func() {
defer close(stdoutDone)
scanner := bufio.NewScanner(stdoutPipe)
for scanner.Scan() {
line := scanner.Text()
stdoutBuffer.WriteString(line)
stdoutBuffer.WriteString("\n")
}
}()
// Stream stderr (discard for now, but could log if needed)
stderrDone := make(chan bool)
go func() {
defer close(stderrDone)
scanner := bufio.NewScanner(stderrPipe)
for scanner.Scan() {
// Could log stderr if needed
_ = scanner.Text()
}
}()
// Wait for command to complete
err = cmd.Wait()
// Wait for streaming goroutines to finish
<-stdoutDone
<-stderrDone
if err != nil {
return nil, fmt.Errorf("blender metadata extraction failed: %w", err)
}
// Parse output (metadata is printed to stdout)
metadataJSON := strings.TrimSpace(stdoutBuffer.String())
// Extract JSON from output (Blender may print other stuff)
jsonStart := strings.Index(metadataJSON, "{")
jsonEnd := strings.LastIndex(metadataJSON, "}")
if jsonStart == -1 || jsonEnd == -1 || jsonEnd <= jsonStart {
return nil, errors.New("failed to extract JSON from Blender output")
}
metadataJSON = metadataJSON[jsonStart : jsonEnd+1]
var metadata types.BlendMetadata
if err := json.Unmarshal([]byte(metadataJSON), &metadata); err != nil {
return nil, fmt.Errorf("failed to parse metadata JSON: %w", err)
}
log.Printf("Metadata extracted for job %d: frame_start=%d, frame_end=%d", jobID, metadata.FrameStart, metadata.FrameEnd)
return &metadata, nil
}
// extractTarGz extracts a tar.gz archive to a destination directory
func (s *Server) extractTarGz(tarGzPath, destDir string) error {
file, err := os.Open(tarGzPath)
if err != nil {
return fmt.Errorf("failed to open archive: %w", err)
}
defer file.Close()
gzr, err := gzip.NewReader(file)
if err != nil {
return fmt.Errorf("failed to create gzip reader: %w", err)
}
defer gzr.Close()
tr := tar.NewReader(gzr)
for {
header, err := tr.Next()
if err == io.EOF {
break
}
if err != nil {
return fmt.Errorf("failed to read tar header: %w", err)
}
// Sanitize path to prevent directory traversal
target := filepath.Join(destDir, header.Name)
// Ensure target is within destDir
if !strings.HasPrefix(filepath.Clean(target), filepath.Clean(destDir)+string(os.PathSeparator)) {
return fmt.Errorf("invalid file path in archive: %s", header.Name)
}
// Create parent directories
if err := os.MkdirAll(filepath.Dir(target), 0755); err != nil {
return fmt.Errorf("failed to create directory: %w", err)
}
// Write file
if header.Typeflag == tar.TypeReg {
outFile, err := os.Create(target)
if err != nil {
return fmt.Errorf("failed to create file: %w", err)
}
if _, err := io.Copy(outFile, tr); err != nil {
outFile.Close()
return fmt.Errorf("failed to write file: %w", err)
}
outFile.Close()
}
}
return nil
}

View File

@@ -17,7 +17,6 @@ import (
"jiggablend/pkg/types"
"github.com/go-chi/chi/v5"
"github.com/gorilla/websocket"
)
@@ -294,51 +293,37 @@ func (s *Server) handleUpdateTaskStep(w http.ResponseWriter, r *http.Request) {
})
}
// handleDownloadFileForRunner allows runners to download job files
func (s *Server) handleDownloadFileForRunner(w http.ResponseWriter, r *http.Request) {
// handleDownloadJobContext allows runners to download the job context tar.gz
func (s *Server) handleDownloadJobContext(w http.ResponseWriter, r *http.Request) {
jobID, err := parseID(r, "jobId")
if err != nil {
s.respondError(w, http.StatusBadRequest, err.Error())
return
}
// Get the file path from the wildcard parameter (supports subdirectories)
filePathParam := chi.URLParam(r, "*")
if filePathParam == "" {
s.respondError(w, http.StatusBadRequest, "File path not specified")
return
}
// Remove leading slash if present
filePathParam = strings.TrimPrefix(filePathParam, "/")
// Construct the context file path
contextPath := filepath.Join(s.storage.JobPath(jobID), "context.tar.gz")
// Find the file in the database by matching file_name (which stores relative path)
var filePath string
var storedFileName string
err = s.db.QueryRow(
`SELECT file_path, file_name FROM job_files WHERE job_id = ? AND file_name = ?`,
jobID, filePathParam,
).Scan(&filePath, &storedFileName)
if err == sql.ErrNoRows {
s.respondError(w, http.StatusNotFound, "File not found")
return
}
if err != nil {
s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to query file: %v", err))
// Check if context file exists
if !s.storage.FileExists(contextPath) {
log.Printf("Context archive not found for job %d", jobID)
s.respondError(w, http.StatusNotFound, "Context archive not found. The file may not have been uploaded successfully.")
return
}
// Open and serve file
file, err := s.storage.GetFile(filePath)
file, err := s.storage.GetFile(contextPath)
if err != nil {
s.respondError(w, http.StatusNotFound, "File not found on disk")
s.respondError(w, http.StatusNotFound, "Context file not found on disk")
return
}
defer file.Close()
// Use the stored file name for the download (preserves original filename)
downloadFileName := filepath.Base(storedFileName)
w.Header().Set("Content-Type", "application/octet-stream")
w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s", downloadFileName))
// Set appropriate headers for tar.gz file
w.Header().Set("Content-Type", "application/gzip")
w.Header().Set("Content-Disposition", "attachment; filename=context.tar.gz")
// Stream the file to the response
io.Copy(w, file)
}
@@ -488,6 +473,43 @@ func (s *Server) handleGetJobFilesForRunner(w http.ResponseWriter, r *http.Reque
s.respondJSON(w, http.StatusOK, files)
}
// handleGetJobMetadataForRunner allows runners to get job metadata
func (s *Server) handleGetJobMetadataForRunner(w http.ResponseWriter, r *http.Request) {
jobID, err := parseID(r, "jobId")
if err != nil {
s.respondError(w, http.StatusBadRequest, err.Error())
return
}
var blendMetadataJSON sql.NullString
err = s.db.QueryRow(
`SELECT blend_metadata FROM jobs WHERE id = ?`,
jobID,
).Scan(&blendMetadataJSON)
if err == sql.ErrNoRows {
s.respondError(w, http.StatusNotFound, "Job not found")
return
}
if err != nil {
s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to query job: %v", err))
return
}
if !blendMetadataJSON.Valid || blendMetadataJSON.String == "" {
s.respondJSON(w, http.StatusOK, nil)
return
}
var metadata types.BlendMetadata
if err := json.Unmarshal([]byte(blendMetadataJSON.String), &metadata); err != nil {
s.respondError(w, http.StatusInternalServerError, "Failed to parse metadata")
return
}
s.respondJSON(w, http.StatusOK, metadata)
}
// WebSocket message types
type WSMessage struct {
Type string `json:"type"`
@@ -1020,7 +1042,7 @@ func (s *Server) updateJobStatusFromTasks(jobID int64) {
log.Printf("Updated job %d status to %s (progress: %.1f%%, completed tasks: %d/%d)", jobID, jobStatus, progress, completedTasks, totalTasks)
}
if outputFormatStr == "MP4" {
if outputFormatStr == "EXR_264_MP4" || outputFormatStr == "EXR_AV1_MP4" {
// Check if a video generation task already exists for this job (any status)
var existingVideoTask int
s.db.QueryRow(
@@ -1603,6 +1625,9 @@ func (s *Server) assignTaskToRunner(runnerID int64, taskID int64) error {
task.JobName = jobName
if outputFormat.Valid {
task.OutputFormat = outputFormat.String
log.Printf("Task %d assigned with output_format: '%s' (from job %d)", taskID, outputFormat.String, task.JobID)
} else {
log.Printf("Task %d assigned with no output_format (job %d)", taskID, task.JobID)
}
task.TaskType = taskType

View File

@@ -116,12 +116,14 @@ func (s *Server) setupRoutes() {
return http.HandlerFunc(s.auth.Middleware(next.ServeHTTP))
})
r.Post("/", s.handleCreateJob)
r.Post("/upload", s.handleUploadFileForJobCreation) // Upload before job creation
r.Get("/", s.handleListJobs)
r.Get("/{id}", s.handleGetJob)
r.Delete("/{id}", s.handleCancelJob)
r.Post("/{id}/delete", s.handleDeleteJob)
r.Post("/{id}/upload", s.handleUploadJobFile)
r.Get("/{id}/files", s.handleListJobFiles)
r.Get("/{id}/context", s.handleListContextArchive)
r.Get("/{id}/files/{fileId}/download", s.handleDownloadJobFile)
r.Get("/{id}/video", s.handleStreamVideo)
r.Get("/{id}/metadata", s.handleGetJobMetadata)
@@ -179,10 +181,11 @@ func (s *Server) setupRoutes() {
})
r.Post("/tasks/{id}/progress", s.handleUpdateTaskProgress)
r.Post("/tasks/{id}/steps", s.handleUpdateTaskStep)
r.Get("/files/{jobId}/*", s.handleDownloadFileForRunner)
r.Get("/jobs/{jobId}/context.tar.gz", s.handleDownloadJobContext)
r.Post("/files/{jobId}/upload", s.handleUploadFileFromRunner)
r.Get("/jobs/{jobId}/status", s.handleGetJobStatusForRunner)
r.Get("/jobs/{jobId}/files", s.handleGetJobFilesForRunner)
r.Get("/jobs/{jobId}/metadata", s.handleGetJobMetadataForRunner)
r.Post("/jobs/{jobId}/metadata", s.handleSubmitMetadata)
})
})
@@ -508,7 +511,7 @@ func parseID(r *http.Request, param string) (int64, error) {
// StartBackgroundTasks starts background goroutines for error recovery
func (s *Server) StartBackgroundTasks() {
go s.recoverStuckTasks()
go s.cleanupOldMetadataJobs()
go s.cleanupOldRenderJobs()
}
// recoverStuckTasks periodically checks for dead runners and stuck tasks

File diff suppressed because it is too large

View File

@@ -1,7 +1,9 @@
package storage
import (
"archive/tar"
"archive/zip"
"compress/gzip"
"fmt"
"io"
"os"
@@ -194,3 +196,377 @@ func (s *Storage) ExtractZip(zipPath, destDir string) ([]string, error) {
return extractedFiles, nil
}
// findCommonPrefix finds the common leading directory prefix if all paths share the same first-level directory
// Returns the prefix to strip (with trailing slash) or empty string if no common prefix
func findCommonPrefix(relPaths []string) string {
if len(relPaths) == 0 {
return ""
}
// Get the first path component of each path
firstComponents := make([]string, 0, len(relPaths))
for _, path := range relPaths {
parts := strings.Split(filepath.ToSlash(path), "/")
if len(parts) > 0 && parts[0] != "" {
firstComponents = append(firstComponents, parts[0])
} else {
// If any path is at root level, no common prefix
return ""
}
}
// Check if all first components are the same
if len(firstComponents) == 0 {
return ""
}
commonFirst := firstComponents[0]
for _, comp := range firstComponents {
if comp != commonFirst {
// Not all paths share the same first directory
return ""
}
}
// All paths share the same first directory - return it with trailing slash
return commonFirst + "/"
}
// isBlenderSaveFile checks if a filename is a Blender save file (.blend1, .blend2, etc.)
// Returns true for files like "file.blend1", "file.blend2", but false for "file.blend"
func isBlenderSaveFile(filename string) bool {
lower := strings.ToLower(filename)
// Check if it ends with .blend followed by one or more digits
// Pattern: *.blend[digits]
if !strings.HasSuffix(lower, ".blend") {
// Doesn't end with .blend, check if it ends with .blend + digits
idx := strings.LastIndex(lower, ".blend")
if idx == -1 {
return false
}
// Check if there are digits after .blend
suffix := lower[idx+len(".blend"):]
if len(suffix) == 0 {
return false
}
// All remaining characters must be digits
for _, r := range suffix {
if r < '0' || r > '9' {
return false
}
}
return true
}
// Ends with .blend exactly - this is a regular blend file, not a save file
return false
}
// CreateJobContext creates a tar.gz archive containing all job input files
// Filters out Blender save files (.blend1, .blend2, etc.)
// Streams files directly into the archive so large inputs are never fully buffered in memory
func (s *Storage) CreateJobContext(jobID int64) (string, error) {
jobPath := s.JobPath(jobID)
contextPath := filepath.Join(jobPath, "context.tar.gz")
// Collect all files from job directory, excluding the context file itself and Blender save files
var filesToInclude []string
err := filepath.Walk(jobPath, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
// Skip directories
if info.IsDir() {
return nil
}
// Skip the context file itself if it exists
if path == contextPath {
return nil
}
// Skip Blender save files
if isBlenderSaveFile(info.Name()) {
return nil
}
// Get relative path from job directory
relPath, err := filepath.Rel(jobPath, path)
if err != nil {
return err
}
// Sanitize path - ensure it doesn't escape the job directory
cleanRelPath := filepath.Clean(relPath)
if strings.HasPrefix(cleanRelPath, "..") {
return fmt.Errorf("invalid file path: %s", relPath)
}
filesToInclude = append(filesToInclude, path)
return nil
})
if err != nil {
return "", fmt.Errorf("failed to walk job directory: %w", err)
}
if len(filesToInclude) == 0 {
return "", fmt.Errorf("no files found to include in context")
}
// Create the tar.gz file using streaming
contextFile, err := os.Create(contextPath)
if err != nil {
return "", fmt.Errorf("failed to create context file: %w", err)
}
defer contextFile.Close()
gzWriter := gzip.NewWriter(contextFile)
defer gzWriter.Close()
tarWriter := tar.NewWriter(gzWriter)
defer tarWriter.Close()
// Add each file to the tar archive
for _, filePath := range filesToInclude {
file, err := os.Open(filePath)
if err != nil {
return "", fmt.Errorf("failed to open file %s: %w", filePath, err)
}
info, err := file.Stat()
if err != nil {
file.Close()
return "", fmt.Errorf("failed to stat file %s: %w", filePath, err)
}
// Get relative path for tar header
relPath, err := filepath.Rel(jobPath, filePath)
if err != nil {
file.Close()
return "", fmt.Errorf("failed to get relative path for %s: %w", filePath, err)
}
// Normalize path separators for tar (use forward slashes)
tarPath := filepath.ToSlash(relPath)
// Create tar header
header, err := tar.FileInfoHeader(info, "")
if err != nil {
file.Close()
return "", fmt.Errorf("failed to create tar header for %s: %w", filePath, err)
}
header.Name = tarPath
// Write header
if err := tarWriter.WriteHeader(header); err != nil {
file.Close()
return "", fmt.Errorf("failed to write tar header for %s: %w", filePath, err)
}
// Copy file contents using streaming
if _, err := io.Copy(tarWriter, file); err != nil {
file.Close()
return "", fmt.Errorf("failed to write file %s to tar: %w", filePath, err)
}
file.Close()
}
// Ensure all data is flushed
if err := tarWriter.Close(); err != nil {
return "", fmt.Errorf("failed to close tar writer: %w", err)
}
if err := gzWriter.Close(); err != nil {
return "", fmt.Errorf("failed to close gzip writer: %w", err)
}
if err := contextFile.Close(); err != nil {
return "", fmt.Errorf("failed to close context file: %w", err)
}
return contextPath, nil
}
// CreateJobContextFromDir creates a context archive (tar.gz) from files in a source directory
// This is used during upload to immediately create the context archive as the primary artifact
// excludeFiles is a set of relative paths (from sourceDir) to exclude from the context
func (s *Storage) CreateJobContextFromDir(sourceDir string, jobID int64, excludeFiles ...string) (string, error) {
jobPath := s.JobPath(jobID)
contextPath := filepath.Join(jobPath, "context.tar.gz")
// Ensure job directory exists
if err := os.MkdirAll(jobPath, 0755); err != nil {
return "", fmt.Errorf("failed to create job directory: %w", err)
}
// Build set of files to exclude (normalize paths)
excludeSet := make(map[string]bool)
for _, excludeFile := range excludeFiles {
// Normalize the exclude path
excludePath := filepath.Clean(excludeFile)
excludeSet[excludePath] = true
// Also add with forward slash for cross-platform compatibility
excludeSet[filepath.ToSlash(excludePath)] = true
}
// Collect all files from source directory, excluding Blender save files and excluded files
var filesToInclude []string
err := filepath.Walk(sourceDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
// Skip directories
if info.IsDir() {
return nil
}
// Skip Blender save files
if isBlenderSaveFile(info.Name()) {
return nil
}
// Get relative path from source directory
relPath, err := filepath.Rel(sourceDir, path)
if err != nil {
return err
}
// Sanitize path - ensure it doesn't escape the source directory
cleanRelPath := filepath.Clean(relPath)
if strings.HasPrefix(cleanRelPath, "..") {
return fmt.Errorf("invalid file path: %s", relPath)
}
// Check if this file should be excluded
if excludeSet[cleanRelPath] || excludeSet[filepath.ToSlash(cleanRelPath)] {
return nil
}
filesToInclude = append(filesToInclude, path)
return nil
})
if err != nil {
return "", fmt.Errorf("failed to walk source directory: %w", err)
}
if len(filesToInclude) == 0 {
return "", fmt.Errorf("no files found to include in context archive")
}
// Collect relative paths to find common prefix
relPaths := make([]string, 0, len(filesToInclude))
for _, filePath := range filesToInclude {
relPath, err := filepath.Rel(sourceDir, filePath)
if err != nil {
return "", fmt.Errorf("failed to get relative path for %s: %w", filePath, err)
}
relPaths = append(relPaths, relPath)
}
// Find and strip common leading directory if all files share one
commonPrefix := findCommonPrefix(relPaths)
// Validate that there's exactly one .blend file at the root level after prefix stripping
blendFilesAtRoot := 0
for _, relPath := range relPaths {
tarPath := filepath.ToSlash(relPath)
// Strip common prefix if present
if commonPrefix != "" && strings.HasPrefix(tarPath, commonPrefix) {
tarPath = strings.TrimPrefix(tarPath, commonPrefix)
}
// Check if it's a .blend file at root (no path separators after prefix stripping)
if strings.HasSuffix(strings.ToLower(tarPath), ".blend") {
// Check if it's at root level (no directory separators)
if !strings.Contains(tarPath, "/") {
blendFilesAtRoot++
}
}
}
if blendFilesAtRoot == 0 {
return "", fmt.Errorf("no .blend file found at root level in context archive")
}
if blendFilesAtRoot > 1 {
return "", fmt.Errorf("multiple .blend files found at root level in context archive (found %d, expected 1)", blendFilesAtRoot)
}
// Create the tar.gz file using streaming
contextFile, err := os.Create(contextPath)
if err != nil {
return "", fmt.Errorf("failed to create context file: %w", err)
}
defer contextFile.Close()
gzWriter := gzip.NewWriter(contextFile)
defer gzWriter.Close()
tarWriter := tar.NewWriter(gzWriter)
defer tarWriter.Close()
// Add each file to the tar archive
for i, filePath := range filesToInclude {
file, err := os.Open(filePath)
if err != nil {
return "", fmt.Errorf("failed to open file %s: %w", filePath, err)
}
info, err := file.Stat()
if err != nil {
file.Close()
return "", fmt.Errorf("failed to stat file %s: %w", filePath, err)
}
// Get relative path and strip common prefix if present
relPath := relPaths[i]
tarPath := filepath.ToSlash(relPath)
// Strip common prefix if found
if commonPrefix != "" && strings.HasPrefix(tarPath, commonPrefix) {
tarPath = strings.TrimPrefix(tarPath, commonPrefix)
}
// Create tar header
header, err := tar.FileInfoHeader(info, "")
if err != nil {
file.Close()
return "", fmt.Errorf("failed to create tar header for %s: %w", filePath, err)
}
header.Name = tarPath
// Write header
if err := tarWriter.WriteHeader(header); err != nil {
file.Close()
return "", fmt.Errorf("failed to write tar header for %s: %w", filePath, err)
}
// Copy file contents using streaming
if _, err := io.Copy(tarWriter, file); err != nil {
file.Close()
return "", fmt.Errorf("failed to write file %s to tar: %w", filePath, err)
}
file.Close()
}
// Ensure all data is flushed
if err := tarWriter.Close(); err != nil {
return "", fmt.Errorf("failed to close tar writer: %w", err)
}
if err := gzWriter.Close(); err != nil {
return "", fmt.Errorf("failed to close gzip writer: %w", err)
}
if err := contextFile.Close(); err != nil {
return "", fmt.Errorf("failed to close context file: %w", err)
}
return contextPath, nil
}

View File

@@ -27,15 +27,14 @@ const (
type JobType string
const (
JobTypeMetadata JobType = "metadata" // Metadata extraction job - only needs blend file
JobTypeRender JobType = "render" // Render job - needs frame range, format, etc.
JobTypeRender JobType = "render" // Render job - needs frame range, format, etc.
)
// Job represents a job (metadata extraction or render)
// Job represents a render job
type Job struct {
ID int64 `json:"id"`
UserID int64 `json:"user_id"`
JobType JobType `json:"job_type"` // "metadata" or "render"
JobType JobType `json:"job_type"` // "render"
Name string `json:"name"`
Status JobStatus `json:"status"`
Progress float64 `json:"progress"` // 0.0 to 100.0
@@ -133,13 +132,14 @@ type JobFile struct {
// CreateJobRequest represents a request to create a new job
type CreateJobRequest struct {
JobType JobType `json:"job_type"` // "metadata" or "render"
Name string `json:"name"`
FrameStart *int `json:"frame_start,omitempty"` // Required for render jobs
FrameEnd *int `json:"frame_end,omitempty"` // Required for render jobs
OutputFormat *string `json:"output_format,omitempty"` // Required for render jobs
AllowParallelRunners *bool `json:"allow_parallel_runners,omitempty"` // Optional for render jobs, defaults to true
MetadataJobID *int64 `json:"metadata_job_id,omitempty"` // Optional: ID of metadata job to copy input files from
JobType JobType `json:"job_type"` // "render"
Name string `json:"name"`
FrameStart *int `json:"frame_start,omitempty"` // Required for render jobs
FrameEnd *int `json:"frame_end,omitempty"` // Required for render jobs
OutputFormat *string `json:"output_format,omitempty"` // Required for render jobs
AllowParallelRunners *bool `json:"allow_parallel_runners,omitempty"` // Optional for render jobs, defaults to true
RenderSettings *RenderSettings `json:"render_settings,omitempty"` // Optional: Override blend file render settings
UploadSessionID *string `json:"upload_session_id,omitempty"` // Optional: Session ID from file upload
}
// UpdateJobProgressRequest represents a request to update job progress
@@ -225,19 +225,30 @@ type TaskLogEntry struct {
// BlendMetadata represents extracted metadata from a blend file
type BlendMetadata struct {
FrameStart int `json:"frame_start"`
FrameEnd int `json:"frame_end"`
RenderSettings RenderSettings `json:"render_settings"`
SceneInfo SceneInfo `json:"scene_info"`
FrameStart int `json:"frame_start"`
FrameEnd int `json:"frame_end"`
RenderSettings RenderSettings `json:"render_settings"`
SceneInfo SceneInfo `json:"scene_info"`
MissingFilesInfo *MissingFilesInfo `json:"missing_files_info,omitempty"`
}
// MissingFilesInfo represents information about missing files/addons
type MissingFilesInfo struct {
Checked bool `json:"checked"`
HasMissing bool `json:"has_missing"`
MissingFiles []string `json:"missing_files,omitempty"`
MissingAddons []string `json:"missing_addons,omitempty"`
Error string `json:"error,omitempty"`
}
// RenderSettings represents render settings from a blend file
type RenderSettings struct {
ResolutionX int `json:"resolution_x"`
ResolutionY int `json:"resolution_y"`
Samples int `json:"samples"`
OutputFormat string `json:"output_format"`
Engine string `json:"engine"`
ResolutionX int `json:"resolution_x"`
ResolutionY int `json:"resolution_y"`
Samples int `json:"samples,omitempty"` // Deprecated, use EngineSettings
OutputFormat string `json:"output_format"`
Engine string `json:"engine"`
EngineSettings map[string]interface{} `json:"engine_settings,omitempty"`
}
// SceneInfo represents scene information from a blend file

View File

@@ -0,0 +1,154 @@
import { useState } from 'react';
export default function FileExplorer({ files, onDownload, onPreview, isImageFile }) {
const [expandedPaths, setExpandedPaths] = useState(new Set());
// Build directory tree from file paths
const buildTree = (files) => {
const tree = {};
files.forEach(file => {
const path = file.file_name;
// Handle both paths with slashes and single filenames
const parts = path.includes('/') ? path.split('/').filter(p => p) : [path];
// If it's a single file at root (no slashes), treat it specially
if (parts.length === 1 && !path.includes('/')) {
tree[parts[0]] = {
name: parts[0],
isFile: true,
file: file,
children: {},
path: parts[0]
};
return;
}
let current = tree;
parts.forEach((part, index) => {
if (!current[part]) {
current[part] = {
name: part,
isFile: index === parts.length - 1,
file: index === parts.length - 1 ? file : null,
children: {},
path: parts.slice(0, index + 1).join('/')
};
}
current = current[part].children;
});
});
return tree;
};
const togglePath = (path) => {
const newExpanded = new Set(expandedPaths);
if (newExpanded.has(path)) {
newExpanded.delete(path);
} else {
newExpanded.add(path);
}
setExpandedPaths(newExpanded);
};
const renderTree = (node, level = 0, parentPath = '') => {
const items = Object.values(node).sort((a, b) => {
// Directories first, then files
if (a.isFile !== b.isFile) {
return a.isFile ? 1 : -1;
}
return a.name.localeCompare(b.name);
});
return items.map((item) => {
const fullPath = parentPath ? `${parentPath}/${item.name}` : item.name;
const isExpanded = expandedPaths.has(fullPath);
const indent = level * 20;
if (item.isFile) {
const file = item.file;
const isImage = isImageFile && isImageFile(file.file_name);
const sizeMB = (file.file_size / 1024 / 1024).toFixed(2);
const isArchive = file.file_name.endsWith('.tar.gz') || file.file_name.endsWith('.zip');
return (
<div key={fullPath} className="flex items-center justify-between py-1.5 hover:bg-gray-800/50 rounded px-2" style={{ paddingLeft: `${indent + 8}px` }}>
<div className="flex items-center gap-2 flex-1 min-w-0">
<span className="text-gray-500 text-sm">{isArchive ? '📦' : '📄'}</span>
<span className="text-gray-200 text-sm truncate" title={item.name}>
{item.name}
</span>
<span className="text-gray-500 text-xs ml-2">{sizeMB} MB</span>
</div>
<div className="flex gap-2 ml-4 shrink-0">
{isImage && onPreview && (
<button
onClick={() => onPreview(file)}
className="px-2 py-1 bg-blue-600 text-white rounded text-xs hover:bg-blue-500 transition-colors"
title="Preview"
>
👁
</button>
)}
{onDownload && file.id && (
<button
onClick={() => onDownload(file.id, file.file_name)}
className="px-2 py-1 bg-orange-600 text-white rounded text-xs hover:bg-orange-500 transition-colors"
title="Download"
>
⬇
</button>
)}
</div>
</div>
);
} else {
const hasChildren = Object.keys(item.children).length > 0;
return (
<div key={fullPath}>
<div
className="flex items-center gap-2 py-1 hover:bg-gray-800/50 rounded px-2 cursor-pointer"
style={{ paddingLeft: `${indent + 8}px` }}
onClick={() => hasChildren && togglePath(fullPath)}
>
<span className="text-gray-500 text-sm">
{hasChildren ? (isExpanded ? '📂' : '📁') : '📁'}
</span>
<span className="text-gray-300 text-sm font-medium">{item.name}</span>
{hasChildren && (
<span className="text-gray-500 text-xs ml-2">
({Object.keys(item.children).length})
</span>
)}
</div>
{hasChildren && isExpanded && (
<div>
{renderTree(item.children, level + 1, fullPath)}
</div>
)}
</div>
);
}
});
};
const tree = buildTree(files);
if (Object.keys(tree).length === 0) {
return (
<div className="text-gray-400 text-sm py-4 text-center">
No files
</div>
);
}
return (
<div className="bg-gray-900 rounded-lg border border-gray-700 p-3">
<div className="space-y-1">
{renderTree(tree)}
</div>
</div>
);
}

View File

@@ -1,10 +1,12 @@
import { useState, useEffect, useRef } from 'react';
import { jobs } from '../utils/api';
import VideoPlayer from './VideoPlayer';
import FileExplorer from './FileExplorer';
export default function JobDetails({ job, onClose, onUpdate }) {
const [jobDetails, setJobDetails] = useState(job);
const [files, setFiles] = useState([]);
const [contextFiles, setContextFiles] = useState([]);
const [tasks, setTasks] = useState([]);
const [loading, setLoading] = useState(true);
const [videoUrl, setVideoUrl] = useState(null);
@@ -89,6 +91,15 @@ export default function JobDetails({ job, onClose, onUpdate }) {
setJobDetails(details);
setFiles(fileList);
setTasks(taskList);
// Fetch context archive contents separately (may not exist for old jobs)
try {
const contextList = await jobs.getContextArchive(job.id);
setContextFiles(contextList || []);
} catch (error) {
// Context archive may not exist for old jobs
setContextFiles([]);
}
// Only load task data (logs/steps) for tasks that don't have data yet
// This prevents overwriting logs that are being streamed via WebSocket
@@ -446,7 +457,7 @@ export default function JobDetails({ job, onClose, onUpdate }) {
</div>
</div>
{videoUrl && jobDetails.output_format === 'MP4' && (
{videoUrl && (jobDetails.output_format === 'EXR_264_MP4' || jobDetails.output_format === 'EXR_AV1_MP4') && (
<div>
<h3 className="text-lg font-semibold text-gray-100 mb-3">
Video Preview
@@ -455,68 +466,38 @@ export default function JobDetails({ job, onClose, onUpdate }) {
</div>
)}
{contextFiles.length > 0 && (
<div>
<h3 className="text-lg font-semibold text-gray-100 mb-3">
Context Archive
</h3>
<FileExplorer
files={contextFiles.map(f => ({
id: 0, // Context files don't have IDs
file_name: f.path || f.name || '',
file_size: f.size || 0,
file_type: 'input'
}))}
onDownload={null} // Context files can't be downloaded individually
isImageFile={isImageFile}
/>
</div>
)}
{outputFiles.length > 0 && (
<div>
<h3 className="text-lg font-semibold text-gray-100 mb-3">
Output Files
</h3>
<div className="space-y-2">
{outputFiles.map((file) => {
const isImage = isImageFile(file.file_name);
const imageUrl = isImage ? jobs.downloadFile(job.id, file.id) : null;
return (
<div
key={file.id}
className="flex items-center justify-between p-3 bg-gray-900 rounded-lg border border-gray-700"
>
<div className="flex-1">
<p className="font-medium text-gray-100">{file.file_name}</p>
<p className="text-sm text-gray-400">
{(file.file_size / 1024 / 1024).toFixed(2)} MB
</p>
</div>
<div className="flex gap-2">
{isImage && imageUrl && (
<button
onClick={() => setPreviewImage({ url: imageUrl, fileName: file.file_name })}
className="px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-500 transition-colors"
>
Preview
</button>
)}
<button
onClick={() => handleDownload(file.id, file.file_name)}
className="px-4 py-2 bg-orange-600 text-white rounded-lg hover:bg-orange-500 transition-colors"
>
Download
</button>
</div>
</div>
);
})}
</div>
</div>
)}
{inputFiles.length > 0 && (
<div>
<h3 className="text-lg font-semibold text-gray-100 mb-3">
Input Files
</h3>
<div className="space-y-2">
{inputFiles.map((file) => (
<div
key={file.id}
className="p-3 bg-gray-900 rounded-lg border border-gray-700"
>
<p className="font-medium text-gray-100">{file.file_name}</p>
<p className="text-sm text-gray-400">
{(file.file_size / 1024 / 1024).toFixed(2)} MB
</p>
</div>
))}
</div>
<FileExplorer
files={outputFiles}
onDownload={handleDownload}
onPreview={(file) => {
const imageUrl = jobs.downloadFile(job.id, file.id);
setPreviewImage({ url: imageUrl, fileName: file.file_name });
}}
isImageFile={isImageFile}
/>
</div>
)}

View File

@@ -24,6 +24,22 @@ export default function JobList() {
}
};
// Keep selectedJob in sync with the job list when it refreshes
// This prevents the selected job from becoming stale when format selection or other actions trigger updates
useEffect(() => {
if (selectedJob && jobList.length > 0) {
const freshJob = jobList.find(j => j.id === selectedJob.id);
if (freshJob) {
// Update to the fresh object from the list to keep it in sync
setSelectedJob(freshJob);
} else {
// Job was deleted or no longer exists, clear selection
setSelectedJob(null);
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [jobList]); // Only depend on jobList, not selectedJob to avoid infinite loops
const handleCancel = async (jobId) => {
if (!confirm('Are you sure you want to cancel this job?')) return;
try {

View File

@@ -10,13 +10,16 @@ export default function JobSubmission({ onSuccess }) {
frame_end: 10,
output_format: 'PNG',
allow_parallel_runners: true,
render_settings: null, // Will contain engine settings
});
const [showAdvancedSettings, setShowAdvancedSettings] = useState(false);
const [file, setFile] = useState(null);
const [submitting, setSubmitting] = useState(false);
const [error, setError] = useState('');
const [metadataStatus, setMetadataStatus] = useState(null); // 'extracting', 'completed', 'error'
const [metadata, setMetadata] = useState(null);
const [currentJobId, setCurrentJobId] = useState(null);
const [uploadSessionId, setUploadSessionId] = useState(null); // Session ID from file upload
const [createdJob, setCreatedJob] = useState(null);
const [uploadProgress, setUploadProgress] = useState(0);
const [isUploading, setIsUploading] = useState(false);
@@ -28,121 +31,15 @@ export default function JobSubmission({ onSuccess }) {
const isCompletedRef = useRef(false);
const currentJobIdRef = useRef(null);
const cleanupRef = useRef(null);
// Poll for metadata after file upload
const formatManuallyChangedRef = useRef(false); // Track if user manually changed output format
const stepRef = useRef(step); // Track current step to avoid stale closures
// Keep stepRef in sync with step state
useEffect(() => {
if (!currentJobId || metadataStatus !== 'extracting') {
// Reset refs when not extracting
isCancelledRef.current = false;
isCompletedRef.current = false;
currentJobIdRef.current = null;
// Clear any pending cleanup
if (cleanupRef.current) {
cleanupRef.current = null;
}
return;
}
stepRef.current = step;
}, [step]);
// Reset refs for new job
if (currentJobIdRef.current !== currentJobId) {
isCancelledRef.current = false;
isCompletedRef.current = false;
currentJobIdRef.current = currentJobId;
}
let pollCount = 0;
const maxPolls = 30; // 60 seconds max (30 * 2 seconds)
let timeoutId = null;
let interval = null;
let mounted = true; // Track if effect is still mounted
const pollMetadata = async () => {
if (!mounted || isCancelledRef.current || isCompletedRef.current) return;
pollCount++;
// Stop polling after timeout
if (pollCount > maxPolls) {
if (!mounted) return;
setMetadataStatus('error');
// Cancel temp job on timeout
try {
await jobs.cancel(currentJobId);
isCancelledRef.current = true;
} catch (err) {
// Ignore errors when canceling
}
return;
}
try {
const metadata = await jobs.getMetadata(currentJobId);
if (metadata && mounted) {
setMetadata(metadata);
setMetadataStatus('completed');
isCompletedRef.current = true; // Mark as completed
// Auto-populate form fields
setFormData(prev => ({
...prev,
frame_start: metadata.frame_start || prev.frame_start,
frame_end: metadata.frame_end || prev.frame_end,
output_format: metadata.render_settings?.output_format || prev.output_format,
}));
// Stop polling on success
if (interval) clearInterval(interval);
if (timeoutId) clearTimeout(timeoutId);
}
} catch (err) {
if (!mounted) return;
// Metadata not ready yet, continue polling (only if 404/not found)
if (err.message.includes('404') || err.message.includes('not found')) {
// Continue polling via interval
} else {
setMetadataStatus('error');
// Stop polling on error
if (interval) clearInterval(interval);
if (timeoutId) clearTimeout(timeoutId);
}
}
};
interval = setInterval(pollMetadata, 2000);
// Set timeout to stop polling after 60 seconds
timeoutId = setTimeout(() => {
if (!mounted) return;
if (interval) clearInterval(interval);
if (!isCancelledRef.current && !isCompletedRef.current) {
setMetadataStatus('error');
// Cancel temp job on timeout
jobs.cancel(currentJobId).catch(() => {});
isCancelledRef.current = true;
}
}, 60000);
// Store cleanup function in ref so we can check if it should run
cleanupRef.current = () => {
mounted = false;
if (interval) clearInterval(interval);
if (timeoutId) clearTimeout(timeoutId);
// DO NOT cancel the job in cleanup - let it run to completion
// The job will be cleaned up when the user submits the actual job or navigates away
};
return cleanupRef.current;
}, [currentJobId, metadataStatus]); // Include metadataStatus to properly track state changes
// Separate effect to handle component unmount - only cancel if truly unmounting
useEffect(() => {
return () => {
// Only cancel on actual component unmount, not on effect re-run
// Check if we're still extracting and haven't completed
if (currentJobIdRef.current && !isCompletedRef.current && !isCancelledRef.current && metadataStatus === 'extracting') {
// Only cancel if we're actually unmounting (not just re-rendering)
// This is a last resort - ideally we should let metadata extraction complete
jobs.cancel(currentJobIdRef.current).catch(() => {});
}
};
}, []); // Empty deps - only runs on mount/unmount
// No polling needed - metadata is extracted synchronously during upload
const handleFileChange = async (e) => {
const selectedFile = e.target.files[0];
@@ -155,36 +52,35 @@ export default function JobSubmission({ onSuccess }) {
setMetadataStatus(null);
setMetadata(null);
setCurrentJobId(null);
setUploadSessionId(null);
setUploadProgress(0);
setBlendFiles([]);
setSelectedMainBlend('');
formatManuallyChangedRef.current = false; // Reset when new file is selected
const isBlend = selectedFile.name.toLowerCase().endsWith('.blend');
const isZip = selectedFile.name.toLowerCase().endsWith('.zip');
// If it's a blend file or ZIP, create a temporary job to extract metadata
// If it's a blend file or ZIP, upload and extract metadata
if (isBlend || isZip) {
try {
setIsUploading(true);
setUploadProgress(0);
// Create a temporary job for metadata extraction
const tempJob = await jobs.create({
job_type: 'metadata',
name: 'Metadata Extraction',
});
setCurrentJobId(tempJob.id);
setMetadataStatus('extracting');
// Upload file to trigger metadata extraction with progress tracking
const result = await jobs.uploadFile(tempJob.id, selectedFile, (progress) => {
// Upload file to new endpoint (no job required)
const result = await jobs.uploadFileForJobCreation(selectedFile, (progress) => {
setUploadProgress(progress);
}, selectedMainBlend || undefined);
setUploadProgress(100);
setIsUploading(false);
// Store session ID for later use when creating the job
if (result.session_id) {
setUploadSessionId(result.session_id);
}
// Check if ZIP extraction found multiple blend files
if (result.zip_extracted && result.blend_files && result.blend_files.length > 1) {
setBlendFiles(result.blend_files);
@@ -192,21 +88,55 @@ export default function JobSubmission({ onSuccess }) {
return;
}
// If metadata was extracted, use it
if (result.metadata_extracted && result.metadata) {
setMetadata(result.metadata);
setMetadataStatus('completed');
isCompletedRef.current = true;
// Auto-populate form fields
let normalizedFormat = result.metadata.render_settings?.output_format;
if (normalizedFormat) {
const formatMap = {
'OPEN_EXR': 'EXR',
'EXR': 'EXR',
'PNG': 'PNG',
'JPEG': 'JPEG',
'JPG': 'JPEG',
};
normalizedFormat = formatMap[normalizedFormat.toUpperCase()] || normalizedFormat;
}
setFormData(prev => ({
...prev,
frame_start: result.metadata.frame_start || prev.frame_start,
frame_end: result.metadata.frame_end || prev.frame_end,
output_format: normalizedFormat || prev.output_format,
render_settings: result.metadata.render_settings ? {
...result.metadata.render_settings,
engine_settings: result.metadata.render_settings.engine_settings || {},
} : null,
}));
} else {
setMetadataStatus('error');
}
// If main blend file was auto-detected or specified, continue
if (result.main_blend_file) {
setSelectedMainBlend(result.main_blend_file);
}
} catch (err) {
console.error('Failed to start metadata extraction:', err);
console.error('Failed to upload file and extract metadata:', err);
setMetadataStatus('error');
setIsUploading(false);
setUploadProgress(0);
setError(err.message || 'Failed to upload file and extract metadata');
}
}
};
const handleBlendFileSelect = async () => {
if (!selectedMainBlend || !currentJobId) {
if (!selectedMainBlend || !file) {
setError('Please select a main blend file');
return;
}
@@ -214,20 +144,59 @@ export default function JobSubmission({ onSuccess }) {
try {
setIsUploading(true);
setUploadProgress(0);
setMetadataStatus('extracting');
// Re-upload with selected main blend file
const result = await jobs.uploadFile(currentJobId, file, (progress) => {
const result = await jobs.uploadFileForJobCreation(file, (progress) => {
setUploadProgress(progress);
}, selectedMainBlend);
setUploadProgress(100);
setIsUploading(false);
setBlendFiles([]);
setMetadataStatus('extracting');
// Store session ID
if (result.session_id) {
setUploadSessionId(result.session_id);
}
// If metadata was extracted, use it
if (result.metadata_extracted && result.metadata) {
setMetadata(result.metadata);
setMetadataStatus('completed');
isCompletedRef.current = true;
// Auto-populate form fields
let normalizedFormat = result.metadata.render_settings?.output_format;
if (normalizedFormat) {
const formatMap = {
'OPEN_EXR': 'EXR',
'EXR': 'EXR',
'PNG': 'PNG',
'JPEG': 'JPEG',
'JPG': 'JPEG',
};
normalizedFormat = formatMap[normalizedFormat.toUpperCase()] || normalizedFormat;
}
setFormData(prev => ({
...prev,
frame_start: result.metadata.frame_start || prev.frame_start,
frame_end: result.metadata.frame_end || prev.frame_end,
output_format: normalizedFormat || prev.output_format,
render_settings: result.metadata.render_settings ? {
...result.metadata.render_settings,
engine_settings: result.metadata.render_settings.engine_settings || {},
} : null,
}));
} else {
setMetadataStatus('error');
}
} catch (err) {
console.error('Failed to upload with selected blend file:', err);
setError(err.message || 'Failed to upload with selected blend file');
setIsUploading(false);
setMetadataStatus('error');
}
};
@@ -251,20 +220,23 @@ export default function JobSubmission({ onSuccess }) {
throw new Error('Please select a Blender file');
}
if (!uploadSessionId) {
throw new Error('File upload session not found. Please upload the file again.');
}
if (formData.frame_start < 0 || formData.frame_end < formData.frame_start) {
throw new Error('Invalid frame range');
}
// If we have a temporary job for metadata extraction, cancel it
if (currentJobId) {
try {
await jobs.cancel(currentJobId);
} catch (err) {
// Ignore errors when canceling temp job
}
}
// Create render job with upload session ID if we have one
const renderSettings = formData.render_settings && formData.render_settings.engine_settings ? {
engine: formData.render_settings.engine || 'cycles',
resolution_x: formData.render_settings.resolution_x || 1920,
resolution_y: formData.render_settings.resolution_y || 1080,
engine_settings: formData.render_settings.engine_settings,
} : null;
// Create actual render job, linking it to the metadata job if we have one
console.log('Submitting job with output_format:', formData.output_format, 'formatManuallyChanged:', formatManuallyChangedRef.current);
const job = await jobs.create({
job_type: 'render',
name: formData.name,
@@ -272,12 +244,10 @@ export default function JobSubmission({ onSuccess }) {
frame_end: parseInt(formData.frame_end),
output_format: formData.output_format,
allow_parallel_runners: formData.allow_parallel_runners,
metadata_job_id: currentJobId || undefined, // Link to metadata job to copy input files
render_settings: renderSettings,
upload_session_id: uploadSessionId || undefined, // Pass session ID to move context archive
});
// Note: File is already uploaded to metadata job, so we don't need to upload again
// The backend will copy the file reference from the metadata job
// Fetch the full job details
const jobDetails = await jobs.get(job.id);
@@ -298,11 +268,14 @@ export default function JobSubmission({ onSuccess }) {
frame_end: 10,
output_format: 'PNG',
allow_parallel_runners: true,
render_settings: null,
});
setShowAdvancedSettings(false);
setFile(null);
setMetadata(null);
setMetadataStatus(null);
setCurrentJobId(null);
formatManuallyChangedRef.current = false;
setStep(1);
if (onSuccess) {
onSuccess();
@@ -427,7 +400,12 @@ export default function JobSubmission({ onSuccess }) {
<div>Frames: {metadata.frame_start} - {metadata.frame_end}</div>
<div>Resolution: {metadata.render_settings?.resolution_x} x {metadata.render_settings?.resolution_y}</div>
<div>Engine: {metadata.render_settings?.engine}</div>
<div>Samples: {metadata.render_settings?.samples}</div>
{metadata.render_settings?.engine_settings?.samples && (
<div>Cycles Samples: {metadata.render_settings.engine_settings.samples}</div>
)}
{metadata.render_settings?.engine_settings?.taa_render_samples && (
<div>EEVEE Samples: {metadata.render_settings.engine_settings.taa_render_samples}</div>
)}
</div>
<button
type="button"
@@ -511,13 +489,17 @@ export default function JobSubmission({ onSuccess }) {
</label>
<select
value={formData.output_format}
onChange={(e) => setFormData({ ...formData, output_format: e.target.value })}
onChange={(e) => {
formatManuallyChangedRef.current = true;
setFormData({ ...formData, output_format: e.target.value });
}}
className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent"
>
<option value="PNG">PNG</option>
<option value="JPEG">JPEG</option>
<option value="EXR">EXR</option>
<option value="MP4">MP4</option>
<option value="EXR_264_MP4">EXR_264_MP4 (High Quality Video Without Alpha)</option>
<option value="EXR_AV1_MP4">EXR_AV1_MP4 (High Quality Video With Alpha)</option>
</select>
</div>
@@ -541,11 +523,347 @@ export default function JobSubmission({ onSuccess }) {
<div>Frames: {metadata.frame_start} - {metadata.frame_end}</div>
<div>Resolution: {metadata.render_settings?.resolution_x} x {metadata.render_settings?.resolution_y}</div>
<div>Engine: {metadata.render_settings?.engine}</div>
<div>Samples: {metadata.render_settings?.samples}</div>
{metadata.render_settings?.engine_settings?.samples && (
<div>Samples: {metadata.render_settings.engine_settings.samples}</div>
)}
{metadata.render_settings?.engine_settings?.taa_render_samples && (
<div>EEVEE Samples: {metadata.render_settings.engine_settings.taa_render_samples}</div>
)}
</div>
</div>
)}
{/* Advanced Render Settings */}
{formData.render_settings && (
<div className="border border-gray-700 rounded-lg p-4">
<button
type="button"
onClick={() => setShowAdvancedSettings(!showAdvancedSettings)}
className="w-full flex items-center justify-between text-left text-sm font-medium text-gray-300 hover:text-gray-100"
>
<span>Advanced Render Settings</span>
<span className="text-gray-500">{showAdvancedSettings ? '▼' : '▶'}</span>
</button>
{showAdvancedSettings && (
<div className="mt-4 space-y-4">
{/* Engine Selection */}
<div>
<label className="block text-sm font-medium text-gray-300 mb-2">
Render Engine
</label>
<select
value={formData.render_settings.engine || 'cycles'}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
engine: e.target.value,
}
})}
className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent"
>
<option value="cycles">Cycles</option>
<option value="eevee">EEVEE</option>
<option value="eevee_next">EEVEE Next</option>
</select>
</div>
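{/* The numeric fields below fall back to their defaults (e.g. 1920x1080, 128 samples)
    when the input is cleared, via the parseInt(...) || default pattern. */}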
{/* Resolution */}
<div className="grid grid-cols-2 gap-4">
<div>
<label className="block text-sm font-medium text-gray-300 mb-2">
Resolution X
</label>
<input
type="number"
value={formData.render_settings.resolution_x || 1920}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
resolution_x: parseInt(e.target.value) || 1920,
}
})}
min="1"
className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent"
/>
</div>
<div>
<label className="block text-sm font-medium text-gray-300 mb-2">
Resolution Y
</label>
<input
type="number"
value={formData.render_settings.resolution_y || 1080}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
resolution_y: parseInt(e.target.value) || 1080,
}
})}
min="1"
className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent"
/>
</div>
</div>
{/* Cycles Settings */}
{formData.render_settings.engine === 'cycles' && formData.render_settings.engine_settings && (
<div className="space-y-3 p-3 bg-gray-900/50 rounded-lg">
<div className="text-sm font-medium text-gray-300 mb-2">Cycles Settings</div>
<div>
<label className="block text-xs font-medium text-gray-400 mb-1">
Samples
</label>
<input
type="number"
value={formData.render_settings.engine_settings.samples || 128}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
engine_settings: {
...formData.render_settings.engine_settings,
samples: parseInt(e.target.value) || 128,
}
}
})}
min="1"
className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent"
/>
</div>
<div className="flex items-center">
<input
type="checkbox"
checked={formData.render_settings.engine_settings.use_denoising || false}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
engine_settings: {
...formData.render_settings.engine_settings,
use_denoising: e.target.checked,
}
}
})}
className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded"
/>
<label className="ml-2 block text-xs text-gray-400">
Use Denoising
</label>
</div>
<div className="flex items-center">
<input
type="checkbox"
checked={formData.render_settings.engine_settings.use_adaptive_sampling || false}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
engine_settings: {
...formData.render_settings.engine_settings,
use_adaptive_sampling: e.target.checked,
}
}
})}
className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded"
/>
<label className="ml-2 block text-xs text-gray-400">
Adaptive Sampling
</label>
</div>
<div>
<label className="block text-xs font-medium text-gray-400 mb-1">
Max Bounces
</label>
<input
type="number"
value={formData.render_settings.engine_settings.max_bounces || 12}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
engine_settings: {
...formData.render_settings.engine_settings,
max_bounces: parseInt(e.target.value) || 12,
}
}
})}
min="0"
className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent"
/>
</div>
<div className="grid grid-cols-2 gap-2">
<div>
<label className="block text-xs font-medium text-gray-400 mb-1">
Diffuse Bounces
</label>
<input
type="number"
value={formData.render_settings.engine_settings.diffuse_bounces || 4}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
engine_settings: {
...formData.render_settings.engine_settings,
diffuse_bounces: parseInt(e.target.value) || 4,
}
}
})}
min="0"
className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent"
/>
</div>
<div>
<label className="block text-xs font-medium text-gray-400 mb-1">
Glossy Bounces
</label>
<input
type="number"
value={formData.render_settings.engine_settings.glossy_bounces || 4}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
engine_settings: {
...formData.render_settings.engine_settings,
glossy_bounces: parseInt(e.target.value) || 4,
}
}
})}
min="0"
className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent"
/>
</div>
</div>
</div>
)}
{/* EEVEE Settings */}
{(formData.render_settings.engine === 'eevee' || formData.render_settings.engine === 'eevee_next') && formData.render_settings.engine_settings && (
<div className="space-y-3 p-3 bg-gray-900/50 rounded-lg">
<div className="text-sm font-medium text-gray-300 mb-2">EEVEE Settings</div>
<div>
<label className="block text-xs font-medium text-gray-400 mb-1">
Render Samples
</label>
<input
type="number"
value={formData.render_settings.engine_settings.taa_render_samples || 64}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
engine_settings: {
...formData.render_settings.engine_settings,
taa_render_samples: parseInt(e.target.value) || 64,
}
}
})}
min="1"
className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent"
/>
</div>
<div className="flex items-center">
<input
type="checkbox"
checked={formData.render_settings.engine_settings.use_bloom || false}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
engine_settings: {
...formData.render_settings.engine_settings,
use_bloom: e.target.checked,
}
}
})}
className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded"
/>
<label className="ml-2 block text-xs text-gray-400">
Bloom
</label>
</div>
<div className="flex items-center">
<input
type="checkbox"
checked={formData.render_settings.engine_settings.use_ssr || false}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
engine_settings: {
...formData.render_settings.engine_settings,
use_ssr: e.target.checked,
}
}
})}
className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded"
/>
<label className="ml-2 block text-xs text-gray-400">
Screen Space Reflections (SSR)
</label>
</div>
<div className="flex items-center">
<input
type="checkbox"
checked={formData.render_settings.engine_settings.use_ssao || false}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
engine_settings: {
...formData.render_settings.engine_settings,
use_ssao: e.target.checked,
}
}
})}
className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded"
/>
<label className="ml-2 block text-xs text-gray-400">
Screen Space Ambient Occlusion (SSAO)
</label>
</div>
<div className="flex items-center">
<input
type="checkbox"
checked={formData.render_settings.engine_settings.use_volumetric || false}
onChange={(e) => setFormData({
...formData,
render_settings: {
...formData.render_settings,
engine_settings: {
...formData.render_settings.engine_settings,
use_volumetric: e.target.checked,
}
}
})}
className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded"
/>
<label className="ml-2 block text-xs text-gray-400">
Volumetric Rendering
</label>
</div>
</div>
)}
</div>
)}
</div>
)}
<div className="pt-4 border-t border-gray-700">
<div className="text-xs text-gray-400 mb-2">
Selected file: {file?.name}

View File

@@ -198,10 +198,18 @@ export const jobs = {
return api.uploadFile(`/jobs/${jobId}/upload`, file, onProgress, mainBlendFile);
},
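// Uploads a file before any job record exists; the server is assumed to return an
// upload session whose ID is later passed as upload_session_id on job creation so
// the context archive can be moved into the new job.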
async uploadFileForJobCreation(file, onProgress, mainBlendFile) {
return api.uploadFile(`/jobs/upload`, file, onProgress, mainBlendFile);
},
async getFiles(jobId) {
return api.get(`/jobs/${jobId}/files`);
},
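// Fetches context-archive details for a job (GET /jobs/:id/context); the exact
// response shape depends on the manager's context handler.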
async getContextArchive(jobId) {
return api.get(`/jobs/${jobId}/context`);
},
downloadFile(jobId, fileId) {
return `${API_BASE}/jobs/${jobId}/files/${fileId}/download`;
},