Enhance logging and context handling in job management. Introduce logger initialization with configurable parameters in the manager and runner commands. Switch job context handling from tar.gz to plain tar archives and implement ETag generation for improved caching. Refactor API endpoints to support the new context file structure and improve error handling in job submissions. Add support for unhide-objects and auto-execution options in job creation requests.

2025-11-24 21:48:05 -06:00
parent a029714e08
commit 4ac05d50a1
23 changed files with 4133 additions and 1311 deletions

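Note on the caching change: the ETag generation mentioned in the commit message is not part of the hunks excerpted below. As a minimal sketch only, assuming the ETag is a content hash of context.tar checked against If-None-Match (contextETag and serveContext are illustrative names, not the functions this commit actually adds):

package api

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"net/http"
	"os"
)

// contextETag derives a strong ETag for a job's context.tar by hashing the
// archive contents. Hypothetical helper, not the code added by this commit.
func contextETag(contextPath string) (string, error) {
	f, err := os.Open(contextPath)
	if err != nil {
		return "", fmt.Errorf("failed to open context archive: %w", err)
	}
	defer f.Close()

	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		return "", fmt.Errorf("failed to hash context archive: %w", err)
	}
	// ETag header values are sent quoted, e.g. "3a7f...".
	return `"` + hex.EncodeToString(h.Sum(nil)) + `"`, nil
}

// serveContext shows how such an ETag enables conditional downloads: an
// unchanged context.tar answers If-None-Match with 304 instead of the body.
func serveContext(w http.ResponseWriter, r *http.Request, contextPath string) {
	etag, err := contextETag(contextPath)
	if err != nil {
		http.Error(w, "context not available", http.StatusNotFound)
		return
	}
	w.Header().Set("ETag", etag)
	if r.Header.Get("If-None-Match") == etag {
		w.WriteHeader(http.StatusNotModified)
		return
	}
	http.ServeFile(w, r, contextPath)
}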

@@ -4,8 +4,8 @@ import (
"archive/tar"
"bufio"
"bytes"
"compress/gzip"
"database/sql"
_ "embed"
"encoding/json"
"errors"
"fmt"
@@ -17,6 +17,7 @@ import (
"path/filepath"
"strings"
"jiggablend/pkg/scripts"
"jiggablend/pkg/types"
)
@@ -169,22 +170,26 @@ func (s *Server) handleGetJobMetadata(w http.ResponseWriter, r *http.Request) {
// extractMetadataFromContext extracts metadata from the blend file in a context archive
// Returns the extracted metadata or an error
func (s *Server) extractMetadataFromContext(jobID int64) (*types.BlendMetadata, error) {
contextPath := filepath.Join(s.storage.JobPath(jobID), "context.tar.gz")
contextPath := filepath.Join(s.storage.JobPath(jobID), "context.tar")
// Check if context exists
if _, err := os.Stat(contextPath); err != nil {
return nil, fmt.Errorf("context archive not found: %w", err)
}
// Create temporary directory for extraction
tmpDir, err := os.MkdirTemp("", fmt.Sprintf("fuego-metadata-%d-*", jobID))
// Create temporary directory for extraction under storage base path
tmpDir, err := s.storage.TempDir(fmt.Sprintf("jiggablend-metadata-%d-*", jobID))
if err != nil {
return nil, fmt.Errorf("failed to create temporary directory: %w", err)
}
defer os.RemoveAll(tmpDir)
defer func() {
if err := os.RemoveAll(tmpDir); err != nil {
log.Printf("Warning: Failed to clean up temp directory %s: %v", tmpDir, err)
}
}()
// Extract context archive
if err := s.extractTarGz(contextPath, tmpDir); err != nil {
if err := s.extractTar(contextPath, tmpDir); err != nil {
return nil, fmt.Errorf("failed to extract context: %w", err)
}
@@ -228,188 +233,20 @@ func (s *Server) extractMetadataFromContext(jobID int64) (*types.BlendMetadata,
return nil, fmt.Errorf("no .blend file found in context")
}
// Create Python script to extract metadata
// Use embedded Python script
scriptPath := filepath.Join(tmpDir, "extract_metadata.py")
scriptContent := `import bpy
import json
import sys
# Make all file paths relative to the blend file location FIRST
# This must be done immediately after file load, before any other operations
# to prevent Blender from trying to access external files with absolute paths
try:
    bpy.ops.file.make_paths_relative()
    print("Made all file paths relative to blend file")
except Exception as e:
    print(f"Warning: Could not make paths relative: {e}")
# Check for missing addons that the blend file requires
# Blender marks missing addons with "_missing" suffix in preferences
missing_files_info = {
    "checked": False,
    "has_missing": False,
    "missing_files": [],
    "missing_addons": []
}
try:
    missing = []
    for mod in bpy.context.preferences.addons:
        if mod.module.endswith("_missing"):
            missing.append(mod.module.rsplit("_", 1)[0])
    missing_files_info["checked"] = True
    if missing:
        missing_files_info["has_missing"] = True
        missing_files_info["missing_addons"] = missing
        print("Missing add-ons required by this .blend:")
        for name in missing:
            print(" -", name)
    else:
        print("No missing add-ons detected; file is headless-safe")
except Exception as e:
    print(f"Warning: Could not check for missing addons: {e}")
    missing_files_info["error"] = str(e)
# Get scene
scene = bpy.context.scene
# Extract frame range from scene settings
frame_start = scene.frame_start
frame_end = scene.frame_end
# Also check for actual animation range (keyframes)
# Find the earliest and latest keyframes across all objects
animation_start = None
animation_end = None
for obj in scene.objects:
    if obj.animation_data and obj.animation_data.action:
        action = obj.animation_data.action
        if action.fcurves:
            for fcurve in action.fcurves:
                if fcurve.keyframe_points:
                    for keyframe in fcurve.keyframe_points:
                        frame = int(keyframe.co[0])
                        if animation_start is None or frame < animation_start:
                            animation_start = frame
                        if animation_end is None or frame > animation_end:
                            animation_end = frame
# Use animation range if available, otherwise use scene frame range
# If scene range seems wrong (start == end), prefer animation range
if animation_start is not None and animation_end is not None:
    if frame_start == frame_end or (animation_start < frame_start or animation_end > frame_end):
        # Use animation range if scene range is invalid or animation extends beyond it
        frame_start = animation_start
        frame_end = animation_end
# Extract render settings
render = scene.render
resolution_x = render.resolution_x
resolution_y = render.resolution_y
engine = scene.render.engine.upper()
# Determine output format from file format
output_format = render.image_settings.file_format
# Extract engine-specific settings
engine_settings = {}
if engine == 'CYCLES':
    cycles = scene.cycles
    engine_settings = {
        "samples": getattr(cycles, 'samples', 128),
        "use_denoising": getattr(cycles, 'use_denoising', False),
        "denoising_radius": getattr(cycles, 'denoising_radius', 0),
        "denoising_strength": getattr(cycles, 'denoising_strength', 0.0),
        "device": getattr(cycles, 'device', 'CPU'),
        "use_adaptive_sampling": getattr(cycles, 'use_adaptive_sampling', False),
        "adaptive_threshold": getattr(cycles, 'adaptive_threshold', 0.01) if getattr(cycles, 'use_adaptive_sampling', False) else 0.01,
        "use_fast_gi": getattr(cycles, 'use_fast_gi', False),
        "light_tree": getattr(cycles, 'use_light_tree', False),
        "use_light_linking": getattr(cycles, 'use_light_linking', False),
        "caustics_reflective": getattr(cycles, 'caustics_reflective', False),
        "caustics_refractive": getattr(cycles, 'caustics_refractive', False),
        "blur_glossy": getattr(cycles, 'blur_glossy', 0.0),
        "max_bounces": getattr(cycles, 'max_bounces', 12),
        "diffuse_bounces": getattr(cycles, 'diffuse_bounces', 4),
        "glossy_bounces": getattr(cycles, 'glossy_bounces', 4),
        "transmission_bounces": getattr(cycles, 'transmission_bounces', 12),
        "volume_bounces": getattr(cycles, 'volume_bounces', 0),
        "transparent_max_bounces": getattr(cycles, 'transparent_max_bounces', 8),
        "film_transparent": getattr(cycles, 'film_transparent', False),
        "use_layer_samples": getattr(cycles, 'use_layer_samples', False),
    }
elif engine == 'EEVEE' or engine == 'EEVEE_NEXT':
    eevee = scene.eevee
    engine_settings = {
        "taa_render_samples": getattr(eevee, 'taa_render_samples', 64),
        "use_bloom": getattr(eevee, 'use_bloom', False),
        "bloom_threshold": getattr(eevee, 'bloom_threshold', 0.8),
        "bloom_intensity": getattr(eevee, 'bloom_intensity', 0.05),
        "bloom_radius": getattr(eevee, 'bloom_radius', 6.5),
        "use_ssr": getattr(eevee, 'use_ssr', True),
        "use_ssr_refraction": getattr(eevee, 'use_ssr_refraction', False),
        "ssr_quality": getattr(eevee, 'ssr_quality', 'MEDIUM'),
        "use_ssao": getattr(eevee, 'use_ssao', True),
        "ssao_quality": getattr(eevee, 'ssao_quality', 'MEDIUM'),
        "ssao_distance": getattr(eevee, 'ssao_distance', 0.2),
        "ssao_factor": getattr(eevee, 'ssao_factor', 1.0),
        "use_soft_shadows": getattr(eevee, 'use_soft_shadows', True),
        "use_shadow_high_bitdepth": getattr(eevee, 'use_shadow_high_bitdepth', True),
        "use_volumetric": getattr(eevee, 'use_volumetric', False),
        "volumetric_tile_size": getattr(eevee, 'volumetric_tile_size', '8'),
        "volumetric_samples": getattr(eevee, 'volumetric_samples', 64),
        "volumetric_start": getattr(eevee, 'volumetric_start', 0.0),
        "volumetric_end": getattr(eevee, 'volumetric_end', 100.0),
        "use_volumetric_lights": getattr(eevee, 'use_volumetric_lights', True),
        "use_volumetric_shadows": getattr(eevee, 'use_volumetric_shadows', True),
        "use_gtao": getattr(eevee, 'use_gtao', False),
        "gtao_quality": getattr(eevee, 'gtao_quality', 'MEDIUM'),
        "use_overscan": getattr(eevee, 'use_overscan', False),
    }
else:
    # For other engines, extract basic samples if available
    engine_settings = {
        "samples": getattr(scene, 'samples', 128) if hasattr(scene, 'samples') else 128
    }
# Extract scene info
camera_count = len([obj for obj in scene.objects if obj.type == 'CAMERA'])
object_count = len(scene.objects)
material_count = len(bpy.data.materials)
# Build metadata dictionary
metadata = {
    "frame_start": frame_start,
    "frame_end": frame_end,
    "render_settings": {
        "resolution_x": resolution_x,
        "resolution_y": resolution_y,
        "output_format": output_format,
        "engine": engine.lower(),
        "engine_settings": engine_settings
    },
    "scene_info": {
        "camera_count": camera_count,
        "object_count": object_count,
        "material_count": material_count
    },
    "missing_files_info": missing_files_info
}
# Output as JSON
print(json.dumps(metadata))
sys.stdout.flush()
`
if err := os.WriteFile(scriptPath, []byte(scriptContent), 0644); err != nil {
if err := os.WriteFile(scriptPath, []byte(scripts.ExtractMetadata), 0644); err != nil {
return nil, fmt.Errorf("failed to create extraction script: %w", err)
}
// Make blend file path relative to tmpDir to avoid path resolution issues
blendFileRel, err := filepath.Rel(tmpDir, blendFile)
if err != nil {
return nil, fmt.Errorf("failed to get relative path for blend file: %w", err)
}
// Execute Blender with Python script
cmd := exec.Command("blender", "-b", blendFile, "--python", scriptPath)
cmd := exec.Command("blender", "-b", blendFileRel, "--python", "extract_metadata.py")
cmd.Dir = tmpDir
// Capture stdout and stderr
@@ -443,14 +280,16 @@ sys.stdout.flush()
}
}()
// Stream stderr (discard for now, but could log if needed)
// Capture stderr for error reporting
var stderrBuffer bytes.Buffer
stderrDone := make(chan bool)
go func() {
defer close(stderrDone)
scanner := bufio.NewScanner(stderrPipe)
for scanner.Scan() {
// Could log stderr if needed
_ = scanner.Text()
line := scanner.Text()
stderrBuffer.WriteString(line)
stderrBuffer.WriteString("\n")
}
}()
@@ -462,6 +301,18 @@ sys.stdout.flush()
<-stderrDone
if err != nil {
stderrOutput := strings.TrimSpace(stderrBuffer.String())
stdoutOutput := strings.TrimSpace(stdoutBuffer.String())
log.Printf("Blender metadata extraction failed for job %d:", jobID)
if stderrOutput != "" {
log.Printf("Blender stderr: %s", stderrOutput)
}
if stdoutOutput != "" {
log.Printf("Blender stdout (last 500 chars): %s", truncateString(stdoutOutput, 500))
}
if stderrOutput != "" {
return nil, fmt.Errorf("blender metadata extraction failed: %w (stderr: %s)", err, truncateString(stderrOutput, 200))
}
return nil, fmt.Errorf("blender metadata extraction failed: %w", err)
}
@@ -484,21 +335,25 @@ sys.stdout.flush()
return &metadata, nil
}
// extractTarGz extracts a tar.gz archive to a destination directory
func (s *Server) extractTarGz(tarGzPath, destDir string) error {
file, err := os.Open(tarGzPath)
// extractTar extracts a tar archive to a destination directory
func (s *Server) extractTar(tarPath, destDir string) error {
log.Printf("Extracting tar archive: %s -> %s", tarPath, destDir)
// Ensure destination directory exists
if err := os.MkdirAll(destDir, 0755); err != nil {
return fmt.Errorf("failed to create destination directory: %w", err)
}
file, err := os.Open(tarPath)
if err != nil {
return fmt.Errorf("failed to open archive: %w", err)
}
defer file.Close()
gzr, err := gzip.NewReader(file)
if err != nil {
return fmt.Errorf("failed to create gzip reader: %w", err)
}
defer gzr.Close()
tr := tar.NewReader(file)
tr := tar.NewReader(gzr)
fileCount := 0
dirCount := 0
for {
header, err := tr.Next()
@@ -511,9 +366,13 @@ func (s *Server) extractTarGz(tarGzPath, destDir string) error {
// Sanitize path to prevent directory traversal
target := filepath.Join(destDir, header.Name)
// Ensure target is within destDir
if !strings.HasPrefix(filepath.Clean(target), filepath.Clean(destDir)+string(os.PathSeparator)) {
return fmt.Errorf("invalid file path in archive: %s", header.Name)
cleanTarget := filepath.Clean(target)
cleanDestDir := filepath.Clean(destDir)
if !strings.HasPrefix(cleanTarget, cleanDestDir+string(os.PathSeparator)) && cleanTarget != cleanDestDir {
log.Printf("ERROR: Invalid file path in TAR - target: %s, destDir: %s", cleanTarget, cleanDestDir)
return fmt.Errorf("invalid file path in archive: %s (target: %s, destDir: %s)", header.Name, cleanTarget, cleanDestDir)
}
// Create parent directories
@@ -527,14 +386,18 @@ func (s *Server) extractTarGz(tarGzPath, destDir string) error {
if err != nil {
return fmt.Errorf("failed to create file: %w", err)
}
if _, err := io.Copy(outFile, tr); err != nil {
_, err = io.Copy(outFile, tr)
if err != nil {
outFile.Close()
return fmt.Errorf("failed to write file: %w", err)
}
outFile.Close()
fileCount++
} else if header.Typeflag == tar.TypeDir {
dirCount++
}
}
log.Printf("Extraction complete: %d files, %d directories extracted to %s", fileCount, dirCount, destDir)
return nil
}
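For reference, the packing side that extractTar expects is not shown in this diff. A minimal sketch, assuming the submitter walks the job directory into an uncompressed tar with forward-slash relative paths (packContext is a hypothetical helper, not code from this commit):

package client

import (
	"archive/tar"
	"fmt"
	"io"
	"os"
	"path/filepath"
)

// packContext writes srcDir into an uncompressed tar archive at tarPath,
// storing entries under forward-slash relative paths so the manager's
// extractTar can rebuild the same tree. Illustrative sketch only.
func packContext(srcDir, tarPath string) error {
	out, err := os.Create(tarPath)
	if err != nil {
		return fmt.Errorf("failed to create archive: %w", err)
	}
	defer out.Close()

	tw := tar.NewWriter(out)
	defer tw.Close()

	return filepath.Walk(srcDir, func(path string, info os.FileInfo, err error) error {
		if err != nil || path == srcDir {
			return err
		}
		if info.Mode()&os.ModeSymlink != 0 {
			return nil // skip symlinks in this sketch
		}
		rel, err := filepath.Rel(srcDir, path)
		if err != nil {
			return err
		}
		hdr, err := tar.FileInfoHeader(info, "")
		if err != nil {
			return err
		}
		hdr.Name = filepath.ToSlash(rel)
		if err := tw.WriteHeader(hdr); err != nil {
			return err
		}
		if !info.Mode().IsRegular() {
			return nil // directories: header only, no body
		}
		f, err := os.Open(path)
		if err != nil {
			return err
		}
		defer f.Close()
		_, err = io.Copy(tw, f)
		return err
	})
}

Using plain tar matches the new extractTar path above, which reads the archive directly instead of layering a gzip reader over it.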