Implement context archive handling and metadata extraction for render jobs. Add functionality to check for Blender availability, create context archives, and extract metadata from .blend files. Update job creation and retrieval processes to support new metadata structure and context file management. Enhance client-side components to display context files and integrate new API endpoints for context handling.

2025-11-24 10:02:13 -06:00
parent f9ff4d0138
commit a029714e08
13 changed files with 3887 additions and 856 deletions
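
The Blender availability check mentioned in the commit message lands in one of the suppressed diffs below. A minimal sketch of what such a check could look like (an assumption, not code taken from this commit):

// checkBlenderAvailable reports whether a blender binary is on PATH and
// returns the first line of its version output. Hypothetical helper; the
// actual implementation is in a suppressed diff.
func checkBlenderAvailable() (string, error) {
	if _, err := exec.LookPath("blender"); err != nil {
		return "", fmt.Errorf("blender not found in PATH: %w", err)
	}
	out, err := exec.Command("blender", "--version").Output()
	if err != nil {
		return "", fmt.Errorf("failed to query blender version: %w", err)
	}
	// First line of output looks like "Blender 4.2.0"
	return strings.SplitN(string(out), "\n", 2)[0], nil
}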

File diff suppressed because it is too large


@@ -1,11 +1,21 @@
package api
import (
"archive/tar"
"bufio"
"bytes"
"compress/gzip"
"database/sql"
"encoding/json"
"errors"
"fmt"
"io"
"log"
"net/http"
"os"
"os/exec"
"path/filepath"
"strings"
"jiggablend/pkg/types"
)
@@ -156,3 +166,375 @@ func (s *Server) handleGetJobMetadata(w http.ResponseWriter, r *http.Request) {
s.respondJSON(w, http.StatusOK, metadata)
}
// extractMetadataFromContext extracts metadata from the blend file in a context archive
// Returns the extracted metadata or an error
func (s *Server) extractMetadataFromContext(jobID int64) (*types.BlendMetadata, error) {
contextPath := filepath.Join(s.storage.JobPath(jobID), "context.tar.gz")
// Check if context exists
if _, err := os.Stat(contextPath); err != nil {
return nil, fmt.Errorf("context archive not found: %w", err)
}
// Create temporary directory for extraction
tmpDir, err := os.MkdirTemp("", fmt.Sprintf("fuego-metadata-%d-*", jobID))
if err != nil {
return nil, fmt.Errorf("failed to create temporary directory: %w", err)
}
defer os.RemoveAll(tmpDir)
// Extract context archive
if err := s.extractTarGz(contextPath, tmpDir); err != nil {
return nil, fmt.Errorf("failed to extract context: %w", err)
}
// Find .blend file in extracted contents
blendFile := ""
err = filepath.Walk(tmpDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
// Blender save files (.blend1, .blend2, etc.) do not end in ".blend",
// so the suffix check alone is enough to exclude them here
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".blend") {
blendFile = path
return filepath.SkipAll // Stop walking once we find a blend file
}
return nil
})
if err != nil {
return nil, fmt.Errorf("failed to find blend file: %w", err)
}
if blendFile == "" {
return nil, fmt.Errorf("no .blend file found in context")
}
// Create Python script to extract metadata
scriptPath := filepath.Join(tmpDir, "extract_metadata.py")
scriptContent := `import bpy
import json
import sys
# Make all file paths relative to the blend file location FIRST
# This must be done immediately after file load, before any other operations
# to prevent Blender from trying to access external files with absolute paths
try:
bpy.ops.file.make_paths_relative()
print("Made all file paths relative to blend file")
except Exception as e:
print(f"Warning: Could not make paths relative: {e}")
# Check for missing addons that the blend file requires
# Blender marks missing addons with "_missing" suffix in preferences
missing_files_info = {
"checked": False,
"has_missing": False,
"missing_files": [],
"missing_addons": []
}
try:
missing = []
for mod in bpy.context.preferences.addons:
if mod.module.endswith("_missing"):
missing.append(mod.module.rsplit("_", 1)[0])
missing_files_info["checked"] = True
if missing:
missing_files_info["has_missing"] = True
missing_files_info["missing_addons"] = missing
print("Missing add-ons required by this .blend:")
for name in missing:
print(" -", name)
else:
print("No missing add-ons detected file is headless-safe")
except Exception as e:
print(f"Warning: Could not check for missing addons: {e}")
missing_files_info["error"] = str(e)
# Get scene
scene = bpy.context.scene
# Extract frame range from scene settings
frame_start = scene.frame_start
frame_end = scene.frame_end
# Also check for actual animation range (keyframes)
# Find the earliest and latest keyframes across all objects
animation_start = None
animation_end = None
for obj in scene.objects:
if obj.animation_data and obj.animation_data.action:
action = obj.animation_data.action
if action.fcurves:
for fcurve in action.fcurves:
if fcurve.keyframe_points:
for keyframe in fcurve.keyframe_points:
frame = int(keyframe.co[0])
if animation_start is None or frame < animation_start:
animation_start = frame
if animation_end is None or frame > animation_end:
animation_end = frame
# Use animation range if available, otherwise use scene frame range
# If scene range seems wrong (start == end), prefer animation range
if animation_start is not None and animation_end is not None:
if frame_start == frame_end or (animation_start < frame_start or animation_end > frame_end):
# Use animation range if scene range is invalid or animation extends beyond it
frame_start = animation_start
frame_end = animation_end
# Extract render settings
render = scene.render
resolution_x = render.resolution_x
resolution_y = render.resolution_y
engine = scene.render.engine.upper()
# Determine output format from file format
output_format = render.image_settings.file_format
# Extract engine-specific settings
engine_settings = {}
if engine == 'CYCLES':
cycles = scene.cycles
engine_settings = {
"samples": getattr(cycles, 'samples', 128),
"use_denoising": getattr(cycles, 'use_denoising', False),
"denoising_radius": getattr(cycles, 'denoising_radius', 0),
"denoising_strength": getattr(cycles, 'denoising_strength', 0.0),
"device": getattr(cycles, 'device', 'CPU'),
"use_adaptive_sampling": getattr(cycles, 'use_adaptive_sampling', False),
"adaptive_threshold": getattr(cycles, 'adaptive_threshold', 0.01) if getattr(cycles, 'use_adaptive_sampling', False) else 0.01,
"use_fast_gi": getattr(cycles, 'use_fast_gi', False),
"light_tree": getattr(cycles, 'use_light_tree', False),
"use_light_linking": getattr(cycles, 'use_light_linking', False),
"caustics_reflective": getattr(cycles, 'caustics_reflective', False),
"caustics_refractive": getattr(cycles, 'caustics_refractive', False),
"blur_glossy": getattr(cycles, 'blur_glossy', 0.0),
"max_bounces": getattr(cycles, 'max_bounces', 12),
"diffuse_bounces": getattr(cycles, 'diffuse_bounces', 4),
"glossy_bounces": getattr(cycles, 'glossy_bounces', 4),
"transmission_bounces": getattr(cycles, 'transmission_bounces', 12),
"volume_bounces": getattr(cycles, 'volume_bounces', 0),
"transparent_max_bounces": getattr(cycles, 'transparent_max_bounces', 8),
"film_transparent": getattr(cycles, 'film_transparent', False),
"use_layer_samples": getattr(cycles, 'use_layer_samples', False),
}
elif engine in ('EEVEE', 'EEVEE_NEXT'):
eevee = scene.eevee
engine_settings = {
"taa_render_samples": getattr(eevee, 'taa_render_samples', 64),
"use_bloom": getattr(eevee, 'use_bloom', False),
"bloom_threshold": getattr(eevee, 'bloom_threshold', 0.8),
"bloom_intensity": getattr(eevee, 'bloom_intensity', 0.05),
"bloom_radius": getattr(eevee, 'bloom_radius', 6.5),
"use_ssr": getattr(eevee, 'use_ssr', True),
"use_ssr_refraction": getattr(eevee, 'use_ssr_refraction', False),
"ssr_quality": getattr(eevee, 'ssr_quality', 'MEDIUM'),
"use_ssao": getattr(eevee, 'use_ssao', True),
"ssao_quality": getattr(eevee, 'ssao_quality', 'MEDIUM'),
"ssao_distance": getattr(eevee, 'ssao_distance', 0.2),
"ssao_factor": getattr(eevee, 'ssao_factor', 1.0),
"use_soft_shadows": getattr(eevee, 'use_soft_shadows', True),
"use_shadow_high_bitdepth": getattr(eevee, 'use_shadow_high_bitdepth', True),
"use_volumetric": getattr(eevee, 'use_volumetric', False),
"volumetric_tile_size": getattr(eevee, 'volumetric_tile_size', '8'),
"volumetric_samples": getattr(eevee, 'volumetric_samples', 64),
"volumetric_start": getattr(eevee, 'volumetric_start', 0.0),
"volumetric_end": getattr(eevee, 'volumetric_end', 100.0),
"use_volumetric_lights": getattr(eevee, 'use_volumetric_lights', True),
"use_volumetric_shadows": getattr(eevee, 'use_volumetric_shadows', True),
"use_gtao": getattr(eevee, 'use_gtao', False),
"gtao_quality": getattr(eevee, 'gtao_quality', 'MEDIUM'),
"use_overscan": getattr(eevee, 'use_overscan', False),
}
else:
# For other engines, extract basic samples if available
engine_settings = {
"samples": getattr(scene, 'samples', 128) if hasattr(scene, 'samples') else 128
}
# Extract scene info
camera_count = len([obj for obj in scene.objects if obj.type == 'CAMERA'])
object_count = len(scene.objects)
material_count = len(bpy.data.materials)
# Build metadata dictionary
metadata = {
"frame_start": frame_start,
"frame_end": frame_end,
"render_settings": {
"resolution_x": resolution_x,
"resolution_y": resolution_y,
"output_format": output_format,
"engine": engine.lower(),
"engine_settings": engine_settings
},
"scene_info": {
"camera_count": camera_count,
"object_count": object_count,
"material_count": material_count
},
"missing_files_info": missing_files_info
}
# Output as JSON
print(json.dumps(metadata))
sys.stdout.flush()
`
if err := os.WriteFile(scriptPath, []byte(scriptContent), 0644); err != nil {
return nil, fmt.Errorf("failed to create extraction script: %w", err)
}
// Execute Blender with Python script
cmd := exec.Command("blender", "-b", blendFile, "--python", scriptPath)
cmd.Dir = tmpDir
// Capture stdout and stderr
stdoutPipe, err := cmd.StdoutPipe()
if err != nil {
return nil, fmt.Errorf("failed to create stdout pipe: %w", err)
}
stderrPipe, err := cmd.StderrPipe()
if err != nil {
return nil, fmt.Errorf("failed to create stderr pipe: %w", err)
}
// Buffer to collect stdout for JSON parsing
var stdoutBuffer bytes.Buffer
// Start the command
if err := cmd.Start(); err != nil {
return nil, fmt.Errorf("failed to start blender: %w", err)
}
// Stream stdout and collect for JSON parsing
stdoutDone := make(chan bool)
go func() {
defer close(stdoutDone)
scanner := bufio.NewScanner(stdoutPipe)
for scanner.Scan() {
line := scanner.Text()
stdoutBuffer.WriteString(line)
stdoutBuffer.WriteString("\n")
}
}()
// Stream stderr (discard for now, but could log if needed)
stderrDone := make(chan bool)
go func() {
defer close(stderrDone)
scanner := bufio.NewScanner(stderrPipe)
for scanner.Scan() {
// Could log stderr if needed
_ = scanner.Text()
}
}()
// Wait for command to complete
err = cmd.Wait()
// Wait for streaming goroutines to finish
<-stdoutDone
<-stderrDone
if err != nil {
return nil, fmt.Errorf("blender metadata extraction failed: %w", err)
}
// Parse output (metadata is printed to stdout)
metadataJSON := strings.TrimSpace(stdoutBuffer.String())
// Extract JSON from output (Blender may print other stuff)
jsonStart := strings.Index(metadataJSON, "{")
jsonEnd := strings.LastIndex(metadataJSON, "}")
if jsonStart == -1 || jsonEnd == -1 || jsonEnd <= jsonStart {
return nil, errors.New("failed to extract JSON from Blender output")
}
metadataJSON = metadataJSON[jsonStart : jsonEnd+1]
var metadata types.BlendMetadata
if err := json.Unmarshal([]byte(metadataJSON), &metadata); err != nil {
return nil, fmt.Errorf("failed to parse metadata JSON: %w", err)
}
log.Printf("Metadata extracted for job %d: frame_start=%d, frame_end=%d", jobID, metadata.FrameStart, metadata.FrameEnd)
return &metadata, nil
}
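
A plausible call site for extractMetadataFromContext (a hedged sketch; the actual wiring lives elsewhere in this commit) would extract right after the context upload and persist the result in the jobs.blend_metadata column that handleGetJobMetadataForRunner reads below:

// refreshJobMetadata is a hypothetical helper: extract metadata from the
// job's context archive and store it as JSON on the job row.
func (s *Server) refreshJobMetadata(jobID int64) error {
	metadata, err := s.extractMetadataFromContext(jobID)
	if err != nil {
		return fmt.Errorf("metadata extraction failed for job %d: %w", jobID, err)
	}
	raw, err := json.Marshal(metadata)
	if err != nil {
		return fmt.Errorf("failed to marshal metadata: %w", err)
	}
	_, err = s.db.Exec(`UPDATE jobs SET blend_metadata = ? WHERE id = ?`, string(raw), jobID)
	return err
}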
// extractTarGz extracts a tar.gz archive to a destination directory
func (s *Server) extractTarGz(tarGzPath, destDir string) error {
file, err := os.Open(tarGzPath)
if err != nil {
return fmt.Errorf("failed to open archive: %w", err)
}
defer file.Close()
gzr, err := gzip.NewReader(file)
if err != nil {
return fmt.Errorf("failed to create gzip reader: %w", err)
}
defer gzr.Close()
tr := tar.NewReader(gzr)
for {
header, err := tr.Next()
if err == io.EOF {
break
}
if err != nil {
return fmt.Errorf("failed to read tar header: %w", err)
}
// Sanitize path to prevent directory traversal
target := filepath.Join(destDir, header.Name)
// Ensure target is within destDir
if !strings.HasPrefix(filepath.Clean(target), filepath.Clean(destDir)+string(os.PathSeparator)) {
return fmt.Errorf("invalid file path in archive: %s", header.Name)
}
// Create parent directories
if err := os.MkdirAll(filepath.Dir(target), 0755); err != nil {
return fmt.Errorf("failed to create directory: %w", err)
}
// Write file
if header.Typeflag == tar.TypeReg {
outFile, err := os.Create(target)
if err != nil {
return fmt.Errorf("failed to create file: %w", err)
}
if _, err := io.Copy(outFile, tr); err != nil {
outFile.Close()
return fmt.Errorf("failed to write file: %w", err)
}
outFile.Close()
}
}
return nil
}
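
The HasPrefix guard above is what rejects entries like "../../etc/passwd". A standalone illustration of the same check (a sketch, not part of the commit):

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	destDir := "/tmp/jobs/42"
	for _, name := range []string{"scene.blend", "textures/wood.png", "../../etc/passwd"} {
		target := filepath.Join(destDir, name)
		// Same check as extractTarGz: the cleaned target must stay under destDir
		inside := strings.HasPrefix(filepath.Clean(target), filepath.Clean(destDir)+string(os.PathSeparator))
		fmt.Printf("%-22s inside=%v\n", name, inside)
	}
}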


@@ -17,7 +17,6 @@ import (
"jiggablend/pkg/types"
"github.com/go-chi/chi/v5"
"github.com/gorilla/websocket"
)
@@ -294,51 +293,37 @@ func (s *Server) handleUpdateTaskStep(w http.ResponseWriter, r *http.Request) {
})
}
// handleDownloadJobContext allows runners to download the job context tar.gz
func (s *Server) handleDownloadJobContext(w http.ResponseWriter, r *http.Request) {
jobID, err := parseID(r, "jobId")
if err != nil {
s.respondError(w, http.StatusBadRequest, err.Error())
return
}
// Construct the context file path
contextPath := filepath.Join(s.storage.JobPath(jobID), "context.tar.gz")
// Check if context file exists
if !s.storage.FileExists(contextPath) {
log.Printf("Context archive not found for job %d", jobID)
s.respondError(w, http.StatusNotFound, "Context archive not found. The file may not have been uploaded successfully.")
return
}
// Open and serve file
file, err := s.storage.GetFile(contextPath)
if err != nil {
s.respondError(w, http.StatusNotFound, "Context file not found on disk")
return
}
defer file.Close()
// Set appropriate headers for tar.gz file
w.Header().Set("Content-Type", "application/gzip")
w.Header().Set("Content-Disposition", "attachment; filename=context.tar.gz")
// Stream the file to the response
io.Copy(w, file)
}
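
On the runner side, the matching download could look like the sketch below (the base URL and auth header are assumptions; the real runner client is not shown in this view):

// downloadJobContext fetches a job's context archive to destPath.
// Hypothetical client helper.
func downloadJobContext(baseURL, token string, jobID int64, destPath string) error {
	url := fmt.Sprintf("%s/jobs/%d/context.tar.gz", baseURL, jobID)
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "Bearer "+token)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("context download failed: %s", resp.Status)
	}
	out, err := os.Create(destPath)
	if err != nil {
		return err
	}
	defer out.Close()
	_, err = io.Copy(out, resp.Body)
	return err
}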
@@ -488,6 +473,43 @@ func (s *Server) handleGetJobFilesForRunner(w http.ResponseWriter, r *http.Reque
s.respondJSON(w, http.StatusOK, files)
}
// handleGetJobMetadataForRunner allows runners to get job metadata
func (s *Server) handleGetJobMetadataForRunner(w http.ResponseWriter, r *http.Request) {
jobID, err := parseID(r, "jobId")
if err != nil {
s.respondError(w, http.StatusBadRequest, err.Error())
return
}
var blendMetadataJSON sql.NullString
err = s.db.QueryRow(
`SELECT blend_metadata FROM jobs WHERE id = ?`,
jobID,
).Scan(&blendMetadataJSON)
if err == sql.ErrNoRows {
s.respondError(w, http.StatusNotFound, "Job not found")
return
}
if err != nil {
s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to query job: %v", err))
return
}
if !blendMetadataJSON.Valid || blendMetadataJSON.String == "" {
s.respondJSON(w, http.StatusOK, nil)
return
}
var metadata types.BlendMetadata
if err := json.Unmarshal([]byte(blendMetadataJSON.String), &metadata); err != nil {
s.respondError(w, http.StatusInternalServerError, "Failed to parse metadata")
return
}
s.respondJSON(w, http.StatusOK, metadata)
}
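
Decoding the extraction script's JSON implies a shape roughly like the following for types.BlendMetadata. Only FrameStart and FrameEnd are confirmed by this diff; the rest is inferred from the script's output keys:

// Inferred sketch, not the actual struct from jiggablend/pkg/types.
type BlendMetadata struct {
	FrameStart       int                    `json:"frame_start"`
	FrameEnd         int                    `json:"frame_end"`
	RenderSettings   map[string]interface{} `json:"render_settings"`
	SceneInfo        map[string]interface{} `json:"scene_info"`
	MissingFilesInfo map[string]interface{} `json:"missing_files_info"`
}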
// WebSocket message types
type WSMessage struct {
Type string `json:"type"`
@@ -1020,7 +1042,7 @@ func (s *Server) updateJobStatusFromTasks(jobID int64) {
log.Printf("Updated job %d status to %s (progress: %.1f%%, completed tasks: %d/%d)", jobID, jobStatus, progress, completedTasks, totalTasks)
}
if outputFormatStr == "MP4" {
if outputFormatStr == "EXR_264_MP4" || outputFormatStr == "EXR_AV1_MP4" {
// Check if a video generation task already exists for this job (any status)
var existingVideoTask int
s.db.QueryRow(
@@ -1603,6 +1625,9 @@ func (s *Server) assignTaskToRunner(runnerID int64, taskID int64) error {
task.JobName = jobName
if outputFormat.Valid {
task.OutputFormat = outputFormat.String
log.Printf("Task %d assigned with output_format: '%s' (from job %d)", taskID, outputFormat.String, task.JobID)
} else {
log.Printf("Task %d assigned with no output_format (job %d)", taskID, task.JobID)
}
task.TaskType = taskType


@@ -116,12 +116,14 @@ func (s *Server) setupRoutes() {
return http.HandlerFunc(s.auth.Middleware(next.ServeHTTP))
})
r.Post("/", s.handleCreateJob)
r.Post("/upload", s.handleUploadFileForJobCreation) // Upload before job creation
r.Get("/", s.handleListJobs)
r.Get("/{id}", s.handleGetJob)
r.Delete("/{id}", s.handleCancelJob)
r.Post("/{id}/delete", s.handleDeleteJob)
r.Post("/{id}/upload", s.handleUploadJobFile)
r.Get("/{id}/files", s.handleListJobFiles)
r.Get("/{id}/context", s.handleListContextArchive)
r.Get("/{id}/files/{fileId}/download", s.handleDownloadJobFile)
r.Get("/{id}/video", s.handleStreamVideo)
r.Get("/{id}/metadata", s.handleGetJobMetadata)
@@ -179,10 +181,11 @@ func (s *Server) setupRoutes() {
})
r.Post("/tasks/{id}/progress", s.handleUpdateTaskProgress)
r.Post("/tasks/{id}/steps", s.handleUpdateTaskStep)
r.Get("/files/{jobId}/*", s.handleDownloadFileForRunner)
r.Get("/jobs/{jobId}/context.tar.gz", s.handleDownloadJobContext)
r.Post("/files/{jobId}/upload", s.handleUploadFileFromRunner)
r.Get("/jobs/{jobId}/status", s.handleGetJobStatusForRunner)
r.Get("/jobs/{jobId}/files", s.handleGetJobFilesForRunner)
r.Get("/jobs/{jobId}/metadata", s.handleGetJobMetadataForRunner)
r.Post("/jobs/{jobId}/metadata", s.handleSubmitMetadata)
})
})
@@ -508,7 +511,7 @@ func parseID(r *http.Request, param string) (int64, error) {
// StartBackgroundTasks starts background goroutines for error recovery
func (s *Server) StartBackgroundTasks() {
go s.recoverStuckTasks()
go s.cleanupOldRenderJobs()
}
// recoverStuckTasks periodically checks for dead runners and stuck tasks

File diff suppressed because it is too large


@@ -1,7 +1,9 @@
package storage
import (
"archive/tar"
"archive/zip"
"compress/gzip"
"fmt"
"io"
"os"
@@ -194,3 +196,377 @@ func (s *Storage) ExtractZip(zipPath, destDir string) ([]string, error) {
return extractedFiles, nil
}
// findCommonPrefix finds the common leading directory prefix if all paths share the same first-level directory
// Returns the prefix to strip (with trailing slash) or empty string if no common prefix
func findCommonPrefix(relPaths []string) string {
if len(relPaths) == 0 {
return ""
}
// Get the first path component of each path
firstComponents := make([]string, 0, len(relPaths))
for _, path := range relPaths {
parts := strings.Split(filepath.ToSlash(path), "/")
if len(parts) > 0 && parts[0] != "" {
firstComponents = append(firstComponents, parts[0])
} else {
// If any path is at root level, no common prefix
return ""
}
}
// Check if all first components are the same
if len(firstComponents) == 0 {
return ""
}
commonFirst := firstComponents[0]
for _, comp := range firstComponents {
if comp != commonFirst {
// Not all paths share the same first directory
return ""
}
}
// All paths share the same first directory - return it with trailing slash
return commonFirst + "/"
}
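
A table-driven test sketch pins down the three interesting cases (hypothetical test, assumes the standard testing package is imported):

func TestFindCommonPrefix(t *testing.T) {
	cases := []struct {
		paths []string
		want  string
	}{
		{[]string{"proj/scene.blend", "proj/tex/wood.png"}, "proj/"}, // shared leading dir is returned
		{[]string{"scene.blend", "tex/wood.png"}, ""},                // first components differ
		{[]string{"a/x.png", "b/y.png"}, ""},                         // differing first dirs
	}
	for _, c := range cases {
		if got := findCommonPrefix(c.paths); got != c.want {
			t.Errorf("findCommonPrefix(%v) = %q, want %q", c.paths, got, c.want)
		}
	}
}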
// isBlenderSaveFile checks if a filename is a Blender save file (.blend1, .blend2, etc.)
// Returns true for files like "file.blend1", "file.blend2", but false for "file.blend"
func isBlenderSaveFile(filename string) bool {
lower := strings.ToLower(filename)
// Check if it ends with .blend followed by one or more digits
// Pattern: *.blend[digits]
if !strings.HasSuffix(lower, ".blend") {
// Doesn't end with .blend, check if it ends with .blend + digits
idx := strings.LastIndex(lower, ".blend")
if idx == -1 {
return false
}
// Check if there are digits after .blend
suffix := lower[idx+len(".blend"):]
if len(suffix) == 0 {
return false
}
// All remaining characters must be digits
for _, r := range suffix {
if r < '0' || r > '9' {
return false
}
}
return true
}
// Ends with .blend exactly - this is a regular blend file, not a save file
return false
}
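
And a companion sketch for isBlenderSaveFile (same assumptions):

func TestIsBlenderSaveFile(t *testing.T) {
	// Regular blend files are not save files
	if isBlenderSaveFile("scene.blend") {
		t.Error("scene.blend should not be treated as a save file")
	}
	// Numbered backups are, regardless of case or digit count
	for _, name := range []string{"scene.blend1", "SCENE.BLEND2", "scene.blend10"} {
		if !isBlenderSaveFile(name) {
			t.Errorf("%s should be detected as a save file", name)
		}
	}
	// Non-digit suffixes after ".blend" are not save files
	if isBlenderSaveFile("scene.blender.txt") {
		t.Error("scene.blender.txt should not be treated as a save file")
	}
}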
// CreateJobContext creates a tar.gz archive containing all job input files
// Filters out Blender save files (.blend1, .blend2, etc.)
// Streams file contents directly into the archive to handle large files efficiently
func (s *Storage) CreateJobContext(jobID int64) (string, error) {
jobPath := s.JobPath(jobID)
contextPath := filepath.Join(jobPath, "context.tar.gz")
// Collect all files from job directory, excluding the context file itself and Blender save files
var filesToInclude []string
err = filepath.Walk(jobPath, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
// Skip directories
if info.IsDir() {
return nil
}
// Skip the context file itself if it exists
if path == contextPath {
return nil
}
// Skip Blender save files
if isBlenderSaveFile(info.Name()) {
return nil
}
// Get relative path from job directory
relPath, err := filepath.Rel(jobPath, path)
if err != nil {
return err
}
// Sanitize path - ensure it doesn't escape the job directory
cleanRelPath := filepath.Clean(relPath)
if strings.HasPrefix(cleanRelPath, "..") {
return fmt.Errorf("invalid file path: %s", relPath)
}
filesToInclude = append(filesToInclude, path)
return nil
})
if err != nil {
return "", fmt.Errorf("failed to walk job directory: %w", err)
}
if len(filesToInclude) == 0 {
return "", fmt.Errorf("no files found to include in context")
}
// Create the tar.gz file using streaming
contextFile, err := os.Create(contextPath)
if err != nil {
return "", fmt.Errorf("failed to create context file: %w", err)
}
defer contextFile.Close()
gzWriter := gzip.NewWriter(contextFile)
defer gzWriter.Close()
tarWriter := tar.NewWriter(gzWriter)
defer tarWriter.Close()
// Add each file to the tar archive
for _, filePath := range filesToInclude {
file, err := os.Open(filePath)
if err != nil {
return "", fmt.Errorf("failed to open file %s: %w", filePath, err)
}
info, err := file.Stat()
if err != nil {
file.Close()
return "", fmt.Errorf("failed to stat file %s: %w", filePath, err)
}
// Get relative path for tar header
relPath, err := filepath.Rel(jobPath, filePath)
if err != nil {
file.Close()
return "", fmt.Errorf("failed to get relative path for %s: %w", filePath, err)
}
// Normalize path separators for tar (use forward slashes)
tarPath := filepath.ToSlash(relPath)
// Create tar header
header, err := tar.FileInfoHeader(info, "")
if err != nil {
file.Close()
return "", fmt.Errorf("failed to create tar header for %s: %w", filePath, err)
}
header.Name = tarPath
// Write header
if err := tarWriter.WriteHeader(header); err != nil {
file.Close()
return "", fmt.Errorf("failed to write tar header for %s: %w", filePath, err)
}
// Copy file contents using streaming
if _, err := io.Copy(tarWriter, file); err != nil {
file.Close()
return "", fmt.Errorf("failed to write file %s to tar: %w", filePath, err)
}
file.Close()
}
// Ensure all data is flushed
if err := tarWriter.Close(); err != nil {
return "", fmt.Errorf("failed to close tar writer: %w", err)
}
if err := gzWriter.Close(); err != nil {
return "", fmt.Errorf("failed to close gzip writer: %w", err)
}
if err := contextFile.Close(); err != nil {
return "", fmt.Errorf("failed to close context file: %w", err)
}
return contextPath, nil
}
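
A hypothetical call site (the storage import path is an assumption):

// rebuildContext regenerates a job's context archive after its input files
// change. Sketch only; not part of this diff.
func rebuildContext(store *storage.Storage, jobID int64) error {
	contextPath, err := store.CreateJobContext(jobID)
	if err != nil {
		return fmt.Errorf("failed to rebuild context for job %d: %w", jobID, err)
	}
	log.Printf("Rebuilt context archive for job %d at %s", jobID, contextPath)
	return nil
}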
// CreateJobContextFromDir creates a context archive (tar.gz) from files in a source directory
// This is used during upload to immediately create the context archive as the primary artifact
// excludeFiles is a set of relative paths (from sourceDir) to exclude from the context
func (s *Storage) CreateJobContextFromDir(sourceDir string, jobID int64, excludeFiles ...string) (string, error) {
jobPath := s.JobPath(jobID)
contextPath := filepath.Join(jobPath, "context.tar.gz")
// Ensure job directory exists
if err := os.MkdirAll(jobPath, 0755); err != nil {
return "", fmt.Errorf("failed to create job directory: %w", err)
}
// Build set of files to exclude (normalize paths)
excludeSet := make(map[string]bool)
for _, excludeFile := range excludeFiles {
// Normalize the exclude path
excludePath := filepath.Clean(excludeFile)
excludeSet[excludePath] = true
// Also add with forward slash for cross-platform compatibility
excludeSet[filepath.ToSlash(excludePath)] = true
}
// Collect all files from source directory, excluding Blender save files and excluded files
var filesToInclude []string
err := filepath.Walk(sourceDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
// Skip directories
if info.IsDir() {
return nil
}
// Skip Blender save files
if isBlenderSaveFile(info.Name()) {
return nil
}
// Get relative path from source directory
relPath, err := filepath.Rel(sourceDir, path)
if err != nil {
return err
}
// Sanitize path - ensure it doesn't escape the source directory
cleanRelPath := filepath.Clean(relPath)
if strings.HasPrefix(cleanRelPath, "..") {
return fmt.Errorf("invalid file path: %s", relPath)
}
// Check if this file should be excluded
if excludeSet[cleanRelPath] || excludeSet[filepath.ToSlash(cleanRelPath)] {
return nil
}
filesToInclude = append(filesToInclude, path)
return nil
})
if err != nil {
return "", fmt.Errorf("failed to walk source directory: %w", err)
}
if len(filesToInclude) == 0 {
return "", fmt.Errorf("no files found to include in context archive")
}
// Collect relative paths to find common prefix
relPaths := make([]string, 0, len(filesToInclude))
for _, filePath := range filesToInclude {
relPath, err := filepath.Rel(sourceDir, filePath)
if err != nil {
return "", fmt.Errorf("failed to get relative path for %s: %w", filePath, err)
}
relPaths = append(relPaths, relPath)
}
// Find and strip common leading directory if all files share one
commonPrefix := findCommonPrefix(relPaths)
// Validate that there's exactly one .blend file at the root level after prefix stripping
blendFilesAtRoot := 0
for _, relPath := range relPaths {
tarPath := filepath.ToSlash(relPath)
// Strip common prefix if present
if commonPrefix != "" && strings.HasPrefix(tarPath, commonPrefix) {
tarPath = strings.TrimPrefix(tarPath, commonPrefix)
}
// Check if it's a .blend file at root (no path separators after prefix stripping)
if strings.HasSuffix(strings.ToLower(tarPath), ".blend") {
// Check if it's at root level (no directory separators)
if !strings.Contains(tarPath, "/") {
blendFilesAtRoot++
}
}
}
if blendFilesAtRoot == 0 {
return "", fmt.Errorf("no .blend file found at root level in context archive")
}
if blendFilesAtRoot > 1 {
return "", fmt.Errorf("multiple .blend files found at root level in context archive (found %d, expected 1)", blendFilesAtRoot)
}
// Create the tar.gz file using streaming
contextFile, err := os.Create(contextPath)
if err != nil {
return "", fmt.Errorf("failed to create context file: %w", err)
}
defer contextFile.Close()
gzWriter := gzip.NewWriter(contextFile)
defer gzWriter.Close()
tarWriter := tar.NewWriter(gzWriter)
defer tarWriter.Close()
// Add each file to the tar archive
for i, filePath := range filesToInclude {
file, err := os.Open(filePath)
if err != nil {
return "", fmt.Errorf("failed to open file %s: %w", filePath, err)
}
info, err := file.Stat()
if err != nil {
file.Close()
return "", fmt.Errorf("failed to stat file %s: %w", filePath, err)
}
// Get relative path and strip common prefix if present
relPath := relPaths[i]
tarPath := filepath.ToSlash(relPath)
// Strip common prefix if found
if commonPrefix != "" && strings.HasPrefix(tarPath, commonPrefix) {
tarPath = strings.TrimPrefix(tarPath, commonPrefix)
}
// Create tar header
header, err := tar.FileInfoHeader(info, "")
if err != nil {
file.Close()
return "", fmt.Errorf("failed to create tar header for %s: %w", filePath, err)
}
header.Name = tarPath
// Write header
if err := tarWriter.WriteHeader(header); err != nil {
file.Close()
return "", fmt.Errorf("failed to write tar header for %s: %w", filePath, err)
}
// Copy file contents using streaming
if _, err := io.Copy(tarWriter, file); err != nil {
file.Close()
return "", fmt.Errorf("failed to write file %s to tar: %w", filePath, err)
}
file.Close()
}
// Ensure all data is flushed
if err := tarWriter.Close(); err != nil {
return "", fmt.Errorf("failed to close tar writer: %w", err)
}
if err := gzWriter.Close(); err != nil {
return "", fmt.Errorf("failed to close gzip writer: %w", err)
}
if err := contextFile.Close(); err != nil {
return "", fmt.Errorf("failed to close context file: %w", err)
}
return contextPath, nil
}
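
A hypothetical upload flow tying this together (names assumed): unpack the uploaded archive into a staging directory, then build the context from it, excluding the original upload. The single-root-.blend validation above guarantees runners can locate the scene file at the archive root.

// buildContextFromUpload is a sketch of the upload-time call; "upload.zip"
// stands in for whatever the originally uploaded archive was saved as.
func buildContextFromUpload(store *storage.Storage, stagingDir string, jobID int64) (string, error) {
	return store.CreateJobContextFromDir(stagingDir, jobID, "upload.zip")
}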