Update .gitignore to include log files and database journal files. Modify go.mod to update dependencies for go-sqlite3 and cloud.google.com/go/compute/metadata. Enhance Makefile to include logging options for manager and runner commands. Introduce new job token handling in auth package and implement database migration scripts. Refactor manager and runner components to improve job processing and metadata extraction. Add support for video preview in frontend components and enhance WebSocket management for channel subscriptions.

2026-01-02 13:55:19 -06:00
parent edc8ea160c
commit 94490237fe
44 changed files with 9463 additions and 7875 deletions


@@ -0,0 +1,333 @@
package api
import (
"fmt"
"log"
"strings"
"sync"
"time"
"jiggablend/pkg/types"
"github.com/gorilla/websocket"
)
// JobConnection wraps a WebSocket connection for job communication.
type JobConnection struct {
conn *websocket.Conn
writeMu sync.Mutex
stopPing chan struct{}
stopHeartbeat chan struct{}
isConnected bool
connMu sync.RWMutex
}
// NewJobConnection creates a new job connection wrapper.
func NewJobConnection() *JobConnection {
return &JobConnection{}
}
// Connect establishes a WebSocket connection for a job (no runnerID needed).
func (j *JobConnection) Connect(managerURL, jobPath, jobToken string) error {
wsPath := jobPath + "/ws"
wsURL := strings.Replace(managerURL, "http://", "ws://", 1)
wsURL = strings.Replace(wsURL, "https://", "wss://", 1)
wsURL += wsPath
log.Printf("Connecting to job WebSocket: %s", wsPath)
dialer := websocket.Dialer{
HandshakeTimeout: 10 * time.Second,
}
conn, _, err := dialer.Dial(wsURL, nil)
if err != nil {
return fmt.Errorf("failed to connect job WebSocket: %w", err)
}
j.conn = conn
// Send auth message
authMsg := map[string]interface{}{
"type": "auth",
"job_token": jobToken,
}
if err := conn.WriteJSON(authMsg); err != nil {
conn.Close()
return fmt.Errorf("failed to send auth: %w", err)
}
// Wait for auth_ok
conn.SetReadDeadline(time.Now().Add(30 * time.Second))
var authResp map[string]string
if err := conn.ReadJSON(&authResp); err != nil {
conn.Close()
return fmt.Errorf("failed to read auth response: %w", err)
}
if authResp["type"] == "error" {
conn.Close()
return fmt.Errorf("auth failed: %s", authResp["message"])
}
if authResp["type"] != "auth_ok" {
conn.Close()
return fmt.Errorf("unexpected auth response: %s", authResp["type"])
}
// Clear read deadline after auth
conn.SetReadDeadline(time.Time{})
// Set up ping/pong handler for keepalive
conn.SetPongHandler(func(string) error {
conn.SetReadDeadline(time.Now().Add(90 * time.Second))
return nil
})
// Start ping goroutine
j.stopPing = make(chan struct{})
j.connMu.Lock()
j.isConnected = true
j.connMu.Unlock()
go j.pingLoop()
// Start WebSocket heartbeat goroutine
j.stopHeartbeat = make(chan struct{})
go j.heartbeatLoop()
return nil
}
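// For reference, the handshake implemented above exchanges JSON messages shaped
// like the following (field names taken from this client code; the manager may
// include additional fields):
//
//	runner -> manager: {"type": "auth", "job_token": "<token from next-job>"}
//	manager -> runner: {"type": "auth_ok"}                   (success)
//	manager -> runner: {"type": "error", "message": "..."}   (failure)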
// pingLoop sends periodic pings to keep the WebSocket connection alive.
func (j *JobConnection) pingLoop() {
defer func() {
if rec := recover(); rec != nil {
log.Printf("Ping loop panicked: %v", rec)
}
}()
ticker := time.NewTicker(30 * time.Second)
defer ticker.Stop()
for {
select {
case <-j.stopPing:
return
case <-ticker.C:
j.writeMu.Lock()
if j.conn != nil {
deadline := time.Now().Add(10 * time.Second)
if err := j.conn.WriteControl(websocket.PingMessage, []byte{}, deadline); err != nil {
log.Printf("Failed to send ping, closing connection: %v", err)
j.connMu.Lock()
j.isConnected = false
if j.conn != nil {
j.conn.Close()
j.conn = nil
}
j.connMu.Unlock()
}
}
j.writeMu.Unlock()
}
}
}
// Heartbeat sends a heartbeat message over WebSocket to keep runner online.
func (j *JobConnection) Heartbeat() {
if j.conn == nil {
return
}
j.writeMu.Lock()
defer j.writeMu.Unlock()
msg := map[string]interface{}{
"type": "runner_heartbeat",
"timestamp": time.Now().Unix(),
}
if err := j.conn.WriteJSON(msg); err != nil {
log.Printf("Failed to send WebSocket heartbeat: %v", err)
// Handle connection failure
j.connMu.Lock()
j.isConnected = false
if j.conn != nil {
j.conn.Close()
j.conn = nil
}
j.connMu.Unlock()
}
}
// heartbeatLoop sends periodic heartbeat messages over WebSocket.
func (j *JobConnection) heartbeatLoop() {
defer func() {
if rec := recover(); rec != nil {
log.Printf("WebSocket heartbeat loop panicked: %v", rec)
}
}()
ticker := time.NewTicker(30 * time.Second)
defer ticker.Stop()
for {
select {
case <-j.stopHeartbeat:
return
case <-ticker.C:
j.Heartbeat()
}
}
}
// Close closes the WebSocket connection.
func (j *JobConnection) Close() {
j.connMu.Lock()
j.isConnected = false
j.connMu.Unlock()
// Stop heartbeat goroutine
if j.stopHeartbeat != nil {
close(j.stopHeartbeat)
j.stopHeartbeat = nil
}
// Stop ping goroutine
if j.stopPing != nil {
close(j.stopPing)
j.stopPing = nil
}
if j.conn != nil {
j.conn.Close()
j.conn = nil
}
}
// IsConnected returns true if the connection is established.
func (j *JobConnection) IsConnected() bool {
j.connMu.RLock()
defer j.connMu.RUnlock()
return j.isConnected && j.conn != nil
}
// Log sends a log entry to the manager.
func (j *JobConnection) Log(taskID int64, level types.LogLevel, message string) {
if j.conn == nil {
return
}
j.writeMu.Lock()
defer j.writeMu.Unlock()
msg := map[string]interface{}{
"type": "log_entry",
"data": map[string]interface{}{
"task_id": taskID,
"log_level": string(level),
"message": message,
},
"timestamp": time.Now().Unix(),
}
if err := j.conn.WriteJSON(msg); err != nil {
log.Printf("Failed to send job log, connection may be broken: %v", err)
// Close the connection on write error
j.connMu.Lock()
j.isConnected = false
if j.conn != nil {
j.conn.Close()
j.conn = nil
}
j.connMu.Unlock()
}
}
// Progress sends a progress update to the manager.
func (j *JobConnection) Progress(taskID int64, progress float64) {
if j.conn == nil {
return
}
j.writeMu.Lock()
defer j.writeMu.Unlock()
msg := map[string]interface{}{
"type": "progress",
"data": map[string]interface{}{
"task_id": taskID,
"progress": progress,
},
"timestamp": time.Now().Unix(),
}
if err := j.conn.WriteJSON(msg); err != nil {
log.Printf("Failed to send job progress, connection may be broken: %v", err)
// Close the connection on write error
j.connMu.Lock()
j.isConnected = false
if j.conn != nil {
j.conn.Close()
j.conn = nil
}
j.connMu.Unlock()
}
}
// OutputUploaded notifies that an output file was uploaded.
func (j *JobConnection) OutputUploaded(taskID int64, fileName string) {
if j.conn == nil {
return
}
j.writeMu.Lock()
defer j.writeMu.Unlock()
msg := map[string]interface{}{
"type": "output_uploaded",
"data": map[string]interface{}{
"task_id": taskID,
"file_name": fileName,
},
"timestamp": time.Now().Unix(),
}
if err := j.conn.WriteJSON(msg); err != nil {
log.Printf("Failed to send output uploaded, connection may be broken: %v", err)
// Close the connection on write error
j.connMu.Lock()
j.isConnected = false
if j.conn != nil {
j.conn.Close()
j.conn = nil
}
j.connMu.Unlock()
}
}
// Complete sends task completion to the manager.
func (j *JobConnection) Complete(taskID int64, success bool, errorMsg error) {
if j.conn == nil {
log.Printf("Cannot send task complete: WebSocket connection is nil")
return
}
j.writeMu.Lock()
defer j.writeMu.Unlock()
// Serialize the error as a string; most error values marshal to an empty JSON object.
var errStr string
if errorMsg != nil {
errStr = errorMsg.Error()
}
msg := map[string]interface{}{
"type": "task_complete",
"data": map[string]interface{}{
"task_id": taskID,
"success": success,
"error": errStr,
},
"timestamp": time.Now().Unix(),
}
if err := j.conn.WriteJSON(msg); err != nil {
log.Printf("Failed to send task complete, connection may be broken: %v", err)
// Close the connection on write error
j.connMu.Lock()
j.isConnected = false
if j.conn != nil {
j.conn.Close()
j.conn = nil
}
j.connMu.Unlock()
}
}
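For orientation, here is a minimal usage sketch (not part of this diff) of how a runner task loop might drive a JobConnection. The manager URL, job path, job token, task ID and file name are placeholders that would come from the manager's next-job response.

package main

import (
	"fmt"
	"log"

	"jiggablend/internal/runner/api"
	"jiggablend/pkg/types"
)

func runTask(managerURL, jobPath, jobToken string, taskID int64) error {
	jc := api.NewJobConnection()
	if err := jc.Connect(managerURL, jobPath, jobToken); err != nil {
		return fmt.Errorf("connect: %w", err)
	}
	defer jc.Close()

	jc.Log(taskID, types.LogLevelInfo, "starting render")
	jc.Progress(taskID, 0.5)                    // halfway through the task
	jc.OutputUploaded(taskID, "frame_0001.png") // after uploading via the HTTP API
	jc.Complete(taskID, true, nil)
	return nil
}

func main() {
	// Placeholder values; a real runner gets these from the next-job poll.
	if err := runTask("http://localhost:8080", "/api/runner/jobs/1", "job-token", 1); err != nil {
		log.Fatal(err)
	}
}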


@@ -0,0 +1,421 @@
// Package api provides HTTP and WebSocket communication with the manager server.
package api
import (
"bytes"
"encoding/json"
"fmt"
"io"
"mime/multipart"
"net/http"
"net/url"
"os"
"path/filepath"
"strings"
"time"
"jiggablend/pkg/types"
)
// ManagerClient handles all HTTP communication with the manager server.
type ManagerClient struct {
baseURL string
apiKey string
runnerID int64
httpClient *http.Client // Standard timeout for quick requests
longClient *http.Client // No timeout for large file transfers
}
// NewManagerClient creates a new manager client.
func NewManagerClient(baseURL string) *ManagerClient {
return &ManagerClient{
baseURL: strings.TrimSuffix(baseURL, "/"),
httpClient: &http.Client{Timeout: 30 * time.Second},
longClient: &http.Client{Timeout: 0}, // No timeout for large transfers
}
}
// SetCredentials sets the API key and runner ID after registration.
func (m *ManagerClient) SetCredentials(runnerID int64, apiKey string) {
m.runnerID = runnerID
m.apiKey = apiKey
}
// GetRunnerID returns the registered runner ID.
func (m *ManagerClient) GetRunnerID() int64 {
return m.runnerID
}
// GetAPIKey returns the API key.
func (m *ManagerClient) GetAPIKey() string {
return m.apiKey
}
// GetBaseURL returns the base URL.
func (m *ManagerClient) GetBaseURL() string {
return m.baseURL
}
// Request performs an authenticated HTTP request with standard timeout.
func (m *ManagerClient) Request(method, path string, body []byte) (*http.Response, error) {
return m.doRequest(method, path, body, m.httpClient)
}
// RequestLong performs an authenticated HTTP request with no timeout.
// Use for large file uploads/downloads.
func (m *ManagerClient) RequestLong(method, path string, body []byte) (*http.Response, error) {
return m.doRequest(method, path, body, m.longClient)
}
func (m *ManagerClient) doRequest(method, path string, body []byte, client *http.Client) (*http.Response, error) {
if m.apiKey == "" {
return nil, fmt.Errorf("not authenticated")
}
fullURL := m.baseURL + path
req, err := http.NewRequest(method, fullURL, bytes.NewReader(body))
if err != nil {
return nil, err
}
req.Header.Set("Authorization", "Bearer "+m.apiKey)
if len(body) > 0 {
req.Header.Set("Content-Type", "application/json")
}
return client.Do(req)
}
// RequestWithToken performs an authenticated HTTP request using a specific token.
func (m *ManagerClient) RequestWithToken(method, path, token string, body []byte) (*http.Response, error) {
return m.doRequestWithToken(method, path, token, body, m.httpClient)
}
// RequestLongWithToken performs a long-running request with a specific token.
func (m *ManagerClient) RequestLongWithToken(method, path, token string, body []byte) (*http.Response, error) {
return m.doRequestWithToken(method, path, token, body, m.longClient)
}
func (m *ManagerClient) doRequestWithToken(method, path, token string, body []byte, client *http.Client) (*http.Response, error) {
fullURL := m.baseURL + path
req, err := http.NewRequest(method, fullURL, bytes.NewReader(body))
if err != nil {
return nil, err
}
req.Header.Set("Authorization", "Bearer "+token)
if len(body) > 0 {
req.Header.Set("Content-Type", "application/json")
}
return client.Do(req)
}
// RegisterRequest is the request body for runner registration.
type RegisterRequest struct {
Name string `json:"name"`
Hostname string `json:"hostname"`
Capabilities string `json:"capabilities"`
APIKey string `json:"api_key"`
Fingerprint string `json:"fingerprint,omitempty"`
}
// RegisterResponse is the response from runner registration.
type RegisterResponse struct {
ID int64 `json:"id"`
}
// Register registers the runner with the manager.
func (m *ManagerClient) Register(name, hostname string, capabilities map[string]interface{}, registrationToken, fingerprint string) (int64, error) {
capsJSON, err := json.Marshal(capabilities)
if err != nil {
return 0, fmt.Errorf("failed to marshal capabilities: %w", err)
}
reqBody := RegisterRequest{
Name: name,
Hostname: hostname,
Capabilities: string(capsJSON),
APIKey: registrationToken,
}
// Only send fingerprint for non-fixed API keys
if !strings.HasPrefix(registrationToken, "jk_r0_") {
reqBody.Fingerprint = fingerprint
}
body, _ := json.Marshal(reqBody)
resp, err := m.httpClient.Post(
m.baseURL+"/api/runner/register",
"application/json",
bytes.NewReader(body),
)
if err != nil {
return 0, fmt.Errorf("connection error: %w", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusCreated {
bodyBytes, _ := io.ReadAll(resp.Body)
errorBody := string(bodyBytes)
// Check for token-related errors (should not retry)
if resp.StatusCode == http.StatusUnauthorized || resp.StatusCode == http.StatusBadRequest {
errorLower := strings.ToLower(errorBody)
if strings.Contains(errorLower, "invalid") ||
strings.Contains(errorLower, "expired") ||
strings.Contains(errorLower, "already used") ||
strings.Contains(errorLower, "token") {
return 0, fmt.Errorf("token error: %s", errorBody)
}
}
return 0, fmt.Errorf("registration failed (status %d): %s", resp.StatusCode, errorBody)
}
var result RegisterResponse
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
return 0, fmt.Errorf("failed to decode response: %w", err)
}
m.runnerID = result.ID
m.apiKey = registrationToken
return result.ID, nil
}
// NextJobResponse represents the response from the next-job endpoint.
type NextJobResponse struct {
JobToken string `json:"job_token"`
JobPath string `json:"job_path"`
Task NextJobTaskInfo `json:"task"`
}
// NextJobTaskInfo contains task information from the next-job response.
type NextJobTaskInfo struct {
TaskID int64 `json:"task_id"`
JobID int64 `json:"job_id"`
JobName string `json:"job_name"`
Frame int `json:"frame"`
TaskType string `json:"task_type"`
Metadata *types.BlendMetadata `json:"metadata,omitempty"`
}
// PollNextJob polls the manager for the next available job.
// Returns nil, nil if no job is available.
func (m *ManagerClient) PollNextJob() (*NextJobResponse, error) {
if m.runnerID == 0 || m.apiKey == "" {
return nil, fmt.Errorf("runner not authenticated")
}
path := fmt.Sprintf("/api/runner/workers/%d/next-job", m.runnerID)
resp, err := m.Request("GET", path, nil)
if err != nil {
return nil, fmt.Errorf("failed to poll for job: %w", err)
}
defer resp.Body.Close()
if resp.StatusCode == http.StatusNoContent {
return nil, nil // No job available
}
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
return nil, fmt.Errorf("unexpected status %d: %s", resp.StatusCode, string(body))
}
var job NextJobResponse
if err := json.NewDecoder(resp.Body).Decode(&job); err != nil {
return nil, fmt.Errorf("failed to decode job response: %w", err)
}
return &job, nil
}
// DownloadContext downloads the job context tar file.
func (m *ManagerClient) DownloadContext(contextPath, jobToken string) (io.ReadCloser, error) {
resp, err := m.RequestLongWithToken("GET", contextPath, jobToken, nil)
if err != nil {
return nil, fmt.Errorf("failed to download context: %w", err)
}
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
resp.Body.Close()
return nil, fmt.Errorf("context download failed with status %d: %s", resp.StatusCode, string(body))
}
return resp.Body, nil
}
// UploadFile uploads a file to the manager.
func (m *ManagerClient) UploadFile(uploadPath, jobToken, filePath string) error {
file, err := os.Open(filePath)
if err != nil {
return fmt.Errorf("failed to open file: %w", err)
}
defer file.Close()
// Create multipart form
body := &bytes.Buffer{}
writer := multipart.NewWriter(body)
part, err := writer.CreateFormFile("file", filepath.Base(filePath))
if err != nil {
return fmt.Errorf("failed to create form file: %w", err)
}
if _, err := io.Copy(part, file); err != nil {
return fmt.Errorf("failed to copy file to form: %w", err)
}
writer.Close()
fullURL := m.baseURL + uploadPath
req, err := http.NewRequest("POST", fullURL, body)
if err != nil {
return err
}
req.Header.Set("Authorization", "Bearer "+jobToken)
req.Header.Set("Content-Type", writer.FormDataContentType())
resp, err := m.longClient.Do(req)
if err != nil {
return fmt.Errorf("failed to upload file: %w", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusOK {
respBody, _ := io.ReadAll(resp.Body)
return fmt.Errorf("upload failed with status %d: %s", resp.StatusCode, string(respBody))
}
return nil
}
// GetJobMetadata retrieves job metadata from the manager.
func (m *ManagerClient) GetJobMetadata(jobID int64) (*types.BlendMetadata, error) {
path := fmt.Sprintf("/api/runner/jobs/%d/metadata?runner_id=%d", jobID, m.runnerID)
resp, err := m.Request("GET", path, nil)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode == http.StatusNotFound {
return nil, nil // No metadata found
}
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
return nil, fmt.Errorf("failed to get job metadata: %s", string(body))
}
var metadata types.BlendMetadata
if err := json.NewDecoder(resp.Body).Decode(&metadata); err != nil {
return nil, err
}
return &metadata, nil
}
// JobFile represents a file associated with a job.
type JobFile struct {
ID int64 `json:"id"`
JobID int64 `json:"job_id"`
FileType string `json:"file_type"`
FilePath string `json:"file_path"`
FileName string `json:"file_name"`
FileSize int64 `json:"file_size"`
}
// GetJobFiles retrieves the list of files for a job.
func (m *ManagerClient) GetJobFiles(jobID int64) ([]JobFile, error) {
path := fmt.Sprintf("/api/runner/jobs/%d/files?runner_id=%d", jobID, m.runnerID)
resp, err := m.Request("GET", path, nil)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
return nil, fmt.Errorf("failed to get job files: %s", string(body))
}
var files []JobFile
if err := json.NewDecoder(resp.Body).Decode(&files); err != nil {
return nil, err
}
return files, nil
}
// DownloadFrame downloads a frame file from the manager.
func (m *ManagerClient) DownloadFrame(jobID int64, fileName, destPath string) error {
encodedFileName := url.PathEscape(fileName)
path := fmt.Sprintf("/api/runner/files/%d/%s?runner_id=%d", jobID, encodedFileName, m.runnerID)
resp, err := m.RequestLong("GET", path, nil)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
return fmt.Errorf("download failed: %s", string(body))
}
file, err := os.Create(destPath)
if err != nil {
return err
}
defer file.Close()
_, err = io.Copy(file, resp.Body)
return err
}
// SubmitMetadata submits extracted metadata to the manager.
func (m *ManagerClient) SubmitMetadata(jobID int64, metadata types.BlendMetadata) error {
metadataJSON, err := json.Marshal(metadata)
if err != nil {
return fmt.Errorf("failed to marshal metadata: %w", err)
}
path := fmt.Sprintf("/api/runner/jobs/%d/metadata?runner_id=%d", jobID, m.runnerID)
fullURL := m.baseURL + path
req, err := http.NewRequest("POST", fullURL, bytes.NewReader(metadataJSON))
if err != nil {
return fmt.Errorf("failed to create request: %w", err)
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Authorization", "Bearer "+m.apiKey)
resp, err := m.httpClient.Do(req)
if err != nil {
return fmt.Errorf("failed to submit metadata: %w", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
return fmt.Errorf("metadata submission failed: %s", string(body))
}
return nil
}
// DownloadBlender downloads a Blender version from the manager.
func (m *ManagerClient) DownloadBlender(version string) (io.ReadCloser, error) {
path := fmt.Sprintf("/api/runner/blender/download?version=%s&runner_id=%d", version, m.runnerID)
resp, err := m.RequestLong("GET", path, nil)
if err != nil {
return nil, fmt.Errorf("failed to download blender from manager: %w", err)
}
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
resp.Body.Close()
return nil, fmt.Errorf("failed to download blender: status %d, body: %s", resp.StatusCode, string(body))
}
return resp.Body, nil
}
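A hypothetical polling loop built on ManagerClient, for illustration only: the manager URL, credentials, context/upload path shapes and file name are placeholders, and error handling is abbreviated.

package main

import (
	"log"
	"time"

	"jiggablend/internal/runner/api"
)

func main() {
	client := api.NewManagerClient("http://localhost:8080") // placeholder manager URL
	client.SetCredentials(42, "runner-api-key")             // placeholder credentials

	for {
		job, err := client.PollNextJob()
		if err != nil {
			log.Printf("poll failed: %v", err)
			time.Sleep(5 * time.Second)
			continue
		}
		if job == nil { // HTTP 204: no work available
			time.Sleep(2 * time.Second)
			continue
		}
		log.Printf("got task %d (frame %d) of job %q", job.Task.TaskID, job.Task.Frame, job.Task.JobName)

		// Assumed path shapes; a real runner uses whatever paths the manager
		// hands out alongside the job token.
		ctx, err := client.DownloadContext(job.JobPath+"/context", job.JobToken)
		if err != nil {
			log.Printf("context download failed: %v", err)
			continue
		}
		ctx.Close() // a real runner would extract the tar stream here

		if err := client.UploadFile(job.JobPath+"/output", job.JobToken, "/tmp/frame_0001.png"); err != nil {
			log.Printf("upload failed: %v", err)
		}
	}
}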


@@ -0,0 +1,87 @@
// Package blender handles Blender binary management and execution.
package blender
import (
"fmt"
"log"
"os"
"path/filepath"
"jiggablend/internal/runner/api"
"jiggablend/internal/runner/workspace"
)
// Manager handles Blender binary downloads and management.
type Manager struct {
manager *api.ManagerClient
workspaceDir string
}
// NewManager creates a new Blender manager.
func NewManager(managerClient *api.ManagerClient, workspaceDir string) *Manager {
return &Manager{
manager: managerClient,
workspaceDir: workspaceDir,
}
}
// GetBinaryPath returns the path to the Blender binary for a specific version.
// Downloads from manager and extracts if not already present.
func (m *Manager) GetBinaryPath(version string) (string, error) {
blenderDir := filepath.Join(m.workspaceDir, "blender-versions")
if err := os.MkdirAll(blenderDir, 0755); err != nil {
return "", fmt.Errorf("failed to create blender directory: %w", err)
}
// Check if already installed - look for version folder first
versionDir := filepath.Join(blenderDir, version)
binaryPath := filepath.Join(versionDir, "blender")
// Check if version folder exists and contains the binary
if versionInfo, err := os.Stat(versionDir); err == nil && versionInfo.IsDir() {
// Version folder exists, check if binary is present
if binaryInfo, err := os.Stat(binaryPath); err == nil {
// Verify it's actually a file (not a directory)
if !binaryInfo.IsDir() {
log.Printf("Found existing Blender %s installation at %s", version, binaryPath)
return binaryPath, nil
}
}
// Version folder exists but binary is missing - might be incomplete installation
log.Printf("Version folder %s exists but binary not found, will re-download", versionDir)
}
// Download from manager
log.Printf("Downloading Blender %s from manager", version)
reader, err := m.manager.DownloadBlender(version)
if err != nil {
return "", err
}
defer reader.Close()
// Manager serves pre-decompressed .tar files - extract directly
log.Printf("Extracting Blender %s...", version)
if err := workspace.ExtractTarStripPrefix(reader, versionDir); err != nil {
return "", fmt.Errorf("failed to extract blender: %w", err)
}
// Verify binary exists
if _, err := os.Stat(binaryPath); err != nil {
return "", fmt.Errorf("blender binary not found after extraction")
}
log.Printf("Blender %s installed at %s", version, binaryPath)
return binaryPath, nil
}
// GetBinaryForJob returns the Blender binary path for a job.
// Uses the version from metadata or falls back to system blender.
func (m *Manager) GetBinaryForJob(version string) (string, error) {
if version == "" {
return "blender", nil // System blender
}
return m.GetBinaryPath(version)
}
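A short, hypothetical usage sketch; the manager URL, credentials, workspace directory and version string are placeholders.

package main

import (
	"log"

	"jiggablend/internal/runner/api"
	"jiggablend/internal/runner/blender"
)

func main() {
	client := api.NewManagerClient("http://localhost:8080") // placeholder manager URL
	client.SetCredentials(42, "runner-api-key")             // placeholder credentials

	bm := blender.NewManager(client, "/var/lib/jiggablend/workspace") // assumed workspace dir
	// An empty version falls back to the system "blender"; otherwise the requested
	// version is downloaded from the manager once and cached under blender-versions/.
	bin, err := bm.GetBinaryForJob("4.2")
	if err != nil {
		log.Fatalf("failed to resolve blender binary: %v", err)
	}
	log.Printf("using blender at %s", bin)
}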


@@ -0,0 +1,100 @@
package blender
import (
"regexp"
"strings"
"jiggablend/pkg/types"
)
// FilterLog checks if a Blender log line should be filtered or downgraded.
// Returns (shouldFilter, logLevel) - if shouldFilter is true, the log should be skipped.
func FilterLog(line string) (shouldFilter bool, logLevel types.LogLevel) {
trimmed := strings.TrimSpace(line)
// Filter out empty lines
if trimmed == "" {
return true, types.LogLevelInfo
}
// Filter out separator lines
if trimmed == "--------------------------------------------------------------------" ||
(strings.HasPrefix(trimmed, "-----") && strings.Contains(trimmed, "----")) {
return true, types.LogLevelInfo
}
// Filter out trace headers
upperLine := strings.ToUpper(trimmed)
upperOriginal := strings.ToUpper(line)
if trimmed == "Trace:" ||
trimmed == "Depth Type Name" ||
trimmed == "----- ---- ----" ||
line == "Depth Type Name" ||
line == "----- ---- ----" ||
(strings.Contains(upperLine, "DEPTH") && strings.Contains(upperLine, "TYPE") && strings.Contains(upperLine, "NAME")) ||
(strings.Contains(upperOriginal, "DEPTH") && strings.Contains(upperOriginal, "TYPE") && strings.Contains(upperOriginal, "NAME")) ||
strings.Contains(line, "Depth Type Name") ||
strings.Contains(line, "----- ---- ----") ||
strings.HasPrefix(trimmed, "-----") ||
regexp.MustCompile(`^[-]+\s+[-]+\s+[-]+$`).MatchString(trimmed) {
return true, types.LogLevelInfo
}
// Completely filter out dependency graph messages (they're just noise)
dependencyGraphPatterns := []string{
"Failed to add relation",
"Could not find op_from",
"OperationKey",
"find_node_operation: Failed for",
"BONE_DONE",
"component name:",
"operation code:",
"rope_ctrl_rot_",
}
for _, pattern := range dependencyGraphPatterns {
if strings.Contains(line, pattern) {
return true, types.LogLevelInfo
}
}
// Filter out animation system warnings (invalid drivers are common and harmless)
animationSystemPatterns := []string{
"BKE_animsys_eval_driver: invalid driver",
"bke.anim_sys",
"rotation_quaternion[",
"constraints[",
".influence[0]",
"pose.bones[",
}
for _, pattern := range animationSystemPatterns {
if strings.Contains(line, pattern) {
return true, types.LogLevelInfo
}
}
// Filter out modifier warnings (common when vertices change)
modifierPatterns := []string{
"BKE_modifier_set_error",
"bke.modifier",
"Vertices changed from",
"Modifier:",
}
for _, pattern := range modifierPatterns {
if strings.Contains(line, pattern) {
return true, types.LogLevelInfo
}
}
// Filter out lines that are just numbers or trace depth indicators
// Pattern: number, word, word (e.g., "1 Object timer_box_franck")
if matched, _ := regexp.MatchString(`^\d+\s+\w+\s+\w+`, trimmed); matched {
return true, types.LogLevelInfo
}
return false, types.LogLevelInfo
}
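A sketch of how the filter might be wired into a render process's output stream; the Blender invocation and scene file are placeholders, and a real runner would forward kept lines via JobConnection.Log instead of the standard logger.

package main

import (
	"bufio"
	"log"
	"os/exec"

	"jiggablend/internal/runner/blender"
)

func main() {
	// Placeholder invocation: render frame 1 of scene.blend in background mode.
	cmd := exec.Command("blender", "-b", "scene.blend", "-f", "1")
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		log.Fatal(err)
	}
	if err := cmd.Start(); err != nil {
		log.Fatal(err)
	}

	scanner := bufio.NewScanner(stdout)
	for scanner.Scan() {
		line := scanner.Text()
		if skip, level := blender.FilterLog(line); !skip {
			log.Printf("[%v] %s", level, line)
		}
	}
	_ = cmd.Wait()
}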


@@ -0,0 +1,143 @@
package blender
import (
"compress/gzip"
"fmt"
"io"
"os"
"os/exec"
)
// ParseVersionFromFile parses the Blender version that a .blend file was saved with.
// Returns major and minor version numbers.
func ParseVersionFromFile(blendPath string) (major, minor int, err error) {
file, err := os.Open(blendPath)
if err != nil {
return 0, 0, fmt.Errorf("failed to open blend file: %w", err)
}
defer file.Close()
// Read the first 12 bytes of the blend file header.
// The header layout is: "BLENDER" (7 bytes) + pointer size (1 byte: '-' for 64-bit, '_' for 32-bit)
// + endianness (1 byte: 'v' for little-endian, 'V' for big-endian)
// + version (3 bytes: e.g., "402" for Blender 4.2), giving headers like "BLENDER-v402"
header := make([]byte, 12)
if _, err := io.ReadFull(file, header); err != nil {
return 0, 0, fmt.Errorf("failed to read blend file header: %w", err)
}
// Check for BLENDER magic
if string(header[:7]) != "BLENDER" {
// Might be compressed - try to decompress
file.Seek(0, 0)
return parseCompressedVersion(file)
}
// Parse version from bytes 9-11 (3 digits)
versionStr := string(header[9:12])
// Version encoding: three digits, major version followed by a two-digit minor
// Pre-3.0: "279" = 2.79, "280" = 2.80
// 3.0+: "300" = 3.0, "402" = 4.2
if len(versionStr) == 3 {
// First digit is major version
fmt.Sscanf(string(versionStr[0]), "%d", &major)
// Next two digits are minor version
fmt.Sscanf(versionStr[1:3], "%d", &minor)
}
return major, minor, nil
}
// parseCompressedVersion handles gzip and zstd compressed blend files.
func parseCompressedVersion(file *os.File) (major, minor int, err error) {
magic := make([]byte, 4)
if _, err := file.Read(magic); err != nil {
return 0, 0, err
}
file.Seek(0, 0)
if magic[0] == 0x1f && magic[1] == 0x8b {
// gzip compressed
gzReader, err := gzip.NewReader(file)
if err != nil {
return 0, 0, fmt.Errorf("failed to create gzip reader: %w", err)
}
defer gzReader.Close()
header := make([]byte, 12)
if _, err := io.ReadFull(gzReader, header); err != nil {
return 0, 0, fmt.Errorf("failed to read compressed blend header: %w", err)
}
if string(header[:7]) != "BLENDER" {
return 0, 0, fmt.Errorf("invalid blend file format")
}
versionStr := string(header[9:12])
if len(versionStr) == 3 {
fmt.Sscanf(string(versionStr[0]), "%d", &major)
fmt.Sscanf(versionStr[1:3], "%d", &minor)
}
return major, minor, nil
}
// Check for zstd magic (Blender 3.0+): 0x28 0xB5 0x2F 0xFD
if magic[0] == 0x28 && magic[1] == 0xb5 && magic[2] == 0x2f && magic[3] == 0xfd {
return parseZstdVersion(file)
}
return 0, 0, fmt.Errorf("unknown blend file format")
}
// parseZstdVersion handles zstd-compressed blend files (Blender 3.0+).
// Uses zstd command line tool since Go doesn't have native zstd support.
func parseZstdVersion(file *os.File) (major, minor int, err error) {
file.Seek(0, 0)
cmd := exec.Command("zstd", "-d", "-c")
cmd.Stdin = file
stdout, err := cmd.StdoutPipe()
if err != nil {
return 0, 0, fmt.Errorf("failed to create zstd stdout pipe: %w", err)
}
if err := cmd.Start(); err != nil {
return 0, 0, fmt.Errorf("failed to start zstd decompression: %w", err)
}
// Read just the header (12 bytes)
header := make([]byte, 12)
n, readErr := io.ReadFull(stdout, header)
// Kill the process early - we only need the header
cmd.Process.Kill()
cmd.Wait()
if readErr != nil || n < 12 {
return 0, 0, fmt.Errorf("failed to read zstd compressed blend header: %v", readErr)
}
if string(header[:7]) != "BLENDER" {
return 0, 0, fmt.Errorf("invalid blend file format in zstd archive")
}
versionStr := string(header[9:12])
if len(versionStr) == 3 {
fmt.Sscanf(string(versionStr[0]), "%d", &major)
fmt.Sscanf(versionStr[1:3], "%d", &minor)
}
return major, minor, nil
}
// VersionString returns a formatted version string like "4.2".
func VersionString(major, minor int) string {
return fmt.Sprintf("%d.%d", major, minor)
}
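A worked example tying the pieces together (the path below is a placeholder): a 64-bit, little-endian file saved by Blender 4.2 begins with the bytes "BLENDER-v402", which ParseVersionFromFile reports as major 4, minor 2.

package main

import (
	"fmt"
	"log"

	"jiggablend/internal/runner/blender"
)

func main() {
	// Placeholder path; plain, gzip-compressed and zstd-compressed .blend files are handled.
	major, minor, err := blender.ParseVersionFromFile("/tmp/scene.blend")
	if err != nil {
		log.Fatalf("could not parse blend version: %v", err)
	}
	fmt.Println(blender.VersionString(major, minor)) // e.g. "4.2"
}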

File diff suppressed because it is too large


@@ -0,0 +1,71 @@
// Package encoding handles video encoding with software encoders.
package encoding
import (
"os/exec"
)
// Encoder represents a video encoder.
type Encoder interface {
Name() string
Codec() string
Available() bool
BuildCommand(config *EncodeConfig) *exec.Cmd
}
// EncodeConfig holds configuration for video encoding.
type EncodeConfig struct {
InputPattern string // Input file pattern (e.g., "frame_%04d.exr")
OutputPath string // Output file path
StartFrame int // Starting frame number
FrameRate float64 // Frame rate
WorkDir string // Working directory
UseAlpha bool // Whether to preserve alpha channel
TwoPass bool // Whether to use 2-pass encoding
SourceFormat string // Source format: "exr" or "png" (defaults to "exr")
PreserveHDR bool // Whether to preserve HDR range for EXR (uses HLG with bt709 primaries)
}
// Selector selects the software encoder.
type Selector struct {
h264Encoders []Encoder
av1Encoders []Encoder
vp9Encoders []Encoder
}
// NewSelector creates a new encoder selector with software encoders.
func NewSelector() *Selector {
s := &Selector{}
s.detectEncoders()
return s
}
func (s *Selector) detectEncoders() {
// Use software encoding only - reliable and avoids hardware-specific colorspace issues
s.h264Encoders = []Encoder{
&SoftwareEncoder{codec: "libx264"},
}
s.av1Encoders = []Encoder{
&SoftwareEncoder{codec: "libaom-av1"},
}
s.vp9Encoders = []Encoder{
&SoftwareEncoder{codec: "libvpx-vp9"},
}
}
// SelectH264 returns the software H.264 encoder.
func (s *Selector) SelectH264() Encoder {
return &SoftwareEncoder{codec: "libx264"}
}
// SelectAV1 returns the software AV1 encoder.
func (s *Selector) SelectAV1() Encoder {
return &SoftwareEncoder{codec: "libaom-av1"}
}
// SelectVP9 returns the software VP9 encoder.
func (s *Selector) SelectVP9() Encoder {
return &SoftwareEncoder{codec: "libvpx-vp9"}
}
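A single-pass usage sketch, not taken from this diff: it assumes ffmpeg with libx264 is installed and that the rendered frames already exist in the (placeholder) work directory.

package main

import (
	"log"

	"jiggablend/internal/runner/encoding"
)

func main() {
	enc := encoding.NewSelector().SelectH264() // always the software libx264 encoder

	cfg := &encoding.EncodeConfig{
		InputPattern: "frame_%04d.exr", // assumed frame naming
		OutputPath:   "preview.mp4",
		StartFrame:   1,
		FrameRate:    24.0,
		WorkDir:      "/tmp/render", // placeholder; frames are expected here
		SourceFormat: "exr",
		PreserveHDR:  false, // encode as SDR bt709
	}
	if out, err := enc.BuildCommand(cfg).CombinedOutput(); err != nil {
		log.Fatalf("ffmpeg failed: %v\n%s", err, out)
	}
}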


@@ -0,0 +1,270 @@
package encoding
import (
"fmt"
"log"
"os/exec"
"strconv"
"strings"
)
const (
// CRFH264 is the Constant Rate Factor for H.264 encoding (lower = higher quality, range 0-51)
CRFH264 = 15
// CRFAV1 is the Constant Rate Factor for AV1 encoding (lower = higher quality, range 0-63)
CRFAV1 = 30
// CRFVP9 is the Constant Rate Factor for VP9 encoding (lower = higher quality, range 0-63)
CRFVP9 = 30
)
// tonemapFilter returns the ffmpeg filter chain used for HDR EXR input.
// It converts linear RGB (gbrpf32le) to sRGB with bt709 primaries first, then to
// HLG (arib-std-b67) with bt2020 primaries/matrix, preserving HDR range while
// keeping the color appearance close to the sRGB/PNG output. Converting through
// sRGB first avoids the red tint produced by a direct linear-to-HLG conversion.
// zscale numeric values (per H.273): transfer 8=linear, 13=sRGB, 18=arib-std-b67 (HLG);
// primaries 1=bt709, 9=bt2020; matrix 0=gbr (RGB input), 1=bt709, 9=bt2020nc.
func tonemapFilter(useAlpha bool) string {
filter := "format=gbrpf32le,zscale=transferin=8:transfer=13:primariesin=1:primaries=1:matrixin=0:matrix=1:rangein=full:range=full,zscale=transferin=13:transfer=18:primariesin=1:primaries=9:matrixin=1:matrix=9:rangein=full:range=full"
if useAlpha {
return filter + ",format=yuva420p10le"
}
return filter + ",format=yuv420p10le"
}
// SoftwareEncoder implements software encoding (libx264, libaom-av1, libvpx-vp9).
type SoftwareEncoder struct {
codec string
}
func (e *SoftwareEncoder) Name() string { return "software" }
func (e *SoftwareEncoder) Codec() string { return e.codec }
func (e *SoftwareEncoder) Available() bool {
return true // Software encoding is always available
}
func (e *SoftwareEncoder) BuildCommand(config *EncodeConfig) *exec.Cmd {
// Use HDR pixel formats for EXR, SDR for PNG
var pixFmt string
var colorPrimaries, colorTrc, colorspace string
if config.SourceFormat == "png" {
// PNG: SDR format
pixFmt = "yuv420p"
if config.UseAlpha {
pixFmt = "yuva420p"
}
colorPrimaries = "bt709"
colorTrc = "bt709"
colorspace = "bt709"
} else {
// EXR: Use HDR encoding if PreserveHDR is true, otherwise SDR (like PNG)
if config.PreserveHDR {
// HDR: Use HLG transfer with bt709 primaries to preserve HDR range while matching PNG color
pixFmt = "yuv420p10le" // 10-bit to preserve HDR range
if config.UseAlpha {
pixFmt = "yuva420p10le"
}
colorPrimaries = "bt709" // bt709 primaries to match PNG color appearance
colorTrc = "arib-std-b67" // HLG transfer function - preserves HDR range, works on SDR displays
colorspace = "bt709" // bt709 colorspace to match PNG
} else {
// SDR: Treat as SDR (like PNG) - encode as bt709
pixFmt = "yuv420p"
if config.UseAlpha {
pixFmt = "yuva420p"
}
colorPrimaries = "bt709"
colorTrc = "bt709"
colorspace = "bt709"
}
}
var codecArgs []string
switch e.codec {
case "libaom-av1":
codecArgs = []string{"-crf", strconv.Itoa(CRFAV1), "-b:v", "0", "-tiles", "2x2", "-g", "240"}
case "libvpx-vp9":
// VP9 supports alpha and HDR, use good quality settings
codecArgs = []string{"-crf", strconv.Itoa(CRFVP9), "-b:v", "0", "-row-mt", "1", "-g", "240"}
default:
// H.264: Use High 10 profile for HDR EXR (10-bit), High profile for SDR
if config.SourceFormat != "png" && config.PreserveHDR {
codecArgs = []string{"-preset", "veryslow", "-crf", strconv.Itoa(CRFH264), "-profile:v", "high10", "-level", "5.2", "-tune", "film", "-keyint_min", "24", "-g", "240", "-bf", "2", "-refs", "4"}
} else {
codecArgs = []string{"-preset", "veryslow", "-crf", strconv.Itoa(CRFH264), "-profile:v", "high", "-level", "5.2", "-tune", "film", "-keyint_min", "24", "-g", "240", "-bf", "2", "-refs", "4"}
}
}
args := []string{
"-y",
"-f", "image2",
"-start_number", fmt.Sprintf("%d", config.StartFrame),
"-framerate", fmt.Sprintf("%.2f", config.FrameRate),
"-i", config.InputPattern,
"-c:v", e.codec,
"-pix_fmt", pixFmt,
"-r", fmt.Sprintf("%.2f", config.FrameRate),
"-color_primaries", colorPrimaries,
"-color_trc", colorTrc,
"-colorspace", colorspace,
"-color_range", "tv",
}
// Add video filter for EXR: convert linear RGB based on HDR setting
// PNG doesn't need any filter as it's already in sRGB
if config.SourceFormat != "png" {
var vf string
if config.PreserveHDR {
// HDR: Convert linear RGB -> sRGB -> HLG with bt709 primaries
// This preserves HDR range while matching PNG color appearance
vf = "format=gbrpf32le,zscale=transferin=8:transfer=13:primariesin=1:primaries=1:matrixin=0:matrix=1:rangein=full:range=full,zscale=transferin=13:transfer=18:primariesin=1:primaries=1:matrixin=1:matrix=1:rangein=full:range=full"
if config.UseAlpha {
vf += ",format=yuva420p10le"
} else {
vf += ",format=yuv420p10le"
}
} else {
// SDR: Convert linear RGB (EXR) to sRGB (bt709) - simple conversion like Krita does
// zscale: linear (8) -> sRGB (13) with bt709 primaries/matrix
vf = "format=gbrpf32le,zscale=transferin=8:transfer=13:primariesin=1:primaries=1:matrixin=0:matrix=1:rangein=full:range=full"
if config.UseAlpha {
vf += ",format=yuva420p"
} else {
vf += ",format=yuv420p"
}
}
args = append(args, "-vf", vf)
}
args = append(args, codecArgs...)
if config.TwoPass {
// For 2-pass, this builds pass 2 command
args = append(args, "-pass", "2")
}
args = append(args, config.OutputPath)
if config.TwoPass {
log.Printf("Build Software Pass 2 command: ffmpeg %s", strings.Join(args, " "))
} else {
log.Printf("Build Software command: ffmpeg %s", strings.Join(args, " "))
}
cmd := exec.Command("ffmpeg", args...)
cmd.Dir = config.WorkDir
return cmd
}
// BuildPass1Command builds the first pass command for 2-pass encoding.
func (e *SoftwareEncoder) BuildPass1Command(config *EncodeConfig) *exec.Cmd {
// Use HDR pixel formats for EXR, SDR for PNG
var pixFmt string
var colorPrimaries, colorTrc, colorspace string
if config.SourceFormat == "png" {
// PNG: SDR format
pixFmt = "yuv420p"
if config.UseAlpha {
pixFmt = "yuva420p"
}
colorPrimaries = "bt709"
colorTrc = "bt709"
colorspace = "bt709"
} else {
// EXR: Use HDR encoding if PreserveHDR is true, otherwise SDR (like PNG)
if config.PreserveHDR {
// HDR: Use HLG transfer with bt709 primaries to preserve HDR range while matching PNG color
pixFmt = "yuv420p10le" // 10-bit to preserve HDR range
if config.UseAlpha {
pixFmt = "yuva420p10le"
}
colorPrimaries = "bt709" // bt709 primaries to match PNG color appearance
colorTrc = "arib-std-b67" // HLG transfer function - preserves HDR range, works on SDR displays
colorspace = "bt709" // bt709 colorspace to match PNG
} else {
// SDR: Treat as SDR (like PNG) - encode as bt709
pixFmt = "yuv420p"
if config.UseAlpha {
pixFmt = "yuva420p"
}
colorPrimaries = "bt709"
colorTrc = "bt709"
colorspace = "bt709"
}
}
var codecArgs []string
switch e.codec {
case "libaom-av1":
codecArgs = []string{"-crf", strconv.Itoa(CRFAV1), "-b:v", "0", "-tiles", "2x2", "-g", "240"}
case "libvpx-vp9":
// VP9 supports alpha and HDR, use good quality settings
codecArgs = []string{"-crf", strconv.Itoa(CRFVP9), "-b:v", "0", "-row-mt", "1", "-g", "240"}
default:
// H.264: Use High 10 profile for HDR EXR (10-bit), High profile for SDR
if config.SourceFormat != "png" && config.PreserveHDR {
codecArgs = []string{"-preset", "veryslow", "-crf", strconv.Itoa(CRFH264), "-profile:v", "high10", "-level", "5.2", "-tune", "film", "-keyint_min", "24", "-g", "240", "-bf", "2", "-refs", "4"}
} else {
codecArgs = []string{"-preset", "veryslow", "-crf", strconv.Itoa(CRFH264), "-profile:v", "high", "-level", "5.2", "-tune", "film", "-keyint_min", "24", "-g", "240", "-bf", "2", "-refs", "4"}
}
}
args := []string{
"-y",
"-f", "image2",
"-start_number", fmt.Sprintf("%d", config.StartFrame),
"-framerate", fmt.Sprintf("%.2f", config.FrameRate),
"-i", config.InputPattern,
"-c:v", e.codec,
"-pix_fmt", pixFmt,
"-r", fmt.Sprintf("%.2f", config.FrameRate),
"-color_primaries", colorPrimaries,
"-color_trc", colorTrc,
"-colorspace", colorspace,
"-color_range", "tv",
}
// Add video filter for EXR: convert linear RGB based on HDR setting
// PNG doesn't need any filter as it's already in sRGB
if config.SourceFormat != "png" {
var vf string
if config.PreserveHDR {
// HDR: Convert linear RGB -> sRGB -> HLG with bt709 primaries
// This preserves HDR range while matching PNG color appearance
vf = "format=gbrpf32le,zscale=transferin=8:transfer=13:primariesin=1:primaries=1:matrixin=0:matrix=1:rangein=full:range=full,zscale=transferin=13:transfer=18:primariesin=1:primaries=1:matrixin=1:matrix=1:rangein=full:range=full"
if config.UseAlpha {
vf += ",format=yuva420p10le"
} else {
vf += ",format=yuv420p10le"
}
} else {
// SDR: Convert linear RGB (EXR) to sRGB (bt709) - simple conversion like Krita does
// zscale: linear (8) -> sRGB (13) with bt709 primaries/matrix
vf = "format=gbrpf32le,zscale=transferin=8:transfer=13:primariesin=1:primaries=1:matrixin=0:matrix=1:rangein=full:range=full"
if config.UseAlpha {
vf += ",format=yuva420p"
} else {
vf += ",format=yuv420p"
}
}
args = append(args, "-vf", vf)
}
args = append(args, codecArgs...)
args = append(args, "-pass", "1", "-f", "null", "/dev/null")
log.Printf("Build Software Pass 1 command: ffmpeg %s", strings.Join(args, " "))
cmd := exec.Command("ffmpeg", args...)
cmd.Dir = config.WorkDir
return cmd
}
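For two-pass encodes, the caller presumably runs BuildPass1Command followed by BuildCommand with TwoPass set. A hypothetical VP9-with-alpha sketch (placeholder paths; ffmpeg with libvpx-vp9 assumed):

package main

import (
	"log"

	"jiggablend/internal/runner/encoding"
)

func main() {
	// SelectVP9 returns a *SoftwareEncoder; the assertion exposes BuildPass1Command,
	// which is not part of the Encoder interface.
	enc := encoding.NewSelector().SelectVP9().(*encoding.SoftwareEncoder)

	cfg := &encoding.EncodeConfig{
		InputPattern: "frame_%04d.exr", // assumed frame naming
		OutputPath:   "preview.webm",
		StartFrame:   1,
		FrameRate:    24.0,
		WorkDir:      "/tmp/render", // placeholder; ffmpeg also writes its 2-pass log here
		UseAlpha:     true,
		TwoPass:      true,
		SourceFormat: "exr",
	}

	// Pass 1 analyzes the frames and discards the output (-f null /dev/null).
	if out, err := enc.BuildPass1Command(cfg).CombinedOutput(); err != nil {
		log.Fatalf("pass 1 failed: %v\n%s", err, out)
	}
	// Pass 2: BuildCommand appends "-pass 2" when TwoPass is true.
	if out, err := enc.BuildCommand(cfg).CombinedOutput(); err != nil {
		log.Fatalf("pass 2 failed: %v\n%s", err, out)
	}
}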


@@ -0,0 +1,980 @@
package encoding
import (
"os"
"os/exec"
"path/filepath"
"strings"
"testing"
)
func TestSoftwareEncoder_BuildCommand_H264_EXR(t *testing.T) {
encoder := &SoftwareEncoder{codec: "libx264"}
config := &EncodeConfig{
InputPattern: "frame_%04d.exr",
OutputPath: "output.mp4",
StartFrame: 1,
FrameRate: 24.0,
WorkDir: "/tmp",
UseAlpha: false,
TwoPass: true,
SourceFormat: "exr",
}
cmd := encoder.BuildCommand(config)
if cmd == nil {
t.Fatal("BuildCommand returned nil")
}
if !strings.Contains(cmd.Path, "ffmpeg") {
t.Errorf("Expected command path to contain 'ffmpeg', got '%s'", cmd.Path)
}
if cmd.Dir != "/tmp" {
t.Errorf("Expected work dir '/tmp', got '%s'", cmd.Dir)
}
args := cmd.Args[1:] // Skip "ffmpeg"
argsStr := strings.Join(args, " ")
// Check required arguments
checks := []struct {
name string
expected string
}{
{"-y flag", "-y"},
{"image2 format", "-f image2"},
{"start number", "-start_number 1"},
{"framerate", "-framerate 24.00"},
{"input pattern", "-i frame_%04d.exr"},
{"codec", "-c:v libx264"},
{"pixel format", "-pix_fmt yuv420p"}, // EXR now treated as SDR (like PNG)
{"frame rate", "-r 24.00"},
{"color primaries", "-color_primaries bt709"}, // EXR now uses bt709 (SDR)
{"color trc", "-color_trc bt709"}, // EXR now uses bt709 (SDR)
{"colorspace", "-colorspace bt709"},
{"color range", "-color_range tv"},
{"video filter", "-vf"},
{"preset", "-preset veryslow"},
{"crf", "-crf 15"},
{"profile", "-profile:v high"}, // EXR now uses high profile (SDR)
{"pass 2", "-pass 2"},
{"output path", "output.mp4"},
}
for _, check := range checks {
if !strings.Contains(argsStr, check.expected) {
t.Errorf("Missing expected argument: %s", check.expected)
}
}
// Verify filter is present for EXR (linear RGB to sRGB conversion, like Krita does)
if !strings.Contains(argsStr, "format=gbrpf32le") {
t.Error("Expected format conversion filter for EXR source, but not found")
}
if !strings.Contains(argsStr, "zscale=transferin=8:transfer=13") {
t.Error("Expected linear to sRGB conversion for EXR source, but not found")
}
}
func TestSoftwareEncoder_BuildCommand_H264_PNG(t *testing.T) {
encoder := &SoftwareEncoder{codec: "libx264"}
config := &EncodeConfig{
InputPattern: "frame_%04d.png",
OutputPath: "output.mp4",
StartFrame: 1,
FrameRate: 24.0,
WorkDir: "/tmp",
UseAlpha: false,
TwoPass: true,
SourceFormat: "png",
}
cmd := encoder.BuildCommand(config)
args := cmd.Args[1:]
argsStr := strings.Join(args, " ")
// PNG should NOT have video filter
if strings.Contains(argsStr, "-vf") {
t.Error("PNG source should not have video filter, but -vf was found")
}
// Should still have all other required args
if !strings.Contains(argsStr, "-c:v libx264") {
t.Error("Missing codec argument")
}
}
func TestSoftwareEncoder_BuildCommand_AV1_WithAlpha(t *testing.T) {
encoder := &SoftwareEncoder{codec: "libaom-av1"}
config := &EncodeConfig{
InputPattern: "frame_%04d.exr",
OutputPath: "output.mp4",
StartFrame: 100,
FrameRate: 30.0,
WorkDir: "/tmp",
UseAlpha: true,
TwoPass: true,
SourceFormat: "exr",
}
cmd := encoder.BuildCommand(config)
args := cmd.Args[1:]
argsStr := strings.Join(args, " ")
// Check alpha-specific settings
if !strings.Contains(argsStr, "-pix_fmt yuva420p") {
t.Error("Expected yuva420p pixel format for alpha, but not found")
}
// Check AV1-specific arguments
av1Checks := []string{
"-c:v libaom-av1",
"-crf 30",
"-b:v 0",
"-tiles 2x2",
"-g 240",
}
for _, check := range av1Checks {
if !strings.Contains(argsStr, check) {
t.Errorf("Missing AV1 argument: %s", check)
}
}
// Check tonemap filter includes alpha format
if !strings.Contains(argsStr, "format=yuva420p") {
t.Error("Expected tonemap filter to output yuva420p for alpha, but not found")
}
}
func TestSoftwareEncoder_BuildCommand_VP9(t *testing.T) {
encoder := &SoftwareEncoder{codec: "libvpx-vp9"}
config := &EncodeConfig{
InputPattern: "frame_%04d.exr",
OutputPath: "output.webm",
StartFrame: 1,
FrameRate: 24.0,
WorkDir: "/tmp",
UseAlpha: true,
TwoPass: true,
SourceFormat: "exr",
}
cmd := encoder.BuildCommand(config)
args := cmd.Args[1:]
argsStr := strings.Join(args, " ")
// Check VP9-specific arguments
vp9Checks := []string{
"-c:v libvpx-vp9",
"-crf 30",
"-b:v 0",
"-row-mt 1",
"-g 240",
}
for _, check := range vp9Checks {
if !strings.Contains(argsStr, check) {
t.Errorf("Missing VP9 argument: %s", check)
}
}
}
func TestSoftwareEncoder_BuildPass1Command(t *testing.T) {
encoder := &SoftwareEncoder{codec: "libx264"}
config := &EncodeConfig{
InputPattern: "frame_%04d.exr",
OutputPath: "output.mp4",
StartFrame: 1,
FrameRate: 24.0,
WorkDir: "/tmp",
UseAlpha: false,
TwoPass: true,
SourceFormat: "exr",
}
cmd := encoder.BuildPass1Command(config)
args := cmd.Args[1:]
argsStr := strings.Join(args, " ")
// Pass 1 should have -pass 1 and output to null
if !strings.Contains(argsStr, "-pass 1") {
t.Error("Pass 1 command should include '-pass 1'")
}
if !strings.Contains(argsStr, "-f null") {
t.Error("Pass 1 command should include '-f null'")
}
if !strings.Contains(argsStr, "/dev/null") {
t.Error("Pass 1 command should output to /dev/null")
}
// Should NOT have output path
if strings.Contains(argsStr, "output.mp4") {
t.Error("Pass 1 command should not include output path")
}
}
func TestSoftwareEncoder_BuildPass1Command_AV1(t *testing.T) {
encoder := &SoftwareEncoder{codec: "libaom-av1"}
config := &EncodeConfig{
InputPattern: "frame_%04d.exr",
OutputPath: "output.mp4",
StartFrame: 1,
FrameRate: 24.0,
WorkDir: "/tmp",
UseAlpha: false,
TwoPass: true,
SourceFormat: "exr",
}
cmd := encoder.BuildPass1Command(config)
args := cmd.Args[1:]
argsStr := strings.Join(args, " ")
// Pass 1 should have -pass 1 and output to null
if !strings.Contains(argsStr, "-pass 1") {
t.Error("Pass 1 command should include '-pass 1'")
}
if !strings.Contains(argsStr, "-f null") {
t.Error("Pass 1 command should include '-f null'")
}
if !strings.Contains(argsStr, "/dev/null") {
t.Error("Pass 1 command should output to /dev/null")
}
// Check AV1-specific arguments in pass 1
av1Checks := []string{
"-c:v libaom-av1",
"-crf 30",
"-b:v 0",
"-tiles 2x2",
"-g 240",
}
for _, check := range av1Checks {
if !strings.Contains(argsStr, check) {
t.Errorf("Missing AV1 argument in pass 1: %s", check)
}
}
}
func TestSoftwareEncoder_BuildPass1Command_VP9(t *testing.T) {
encoder := &SoftwareEncoder{codec: "libvpx-vp9"}
config := &EncodeConfig{
InputPattern: "frame_%04d.exr",
OutputPath: "output.webm",
StartFrame: 1,
FrameRate: 24.0,
WorkDir: "/tmp",
UseAlpha: false,
TwoPass: true,
SourceFormat: "exr",
}
cmd := encoder.BuildPass1Command(config)
args := cmd.Args[1:]
argsStr := strings.Join(args, " ")
// Pass 1 should have -pass 1 and output to null
if !strings.Contains(argsStr, "-pass 1") {
t.Error("Pass 1 command should include '-pass 1'")
}
if !strings.Contains(argsStr, "-f null") {
t.Error("Pass 1 command should include '-f null'")
}
if !strings.Contains(argsStr, "/dev/null") {
t.Error("Pass 1 command should output to /dev/null")
}
// Check VP9-specific arguments in pass 1
vp9Checks := []string{
"-c:v libvpx-vp9",
"-crf 30",
"-b:v 0",
"-row-mt 1",
"-g 240",
}
for _, check := range vp9Checks {
if !strings.Contains(argsStr, check) {
t.Errorf("Missing VP9 argument in pass 1: %s", check)
}
}
}
func TestSoftwareEncoder_BuildCommand_NoTwoPass(t *testing.T) {
encoder := &SoftwareEncoder{codec: "libx264"}
config := &EncodeConfig{
InputPattern: "frame_%04d.exr",
OutputPath: "output.mp4",
StartFrame: 1,
FrameRate: 24.0,
WorkDir: "/tmp",
UseAlpha: false,
TwoPass: false,
SourceFormat: "exr",
}
cmd := encoder.BuildCommand(config)
args := cmd.Args[1:]
argsStr := strings.Join(args, " ")
// Should NOT have -pass flag when TwoPass is false
if strings.Contains(argsStr, "-pass") {
t.Error("Command should not include -pass flag when TwoPass is false")
}
}
func TestSelector_SelectH264(t *testing.T) {
selector := NewSelector()
encoder := selector.SelectH264()
if encoder == nil {
t.Fatal("SelectH264 returned nil")
}
if encoder.Codec() != "libx264" {
t.Errorf("Expected codec 'libx264', got '%s'", encoder.Codec())
}
if encoder.Name() != "software" {
t.Errorf("Expected name 'software', got '%s'", encoder.Name())
}
}
func TestSelector_SelectAV1(t *testing.T) {
selector := NewSelector()
encoder := selector.SelectAV1()
if encoder == nil {
t.Fatal("SelectAV1 returned nil")
}
if encoder.Codec() != "libaom-av1" {
t.Errorf("Expected codec 'libaom-av1', got '%s'", encoder.Codec())
}
}
func TestSelector_SelectVP9(t *testing.T) {
selector := NewSelector()
encoder := selector.SelectVP9()
if encoder == nil {
t.Fatal("SelectVP9 returned nil")
}
if encoder.Codec() != "libvpx-vp9" {
t.Errorf("Expected codec 'libvpx-vp9', got '%s'", encoder.Codec())
}
}
func TestTonemapFilter_WithAlpha(t *testing.T) {
filter := tonemapFilter(true)
// Filter should convert from gbrpf32le to yuva420p10le with proper colorspace conversion
if !strings.Contains(filter, "yuva420p10le") {
t.Error("Tonemap filter with alpha should output yuva420p10le format for HDR")
}
if !strings.Contains(filter, "gbrpf32le") {
t.Error("Tonemap filter should start with gbrpf32le format")
}
// Should use zscale for colorspace conversion from linear RGB to bt2020 YUV
if !strings.Contains(filter, "zscale") {
t.Error("Tonemap filter should use zscale for colorspace conversion")
}
// Check for HLG transfer function (numeric value 18 or string arib-std-b67)
if !strings.Contains(filter, "transfer=18") && !strings.Contains(filter, "transfer=arib-std-b67") {
t.Error("Tonemap filter should use HLG transfer function (18 or arib-std-b67)")
}
}
func TestTonemapFilter_WithoutAlpha(t *testing.T) {
filter := tonemapFilter(false)
// Filter should convert from gbrpf32le to yuv420p10le with proper colorspace conversion
if !strings.Contains(filter, "yuv420p10le") {
t.Error("Tonemap filter without alpha should output yuv420p10le format for HDR")
}
if strings.Contains(filter, "yuva420p") {
t.Error("Tonemap filter without alpha should not output yuva420p format")
}
if !strings.Contains(filter, "gbrpf32le") {
t.Error("Tonemap filter should start with gbrpf32le format")
}
// Should use zscale for colorspace conversion from linear RGB to bt2020 YUV
if !strings.Contains(filter, "zscale") {
t.Error("Tonemap filter should use zscale for colorspace conversion")
}
// Check for HLG transfer function (numeric value 18 or string arib-std-b67)
if !strings.Contains(filter, "transfer=18") && !strings.Contains(filter, "transfer=arib-std-b67") {
t.Error("Tonemap filter should use HLG transfer function (18 or arib-std-b67)")
}
}
func TestSoftwareEncoder_Available(t *testing.T) {
encoder := &SoftwareEncoder{codec: "libx264"}
if !encoder.Available() {
t.Error("Software encoder should always be available")
}
}
func TestEncodeConfig_DefaultSourceFormat(t *testing.T) {
config := &EncodeConfig{
InputPattern: "frame_%04d.exr",
OutputPath: "output.mp4",
StartFrame: 1,
FrameRate: 24.0,
WorkDir: "/tmp",
UseAlpha: false,
TwoPass: false,
// SourceFormat not set, should default to empty string (treated as exr)
}
encoder := &SoftwareEncoder{codec: "libx264"}
cmd := encoder.BuildCommand(config)
args := strings.Join(cmd.Args[1:], " ")
// Should still have tonemap filter when SourceFormat is empty (defaults to exr behavior)
if !strings.Contains(args, "-vf") {
t.Error("Empty SourceFormat should default to EXR behavior with tonemap filter")
}
}
func TestCommandOrder(t *testing.T) {
encoder := &SoftwareEncoder{codec: "libx264"}
config := &EncodeConfig{
InputPattern: "frame_%04d.exr",
OutputPath: "output.mp4",
StartFrame: 1,
FrameRate: 24.0,
WorkDir: "/tmp",
UseAlpha: false,
TwoPass: true,
SourceFormat: "exr",
}
cmd := encoder.BuildCommand(config)
args := cmd.Args[1:]
// Verify argument order: input should come before codec
inputIdx := -1
codecIdx := -1
vfIdx := -1
for i, arg := range args {
if arg == "-i" && i+1 < len(args) && args[i+1] == "frame_%04d.exr" {
inputIdx = i
}
if arg == "-c:v" && i+1 < len(args) && args[i+1] == "libx264" {
codecIdx = i
}
if arg == "-vf" {
vfIdx = i
}
}
if inputIdx == -1 {
t.Fatal("Input pattern not found in command")
}
if codecIdx == -1 {
t.Fatal("Codec not found in command")
}
if vfIdx == -1 {
t.Fatal("Video filter not found in command")
}
// Input should come before codec
if inputIdx >= codecIdx {
t.Error("Input pattern should come before codec in command")
}
// Video filter should come after input (order: input -> codec -> colorspace -> filter -> codec args)
// In practice, the filter comes after codec and colorspace metadata but before codec-specific args
if vfIdx <= inputIdx {
t.Error("Video filter should come after input")
}
}
func TestCommand_ColorspaceMetadata(t *testing.T) {
encoder := &SoftwareEncoder{codec: "libx264"}
config := &EncodeConfig{
InputPattern: "frame_%04d.exr",
OutputPath: "output.mp4",
StartFrame: 1,
FrameRate: 24.0,
WorkDir: "/tmp",
UseAlpha: false,
TwoPass: false,
SourceFormat: "exr",
PreserveHDR: false, // SDR encoding
}
cmd := encoder.BuildCommand(config)
args := cmd.Args[1:]
argsStr := strings.Join(args, " ")
// Verify all SDR colorspace metadata is present for EXR (SDR encoding)
colorspaceArgs := []string{
"-color_primaries bt709", // EXR uses bt709 (SDR)
"-color_trc bt709", // EXR uses bt709 (SDR)
"-colorspace bt709",
"-color_range tv",
}
for _, arg := range colorspaceArgs {
if !strings.Contains(argsStr, arg) {
t.Errorf("Missing colorspace metadata: %s", arg)
}
}
// Verify SDR pixel format
if !strings.Contains(argsStr, "-pix_fmt yuv420p") {
t.Error("SDR encoding should use yuv420p pixel format")
}
// Verify H.264 high profile (not high10)
if !strings.Contains(argsStr, "-profile:v high") {
t.Error("SDR encoding should use high profile")
}
if strings.Contains(argsStr, "-profile:v high10") {
t.Error("SDR encoding should not use high10 profile")
}
}
func TestCommand_HDR_ColorspaceMetadata(t *testing.T) {
encoder := &SoftwareEncoder{codec: "libx264"}
config := &EncodeConfig{
InputPattern: "frame_%04d.exr",
OutputPath: "output.mp4",
StartFrame: 1,
FrameRate: 24.0,
WorkDir: "/tmp",
UseAlpha: false,
TwoPass: false,
SourceFormat: "exr",
PreserveHDR: true, // HDR encoding
}
cmd := encoder.BuildCommand(config)
args := cmd.Args[1:]
argsStr := strings.Join(args, " ")
// Verify all HDR colorspace metadata is present for EXR (HDR encoding)
colorspaceArgs := []string{
"-color_primaries bt709", // bt709 primaries to match PNG color appearance
"-color_trc arib-std-b67", // HLG transfer function for HDR/SDR compatibility
"-colorspace bt709", // bt709 colorspace to match PNG
"-color_range tv",
}
for _, arg := range colorspaceArgs {
if !strings.Contains(argsStr, arg) {
t.Errorf("Missing HDR colorspace metadata: %s", arg)
}
}
// Verify HDR pixel format (10-bit)
if !strings.Contains(argsStr, "-pix_fmt yuv420p10le") {
t.Error("HDR encoding should use yuv420p10le pixel format")
}
// Verify H.264 high10 profile (for 10-bit)
if !strings.Contains(argsStr, "-profile:v high10") {
t.Error("HDR encoding should use high10 profile")
}
// Verify HDR filter chain (linear -> sRGB -> HLG)
if !strings.Contains(argsStr, "-vf") {
t.Fatal("HDR encoding should have video filter")
}
vfIdx := -1
for i, arg := range args {
if arg == "-vf" && i+1 < len(args) {
vfIdx = i + 1
break
}
}
if vfIdx == -1 {
t.Fatal("Video filter not found")
}
filter := args[vfIdx]
if !strings.Contains(filter, "transfer=18") {
t.Error("HDR filter should convert to HLG (transfer=18)")
}
if !strings.Contains(filter, "yuv420p10le") {
t.Error("HDR filter should output yuv420p10le format")
}
}
// Integration tests using example files
func TestIntegration_Encode_EXR_H264(t *testing.T) {
if testing.Short() {
t.Skip("Skipping integration test in short mode")
}
// Check if example file exists
exampleDir := filepath.Join("..", "..", "..", "examples")
exrFile := filepath.Join(exampleDir, "frame_0800.exr")
if _, err := os.Stat(exrFile); os.IsNotExist(err) {
t.Skipf("Example file not found: %s", exrFile)
}
// Get absolute paths
workspaceRoot, err := filepath.Abs(filepath.Join("..", "..", ".."))
if err != nil {
t.Fatalf("Failed to get workspace root: %v", err)
}
exampleDirAbs, err := filepath.Abs(exampleDir)
if err != nil {
t.Fatalf("Failed to get example directory: %v", err)
}
tmpDir := filepath.Join(workspaceRoot, "tmp")
if err := os.MkdirAll(tmpDir, 0755); err != nil {
t.Fatalf("Failed to create tmp directory: %v", err)
}
encoder := &SoftwareEncoder{codec: "libx264"}
config := &EncodeConfig{
InputPattern: filepath.Join(exampleDirAbs, "frame_%04d.exr"),
OutputPath: filepath.Join(tmpDir, "test_exr_h264.mp4"),
StartFrame: 800,
FrameRate: 24.0,
WorkDir: tmpDir,
UseAlpha: false,
TwoPass: false, // Use single pass for faster testing
SourceFormat: "exr",
}
// Build and run command
cmd := encoder.BuildCommand(config)
if cmd == nil {
t.Fatal("BuildCommand returned nil")
}
// Capture stderr to see what went wrong
output, err := cmd.CombinedOutput()
if err != nil {
t.Errorf("FFmpeg command failed: %v\nCommand output: %s", err, string(output))
return
}
// Verify output file was created
if _, err := os.Stat(config.OutputPath); os.IsNotExist(err) {
t.Errorf("Output file was not created: %s\nCommand output: %s", config.OutputPath, string(output))
} else {
t.Logf("Successfully created output file: %s", config.OutputPath)
// Verify file has content
info, _ := os.Stat(config.OutputPath)
if info.Size() == 0 {
t.Errorf("Output file was created but is empty\nCommand output: %s", string(output))
} else {
t.Logf("Output file size: %d bytes", info.Size())
}
}
}
func TestIntegration_Encode_PNG_H264(t *testing.T) {
if testing.Short() {
t.Skip("Skipping integration test in short mode")
}
// Check if example file exists
exampleDir := filepath.Join("..", "..", "..", "examples")
pngFile := filepath.Join(exampleDir, "frame_0800.png")
if _, err := os.Stat(pngFile); os.IsNotExist(err) {
t.Skipf("Example file not found: %s", pngFile)
}
// Get absolute paths
workspaceRoot, err := filepath.Abs(filepath.Join("..", "..", ".."))
if err != nil {
t.Fatalf("Failed to get workspace root: %v", err)
}
exampleDirAbs, err := filepath.Abs(exampleDir)
if err != nil {
t.Fatalf("Failed to get example directory: %v", err)
}
tmpDir := filepath.Join(workspaceRoot, "tmp")
if err := os.MkdirAll(tmpDir, 0755); err != nil {
t.Fatalf("Failed to create tmp directory: %v", err)
}
encoder := &SoftwareEncoder{codec: "libx264"}
config := &EncodeConfig{
InputPattern: filepath.Join(exampleDirAbs, "frame_%04d.png"),
OutputPath: filepath.Join(tmpDir, "test_png_h264.mp4"),
StartFrame: 800,
FrameRate: 24.0,
WorkDir: tmpDir,
UseAlpha: false,
TwoPass: false, // Use single pass for faster testing
SourceFormat: "png",
}
// Build and run command
cmd := encoder.BuildCommand(config)
if cmd == nil {
t.Fatal("BuildCommand returned nil")
}
// Verify no video filter is used for PNG
argsStr := strings.Join(cmd.Args, " ")
if strings.Contains(argsStr, "-vf") {
t.Error("PNG encoding should not use video filter, but -vf was found in command")
}
// Run the command
cmdOutput, err := cmd.CombinedOutput()
if err != nil {
t.Errorf("FFmpeg command failed: %v\nCommand output: %s", err, string(cmdOutput))
return
}
// Verify output file was created
if _, err := os.Stat(config.OutputPath); os.IsNotExist(err) {
t.Errorf("Output file was not created: %s\nCommand output: %s", config.OutputPath, string(cmdOutput))
} else {
t.Logf("Successfully created output file: %s", config.OutputPath)
info, _ := os.Stat(config.OutputPath)
if info.Size() == 0 {
t.Error("Output file was created but is empty")
} else {
t.Logf("Output file size: %d bytes", info.Size())
}
}
}
func TestIntegration_Encode_EXR_VP9(t *testing.T) {
if testing.Short() {
t.Skip("Skipping integration test in short mode")
}
// Check if example file exists
exampleDir := filepath.Join("..", "..", "..", "examples")
exrFile := filepath.Join(exampleDir, "frame_0800.exr")
if _, err := os.Stat(exrFile); os.IsNotExist(err) {
t.Skipf("Example file not found: %s", exrFile)
}
// Check if VP9 encoder is available
checkCmd := exec.Command("ffmpeg", "-hide_banner", "-encoders")
checkOutput, err := checkCmd.CombinedOutput()
if err != nil || !strings.Contains(string(checkOutput), "libvpx-vp9") {
t.Skip("VP9 encoder (libvpx-vp9) not available in ffmpeg")
}
// Get absolute paths
workspaceRoot, err := filepath.Abs(filepath.Join("..", "..", ".."))
if err != nil {
t.Fatalf("Failed to get workspace root: %v", err)
}
exampleDirAbs, err := filepath.Abs(exampleDir)
if err != nil {
t.Fatalf("Failed to get example directory: %v", err)
}
tmpDir := filepath.Join(workspaceRoot, "tmp")
if err := os.MkdirAll(tmpDir, 0755); err != nil {
t.Fatalf("Failed to create tmp directory: %v", err)
}
encoder := &SoftwareEncoder{codec: "libvpx-vp9"}
config := &EncodeConfig{
InputPattern: filepath.Join(exampleDirAbs, "frame_%04d.exr"),
OutputPath: filepath.Join(tmpDir, "test_exr_vp9.webm"),
StartFrame: 800,
FrameRate: 24.0,
WorkDir: tmpDir,
UseAlpha: false,
TwoPass: false, // Use single pass for faster testing
SourceFormat: "exr",
}
// Build and run command
cmd := encoder.BuildCommand(config)
if cmd == nil {
t.Fatal("BuildCommand returned nil")
}
// Capture stderr to see what went wrong
output, err := cmd.CombinedOutput()
if err != nil {
t.Errorf("FFmpeg command failed: %v\nCommand output: %s", err, string(output))
return
}
// Verify output file was created
if _, err := os.Stat(config.OutputPath); os.IsNotExist(err) {
t.Errorf("Output file was not created: %s\nCommand output: %s", config.OutputPath, string(output))
} else {
t.Logf("Successfully created output file: %s", config.OutputPath)
// Verify file has content
info, _ := os.Stat(config.OutputPath)
if info.Size() == 0 {
t.Errorf("Output file was created but is empty\nCommand output: %s", string(output))
} else {
t.Logf("Output file size: %d bytes", info.Size())
}
}
}
func TestIntegration_Encode_EXR_AV1(t *testing.T) {
if testing.Short() {
t.Skip("Skipping integration test in short mode")
}
// Check if example file exists
exampleDir := filepath.Join("..", "..", "..", "examples")
exrFile := filepath.Join(exampleDir, "frame_0800.exr")
if _, err := os.Stat(exrFile); os.IsNotExist(err) {
t.Skipf("Example file not found: %s", exrFile)
}
// Check if AV1 encoder is available
checkCmd := exec.Command("ffmpeg", "-hide_banner", "-encoders")
output, err := checkCmd.CombinedOutput()
if err != nil || !strings.Contains(string(output), "libaom-av1") {
t.Skip("AV1 encoder (libaom-av1) not available in ffmpeg")
}
// Get absolute paths
workspaceRoot, err := filepath.Abs(filepath.Join("..", "..", ".."))
if err != nil {
t.Fatalf("Failed to get workspace root: %v", err)
}
exampleDirAbs, err := filepath.Abs(exampleDir)
if err != nil {
t.Fatalf("Failed to get example directory: %v", err)
}
tmpDir := filepath.Join(workspaceRoot, "tmp")
if err := os.MkdirAll(tmpDir, 0755); err != nil {
t.Fatalf("Failed to create tmp directory: %v", err)
}
encoder := &SoftwareEncoder{codec: "libaom-av1"}
config := &EncodeConfig{
InputPattern: filepath.Join(exampleDirAbs, "frame_%04d.exr"),
OutputPath: filepath.Join(tmpDir, "test_exr_av1.mp4"),
StartFrame: 800,
FrameRate: 24.0,
WorkDir: tmpDir,
UseAlpha: false,
TwoPass: false,
SourceFormat: "exr",
}
// Build and run command
cmd := encoder.BuildCommand(config)
cmdOutput, err := cmd.CombinedOutput()
if err != nil {
t.Errorf("FFmpeg command failed: %v\nCommand output: %s", err, string(cmdOutput))
return
}
// Verify output file was created
if _, err := os.Stat(config.OutputPath); os.IsNotExist(err) {
t.Errorf("Output file was not created: %s\nCommand output: %s", config.OutputPath, string(cmdOutput))
} else {
t.Logf("Successfully created AV1 output file: %s", config.OutputPath)
info, _ := os.Stat(config.OutputPath)
if info.Size() == 0 {
t.Errorf("Output file was created but is empty\nCommand output: %s", string(cmdOutput))
} else {
t.Logf("Output file size: %d bytes", info.Size())
}
}
}
func TestIntegration_Encode_EXR_VP9_WithAlpha(t *testing.T) {
if testing.Short() {
t.Skip("Skipping integration test in short mode")
}
// Check if example file exists
exampleDir := filepath.Join("..", "..", "..", "examples")
exrFile := filepath.Join(exampleDir, "frame_0800.exr")
if _, err := os.Stat(exrFile); os.IsNotExist(err) {
t.Skipf("Example file not found: %s", exrFile)
}
// Check if VP9 encoder is available
checkCmd := exec.Command("ffmpeg", "-hide_banner", "-encoders")
output, err := checkCmd.CombinedOutput()
if err != nil || !strings.Contains(string(output), "libvpx-vp9") {
t.Skip("VP9 encoder (libvpx-vp9) not available in ffmpeg")
}
// Get absolute paths
workspaceRoot, err := filepath.Abs(filepath.Join("..", "..", ".."))
if err != nil {
t.Fatalf("Failed to get workspace root: %v", err)
}
exampleDirAbs, err := filepath.Abs(exampleDir)
if err != nil {
t.Fatalf("Failed to get example directory: %v", err)
}
tmpDir := filepath.Join(workspaceRoot, "tmp")
if err := os.MkdirAll(tmpDir, 0755); err != nil {
t.Fatalf("Failed to create tmp directory: %v", err)
}
encoder := &SoftwareEncoder{codec: "libvpx-vp9"}
config := &EncodeConfig{
InputPattern: filepath.Join(exampleDirAbs, "frame_%04d.exr"),
OutputPath: filepath.Join(tmpDir, "test_exr_vp9_alpha.webm"),
StartFrame: 800,
FrameRate: 24.0,
WorkDir: tmpDir,
UseAlpha: true, // Test with alpha
TwoPass: false, // Use single pass for faster testing
SourceFormat: "exr",
}
// Build and run command
cmd := encoder.BuildCommand(config)
if cmd == nil {
t.Fatal("BuildCommand returned nil")
}
// Capture stderr to see what went wrong
cmdOutput, err := cmd.CombinedOutput()
if err != nil {
t.Errorf("FFmpeg command failed: %v\nCommand output: %s", err, string(cmdOutput))
return
}
// Verify output file was created
if _, err := os.Stat(config.OutputPath); os.IsNotExist(err) {
t.Errorf("Output file was not created: %s\nCommand output: %s", config.OutputPath, string(cmdOutput))
} else {
t.Logf("Successfully created VP9 output file with alpha: %s", config.OutputPath)
info, _ := os.Stat(config.OutputPath)
if info.Size() == 0 {
t.Errorf("Output file was created but is empty\nCommand output: %s", string(cmdOutput))
} else {
t.Logf("Output file size: %d bytes", info.Size())
}
}
}
// Helper function to copy files
func copyFile(src, dst string) error {
data, err := os.ReadFile(src)
if err != nil {
return err
}
return os.WriteFile(dst, data, 0644)
}

361
internal/runner/runner.go Normal file
View File

@@ -0,0 +1,361 @@
// Package runner provides the Jiggablend render runner.
package runner
import (
"crypto/sha256"
"encoding/hex"
"fmt"
"log"
"net"
"os"
"os/exec"
"strings"
"sync"
"time"
"jiggablend/internal/runner/api"
"jiggablend/internal/runner/blender"
"jiggablend/internal/runner/encoding"
"jiggablend/internal/runner/tasks"
"jiggablend/internal/runner/workspace"
"jiggablend/pkg/executils"
"jiggablend/pkg/types"
)
// Runner is the main render runner.
type Runner struct {
id int64
name string
hostname string
manager *api.ManagerClient
workspace *workspace.Manager
blender *blender.Manager
encoder *encoding.Selector
processes *executils.ProcessTracker
processors map[string]tasks.Processor
stopChan chan struct{}
fingerprint string
fingerprintMu sync.RWMutex
}
// New creates a new runner.
func New(managerURL, name, hostname string) *Runner {
manager := api.NewManagerClient(managerURL)
r := &Runner{
name: name,
hostname: hostname,
manager: manager,
processes: executils.NewProcessTracker(),
stopChan: make(chan struct{}),
processors: make(map[string]tasks.Processor),
}
// Generate fingerprint
r.generateFingerprint()
return r
}
// CheckRequiredTools verifies that required external tools are available.
func (r *Runner) CheckRequiredTools() error {
if err := exec.Command("zstd", "--version").Run(); err != nil {
return fmt.Errorf("zstd not found - required for compressed blend file support. Install with: apt install zstd")
}
log.Printf("Found zstd for compressed blend file support")
if err := exec.Command("xvfb-run", "--help").Run(); err != nil {
return fmt.Errorf("xvfb-run not found - required for headless Blender rendering. Install with: apt install xvfb")
}
log.Printf("Found xvfb-run for headless rendering without -b option")
return nil
}
var cachedCapabilities map[string]interface{}
// ProbeCapabilities detects hardware capabilities.
func (r *Runner) ProbeCapabilities() map[string]interface{} {
if cachedCapabilities != nil {
return cachedCapabilities
}
caps := make(map[string]interface{})
// Check for ffmpeg and probe encoding capabilities
if err := exec.Command("ffmpeg", "-version").Run(); err == nil {
caps["ffmpeg"] = true
} else {
caps["ffmpeg"] = false
}
cachedCapabilities = caps
return caps
}
// Register registers the runner with the manager.
func (r *Runner) Register(apiKey string) (int64, error) {
caps := r.ProbeCapabilities()
id, err := r.manager.Register(r.name, r.hostname, caps, apiKey, r.GetFingerprint())
if err != nil {
return 0, err
}
r.id = id
// Initialize workspace after registration
r.workspace = workspace.NewManager(r.name)
// Initialize blender manager
r.blender = blender.NewManager(r.manager, r.workspace.BaseDir())
// Initialize encoder selector
r.encoder = encoding.NewSelector()
// Register task processors
r.processors["render"] = tasks.NewRenderProcessor()
r.processors["encode"] = tasks.NewEncodeProcessor()
return id, nil
}
// Start starts the job polling loop.
func (r *Runner) Start(pollInterval time.Duration) {
log.Printf("Starting job polling loop (interval: %v)", pollInterval)
for {
select {
case <-r.stopChan:
log.Printf("Stopping job polling loop")
return
default:
}
log.Printf("Polling for next job (runner ID: %d)", r.id)
job, err := r.manager.PollNextJob()
if err != nil {
log.Printf("Error polling for job: %v", err)
time.Sleep(pollInterval)
continue
}
if job == nil {
log.Printf("No job available, sleeping for %v", pollInterval)
time.Sleep(pollInterval)
continue
}
log.Printf("Received job assignment: task=%d, job=%d, type=%s",
job.Task.TaskID, job.Task.JobID, job.Task.TaskType)
if err := r.executeJob(job); err != nil {
log.Printf("Error processing job: %v", err)
}
}
}
// Stop stops the runner.
func (r *Runner) Stop() {
close(r.stopChan)
}
// KillAllProcesses kills all running processes.
func (r *Runner) KillAllProcesses() {
log.Printf("Killing all running processes...")
killedCount := r.processes.KillAll()
// Encoder device-pool cleanup is handled internally by the selector,
// so there is nothing to release here explicitly.
log.Printf("Killed %d process(es)", killedCount)
}
// Cleanup removes the workspace directory.
func (r *Runner) Cleanup() {
if r.workspace != nil {
r.workspace.Cleanup()
}
}
// executeJob handles a job using per-job WebSocket connection.
func (r *Runner) executeJob(job *api.NextJobResponse) (err error) {
// Recover from panics to prevent runner process crashes during task execution
defer func() {
if rec := recover(); rec != nil {
log.Printf("Task execution panicked: %v", rec)
err = fmt.Errorf("task execution panicked: %v", rec)
}
}()
// Connect to job WebSocket (no runnerID needed - authentication handles it)
jobConn := api.NewJobConnection()
if err := jobConn.Connect(r.manager.GetBaseURL(), job.JobPath, job.JobToken); err != nil {
return fmt.Errorf("failed to connect job WebSocket: %w", err)
}
defer jobConn.Close()
log.Printf("Job WebSocket authenticated for task %d", job.Task.TaskID)
// Create task context
workDir := r.workspace.JobDir(job.Task.JobID)
ctx := tasks.NewContext(
job.Task.TaskID,
job.Task.JobID,
job.Task.JobName,
job.Task.Frame,
job.Task.TaskType,
workDir,
job.JobToken,
job.Task.Metadata,
r.manager,
jobConn,
r.workspace,
r.blender,
r.encoder,
r.processes,
)
ctx.Info(fmt.Sprintf("Task assignment received (job: %d, type: %s)",
job.Task.JobID, job.Task.TaskType))
// Get processor for task type
processor, ok := r.processors[job.Task.TaskType]
if !ok {
return fmt.Errorf("unknown task type: %s", job.Task.TaskType)
}
// Process the task
var processErr error
switch job.Task.TaskType {
case "render": // this task has a upload outputs step because the frames are not uploaded by the render task directly we have to do it manually here TODO: maybe we should make it work like the encode task
// Download context
contextPath := job.JobPath + "/context.tar"
if err := r.downloadContext(job.Task.JobID, contextPath, job.JobToken); err != nil {
jobConn.Log(job.Task.TaskID, types.LogLevelError, fmt.Sprintf("Failed to download context: %v", err))
jobConn.Complete(job.Task.TaskID, false, fmt.Errorf("failed to download context: %v", err))
return fmt.Errorf("failed to download context: %w", err)
}
processErr = processor.Process(ctx)
if processErr == nil {
processErr = r.uploadOutputs(ctx, job)
}
case "encode": // this task doesn't have a upload outputs step because the video is already uploaded by the encode task
processErr = processor.Process(ctx)
default:
return fmt.Errorf("unknown task type: %s", job.Task.TaskType)
}
if processErr != nil {
ctx.Error(fmt.Sprintf("Task failed: %v", processErr))
ctx.Complete(false, processErr)
return processErr
}
ctx.Complete(true, nil)
return nil
}
func (r *Runner) downloadContext(jobID int64, contextPath, jobToken string) error {
reader, err := r.manager.DownloadContext(contextPath, jobToken)
if err != nil {
return err
}
defer reader.Close()
jobDir := r.workspace.JobDir(jobID)
return workspace.ExtractTar(reader, jobDir)
}
func (r *Runner) uploadOutputs(ctx *tasks.Context, job *api.NextJobResponse) error {
outputDir := ctx.WorkDir + "/output"
uploadPath := fmt.Sprintf("/api/runner/jobs/%d/upload", job.Task.JobID)
entries, err := os.ReadDir(outputDir)
if err != nil {
return fmt.Errorf("failed to read output directory: %w", err)
}
for _, entry := range entries {
if entry.IsDir() {
continue
}
filePath := outputDir + "/" + entry.Name()
if err := r.manager.UploadFile(uploadPath, job.JobToken, filePath); err != nil {
log.Printf("Failed to upload %s: %v", filePath, err)
} else {
ctx.OutputUploaded(entry.Name())
}
}
return nil
}
// generateFingerprint creates a unique hardware fingerprint.
func (r *Runner) generateFingerprint() {
r.fingerprintMu.Lock()
defer r.fingerprintMu.Unlock()
var components []string
components = append(components, r.hostname)
if machineID, err := os.ReadFile("/etc/machine-id"); err == nil {
components = append(components, strings.TrimSpace(string(machineID)))
}
if productUUID, err := os.ReadFile("/sys/class/dmi/id/product_uuid"); err == nil {
components = append(components, strings.TrimSpace(string(productUUID)))
}
if macAddr, err := r.getMACAddress(); err == nil {
components = append(components, macAddr)
}
if len(components) <= 1 {
components = append(components, fmt.Sprintf("%d", os.Getpid()))
components = append(components, fmt.Sprintf("%d", time.Now().Unix()))
}
h := sha256.New()
for _, comp := range components {
h.Write([]byte(comp))
h.Write([]byte{0})
}
r.fingerprint = hex.EncodeToString(h.Sum(nil))
}
func (r *Runner) getMACAddress() (string, error) {
interfaces, err := net.Interfaces()
if err != nil {
return "", err
}
for _, iface := range interfaces {
if iface.Flags&net.FlagLoopback != 0 || iface.Flags&net.FlagUp == 0 {
continue
}
if len(iface.HardwareAddr) == 0 {
continue
}
return iface.HardwareAddr.String(), nil
}
return "", fmt.Errorf("no suitable network interface found")
}
// GetFingerprint returns the runner's hardware fingerprint.
func (r *Runner) GetFingerprint() string {
r.fingerprintMu.RLock()
defer r.fingerprintMu.RUnlock()
return r.fingerprint
}
// GetID returns the runner ID.
func (r *Runner) GetID() int64 {
return r.id
}
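
A minimal lifecycle sketch (an editorial illustration, not part of this commit) of how a command-line entrypoint might wire up the Runner above. The manager URL, runner name, and the JIGGABLEND_API_KEY environment variable are assumptions made for the example; the real cmd wiring may differ.

package main

import (
	"log"
	"os"
	"os/signal"
	"syscall"
	"time"

	"jiggablend/internal/runner"
)

func main() {
	hostname, _ := os.Hostname()
	// Assumed manager URL and runner name, purely for illustration.
	r := runner.New("http://localhost:8080", "runner-1", hostname)

	if err := r.CheckRequiredTools(); err != nil {
		log.Fatalf("missing required tool: %v", err)
	}
	if _, err := r.Register(os.Getenv("JIGGABLEND_API_KEY")); err != nil {
		log.Fatalf("registration failed: %v", err)
	}

	// Stop polling, kill child processes, and remove the workspace on shutdown.
	sig := make(chan os.Signal, 1)
	signal.Notify(sig, syscall.SIGINT, syscall.SIGTERM)
	go func() {
		<-sig
		r.Stop()
		r.KillAllProcesses()
		r.Cleanup()
		os.Exit(0)
	}()

	r.Start(5 * time.Second) // blocks, polling for jobs until Stop is called
}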

View File

@@ -0,0 +1,588 @@
package tasks
import (
"bufio"
"errors"
"fmt"
"log"
"math"
"os"
"os/exec"
"path/filepath"
"regexp"
"sort"
"strings"
"jiggablend/internal/runner/encoding"
)
// EncodeProcessor handles encode tasks.
type EncodeProcessor struct{}
// NewEncodeProcessor creates a new encode processor.
func NewEncodeProcessor() *EncodeProcessor {
return &EncodeProcessor{}
}
// Process executes an encode task.
func (p *EncodeProcessor) Process(ctx *Context) error {
ctx.Info(fmt.Sprintf("Starting encode task: job %d", ctx.JobID))
log.Printf("Processing encode task %d for job %d", ctx.TaskID, ctx.JobID)
// Create temporary work directory
workDir, err := ctx.Workspace.CreateVideoDir(ctx.JobID)
if err != nil {
return fmt.Errorf("failed to create work directory: %w", err)
}
defer func() {
if err := ctx.Workspace.CleanupVideoDir(ctx.JobID); err != nil {
log.Printf("Warning: Failed to cleanup encode work directory: %v", err)
}
}()
// Get output format and frame rate
outputFormat := ctx.GetOutputFormat()
if outputFormat == "" {
outputFormat = "EXR_264_MP4"
}
frameRate := ctx.GetFrameRate()
ctx.Info(fmt.Sprintf("Encode: detected output format '%s'", outputFormat))
ctx.Info(fmt.Sprintf("Encode: using frame rate %.2f fps", frameRate))
// Get job files
files, err := ctx.Manager.GetJobFiles(ctx.JobID)
if err != nil {
ctx.Error(fmt.Sprintf("Failed to get job files: %v", err))
return fmt.Errorf("failed to get job files: %w", err)
}
ctx.Info(fmt.Sprintf("GetJobFiles returned %d total files for job %d", len(files), ctx.JobID))
// Log all files for debugging
for _, file := range files {
ctx.Info(fmt.Sprintf("File: %s (type: %s, size: %d)", file.FileName, file.FileType, file.FileSize))
}
// Source format: all supported output formats currently encode from EXR frames
sourceFormat := "exr"
fileExt := ".exr"
// Find and deduplicate frame files (EXR or PNG)
frameFileSet := make(map[string]bool)
var frameFilesList []string
for _, file := range files {
if file.FileType == "output" && strings.HasSuffix(strings.ToLower(file.FileName), fileExt) {
// Deduplicate by filename
if !frameFileSet[file.FileName] {
frameFileSet[file.FileName] = true
frameFilesList = append(frameFilesList, file.FileName)
}
}
}
if len(frameFilesList) == 0 {
// Log why no files matched (deduplicate for error reporting)
outputFileSet := make(map[string]bool)
frameFilesOtherTypeSet := make(map[string]bool)
var outputFiles []string
var frameFilesOtherType []string
for _, file := range files {
if file.FileType == "output" {
if !outputFileSet[file.FileName] {
outputFileSet[file.FileName] = true
outputFiles = append(outputFiles, file.FileName)
}
}
if strings.HasSuffix(strings.ToLower(file.FileName), fileExt) {
key := fmt.Sprintf("%s (type: %s)", file.FileName, file.FileType)
if !frameFilesOtherTypeSet[key] {
frameFilesOtherTypeSet[key] = true
frameFilesOtherType = append(frameFilesOtherType, key)
}
}
}
ctx.Error(fmt.Sprintf("no %s frame files found for encode: found %d total files, %d unique output files, %d unique %s files (with other types)", strings.ToUpper(fileExt[1:]), len(files), len(outputFiles), len(frameFilesOtherType), strings.ToUpper(fileExt[1:])))
if len(outputFiles) > 0 {
ctx.Error(fmt.Sprintf("Output files found: %v", outputFiles))
}
if len(frameFilesOtherType) > 0 {
ctx.Error(fmt.Sprintf("%s files with wrong type: %v", strings.ToUpper(fileExt[1:]), frameFilesOtherType))
}
err := fmt.Errorf("no %s frame files found for encode", strings.ToUpper(fileExt[1:]))
return err
}
ctx.Info(fmt.Sprintf("Found %d %s frames for encode", len(frameFilesList), strings.ToUpper(fileExt[1:])))
// Download frames
ctx.Info(fmt.Sprintf("Downloading %d %s frames for encode...", len(frameFilesList), strings.ToUpper(fileExt[1:])))
var frameFiles []string
for i, fileName := range frameFilesList {
ctx.Info(fmt.Sprintf("Downloading frame %d/%d: %s", i+1, len(frameFilesList), fileName))
framePath := filepath.Join(workDir, fileName)
if err := ctx.Manager.DownloadFrame(ctx.JobID, fileName, framePath); err != nil {
ctx.Error(fmt.Sprintf("Failed to download %s frame %s: %v", strings.ToUpper(fileExt[1:]), fileName, err))
log.Printf("Failed to download %s frame for encode %s: %v", strings.ToUpper(fileExt[1:]), fileName, err)
continue
}
ctx.Info(fmt.Sprintf("Successfully downloaded frame %d/%d: %s", i+1, len(frameFilesList), fileName))
frameFiles = append(frameFiles, framePath)
}
if len(frameFiles) == 0 {
err := fmt.Errorf("failed to download any %s frames for encode", strings.ToUpper(fileExt[1:]))
ctx.Error(err.Error())
return err
}
sort.Strings(frameFiles)
ctx.Info(fmt.Sprintf("Downloaded %d frames", len(frameFiles)))
// Check if EXR files have alpha channel and HDR content (only for EXR source format)
hasAlpha := false
hasHDR := false
if sourceFormat == "exr" {
// Check first frame for alpha channel and HDR using ffprobe
firstFrame := frameFiles[0]
hasAlpha = detectAlphaChannel(ctx, firstFrame)
if hasAlpha {
ctx.Info("Detected alpha channel in EXR files")
} else {
ctx.Info("No alpha channel detected in EXR files")
}
hasHDR = detectHDR(ctx, firstFrame)
if hasHDR {
ctx.Info("Detected HDR content in EXR files")
} else {
ctx.Info("No HDR content detected in EXR files (SDR range)")
}
}
// Generate video
// Use alpha if:
// 1. the user explicitly enabled it OR the source has an alpha channel, AND
// 2. the codec supports alpha (AV1 or VP9)
preserveAlpha := ctx.ShouldPreserveAlpha()
useAlpha := (preserveAlpha || hasAlpha) && (outputFormat == "EXR_AV1_MP4" || outputFormat == "EXR_VP9_WEBM")
if (preserveAlpha || hasAlpha) && outputFormat == "EXR_264_MP4" {
ctx.Warn("Alpha channel requested/detected but H.264 does not support alpha. Consider using EXR_AV1_MP4 or EXR_VP9_WEBM to preserve alpha.")
}
if preserveAlpha && !hasAlpha {
ctx.Warn("Alpha preservation requested but no alpha channel detected in EXR files.")
}
if useAlpha {
if preserveAlpha && hasAlpha {
ctx.Info("Alpha preservation enabled: Using alpha channel encoding")
} else if hasAlpha {
ctx.Info("Alpha channel detected - automatically enabling alpha encoding")
}
}
var outputExt string
switch outputFormat {
case "EXR_VP9_WEBM":
outputExt = "webm"
ctx.Info("Encoding WebM video with VP9 codec (with alpha channel and HDR support)...")
case "EXR_AV1_MP4":
outputExt = "mp4"
ctx.Info("Encoding MP4 video with AV1 codec (with alpha channel)...")
default:
outputExt = "mp4"
ctx.Info("Encoding MP4 video with H.264 codec...")
}
outputVideo := filepath.Join(workDir, fmt.Sprintf("output_%d.%s", ctx.JobID, outputExt))
// Build input pattern
firstFrame := frameFiles[0]
baseName := filepath.Base(firstFrame)
re := regexp.MustCompile(`_(\d+)\.`)
var pattern string
var startNumber int
frameNumStr := re.FindStringSubmatch(baseName)
if len(frameNumStr) > 1 {
pattern = re.ReplaceAllString(baseName, "_%04d.")
fmt.Sscanf(frameNumStr[1], "%d", &startNumber)
} else {
startNumber = extractFrameNumber(baseName)
pattern = strings.Replace(baseName, fmt.Sprintf("%d", startNumber), "%04d", 1)
}
patternPath := filepath.Join(workDir, pattern)
// Select encoder and build command (software encoding only)
var encoder encoding.Encoder
switch outputFormat {
case "EXR_AV1_MP4":
encoder = ctx.Encoder.SelectAV1()
case "EXR_VP9_WEBM":
encoder = ctx.Encoder.SelectVP9()
default:
encoder = ctx.Encoder.SelectH264()
}
ctx.Info(fmt.Sprintf("Using encoder: %s (%s)", encoder.Name(), encoder.Codec()))
// All software encoders use 2-pass for optimal quality
ctx.Info("Starting 2-pass encode for optimal quality...")
// Pass 1
ctx.Info("Pass 1/2: Analyzing content for optimal encode...")
softEncoder := encoder.(*encoding.SoftwareEncoder)
// Use HDR if: user explicitly enabled it OR HDR content was detected
preserveHDR := (ctx.ShouldPreserveHDR() || hasHDR) && sourceFormat == "exr"
if hasHDR && !ctx.ShouldPreserveHDR() {
ctx.Info("HDR content detected - automatically enabling HDR preservation")
}
pass1Cmd := softEncoder.BuildPass1Command(&encoding.EncodeConfig{
InputPattern: patternPath,
OutputPath: outputVideo,
StartFrame: startNumber,
FrameRate: frameRate,
WorkDir: workDir,
UseAlpha: useAlpha,
TwoPass: true,
SourceFormat: sourceFormat,
PreserveHDR: preserveHDR,
})
if err := pass1Cmd.Run(); err != nil {
ctx.Warn(fmt.Sprintf("Pass 1 completed (warnings expected): %v", err))
}
// Pass 2
ctx.Info("Pass 2/2: Encoding with optimal quality...")
preserveHDR = (ctx.ShouldPreserveHDR() || hasHDR) && sourceFormat == "exr"
if preserveHDR {
if hasHDR && !ctx.ShouldPreserveHDR() {
ctx.Info("HDR preservation enabled (auto-detected): Using HLG transfer with bt709 primaries")
} else {
ctx.Info("HDR preservation enabled: Using HLG transfer with bt709 primaries")
}
}
config := &encoding.EncodeConfig{
InputPattern: patternPath,
OutputPath: outputVideo,
StartFrame: startNumber,
FrameRate: frameRate,
WorkDir: workDir,
UseAlpha: useAlpha,
TwoPass: true, // Software encoding always uses 2-pass for quality
SourceFormat: sourceFormat,
PreserveHDR: preserveHDR,
}
cmd := encoder.BuildCommand(config)
if cmd == nil {
return errors.New("failed to build encode command")
}
// Set up pipes
stdoutPipe, err := cmd.StdoutPipe()
if err != nil {
return fmt.Errorf("failed to create stdout pipe: %w", err)
}
stderrPipe, err := cmd.StderrPipe()
if err != nil {
return fmt.Errorf("failed to create stderr pipe: %w", err)
}
if err := cmd.Start(); err != nil {
return fmt.Errorf("failed to start encode command: %w", err)
}
ctx.Processes.Track(ctx.TaskID, cmd)
defer ctx.Processes.Untrack(ctx.TaskID)
// Stream stdout
stdoutDone := make(chan bool)
go func() {
defer close(stdoutDone)
scanner := bufio.NewScanner(stdoutPipe)
for scanner.Scan() {
line := scanner.Text()
if line != "" {
ctx.Info(line)
}
}
}()
// Stream stderr
stderrDone := make(chan bool)
go func() {
defer close(stderrDone)
scanner := bufio.NewScanner(stderrPipe)
for scanner.Scan() {
line := scanner.Text()
if line != "" {
ctx.Warn(line)
}
}
}()
err = cmd.Wait()
<-stdoutDone
<-stderrDone
if err != nil {
var errMsg string
if exitErr, ok := err.(*exec.ExitError); ok {
if exitErr.ExitCode() == 137 {
errMsg = "FFmpeg was killed due to excessive memory usage (OOM)"
} else {
errMsg = fmt.Sprintf("ffmpeg encoding failed: %v", err)
}
} else {
errMsg = fmt.Sprintf("ffmpeg encoding failed: %v", err)
}
if sizeErr := checkFFmpegSizeError(errMsg); sizeErr != nil {
ctx.Error(sizeErr.Error())
return sizeErr
}
ctx.Error(errMsg)
return errors.New(errMsg)
}
// Verify output
if _, err := os.Stat(outputVideo); os.IsNotExist(err) {
err := fmt.Errorf("video %s file not created: %s", outputExt, outputVideo)
ctx.Error(err.Error())
return err
}
// Clean up 2-pass log files
os.Remove(filepath.Join(workDir, "ffmpeg2pass-0.log"))
os.Remove(filepath.Join(workDir, "ffmpeg2pass-0.log.mbtree"))
ctx.Info(fmt.Sprintf("%s video encoded successfully", strings.ToUpper(outputExt)))
// Upload video
ctx.Info(fmt.Sprintf("Uploading encoded %s video...", strings.ToUpper(outputExt)))
uploadPath := fmt.Sprintf("/api/runner/jobs/%d/upload", ctx.JobID)
if err := ctx.Manager.UploadFile(uploadPath, ctx.JobToken, outputVideo); err != nil {
ctx.Error(fmt.Sprintf("Failed to upload %s: %v", strings.ToUpper(outputExt), err))
return fmt.Errorf("failed to upload %s: %w", strings.ToUpper(outputExt), err)
}
ctx.Info(fmt.Sprintf("Successfully uploaded %s: %s", strings.ToUpper(outputExt), filepath.Base(outputVideo)))
log.Printf("Successfully generated and uploaded %s for job %d: %s", strings.ToUpper(outputExt), ctx.JobID, filepath.Base(outputVideo))
return nil
}
// detectAlphaChannel checks if an EXR file has an alpha channel using ffprobe
func detectAlphaChannel(ctx *Context, filePath string) bool {
// Use ffprobe to check pixel format and stream properties
// EXR files with alpha will have formats like gbrapf32le (RGBA) vs gbrpf32le (RGB)
cmd := exec.Command("ffprobe",
"-v", "error",
"-select_streams", "v:0",
"-show_entries", "stream=pix_fmt:stream=codec_name",
"-of", "default=noprint_wrappers=1",
filePath,
)
output, err := cmd.Output()
if err != nil {
// If ffprobe fails, assume no alpha (conservative approach)
ctx.Warn(fmt.Sprintf("Failed to detect alpha channel in %s: %v", filepath.Base(filePath), err))
return false
}
outputStr := string(output)
// Check pixel format - EXR with alpha typically has 'a' in the format name (e.g., gbrapf32le)
// Also check for formats that explicitly indicate alpha
hasAlpha := strings.Contains(outputStr, "pix_fmt=gbrap") ||
strings.Contains(outputStr, "pix_fmt=rgba") ||
strings.Contains(outputStr, "pix_fmt=yuva") ||
strings.Contains(outputStr, "pix_fmt=abgr")
if hasAlpha {
ctx.Info(fmt.Sprintf("Detected alpha channel in EXR file: %s", filepath.Base(filePath)))
}
return hasAlpha
}
// detectHDR checks if an EXR file contains HDR content using ffprobe
func detectHDR(ctx *Context, filePath string) bool {
// First, check if the pixel format supports HDR (32-bit float)
cmd := exec.Command("ffprobe",
"-v", "error",
"-select_streams", "v:0",
"-show_entries", "stream=pix_fmt",
"-of", "default=noprint_wrappers=1:nokey=1",
filePath,
)
output, err := cmd.Output()
if err != nil {
// If ffprobe fails, assume no HDR (conservative approach)
ctx.Warn(fmt.Sprintf("Failed to detect HDR in %s: %v", filepath.Base(filePath), err))
return false
}
pixFmt := strings.TrimSpace(string(output))
// EXR files with 32-bit float format (gbrpf32le, gbrapf32le) can contain HDR
// Check if it's a 32-bit float format
isFloat32 := strings.Contains(pixFmt, "f32") || strings.Contains(pixFmt, "f32le")
if !isFloat32 {
// Not a float format, definitely not HDR
return false
}
// For 32-bit float EXR, check whether pixel values exceed the SDR range (> 1.0).
// signalstats is attempted first, but its YUV statistics are not parsed here;
// detection ultimately relies on sampling pixels directly.
cmd = exec.Command("ffmpeg",
"-v", "error",
"-i", filePath,
"-vf", "signalstats",
"-f", "null",
"-",
)
output, err = cmd.CombinedOutput()
if err != nil {
// If stats extraction fails, try sampling a few pixels directly
return detectHDRBySampling(ctx, filePath)
}
// signalstats reports YUV statistics, which do not map cleanly onto the
// linear RGB values stored in float EXRs, so fall back to sampling pixels
// directly to decide whether any channel exceeds the SDR range.
return detectHDRBySampling(ctx, filePath)
}
// detectHDRBySampling samples pixels from multiple regions to detect HDR content
func detectHDRBySampling(ctx *Context, filePath string) bool {
// Sample multiple 10x10 regions from different parts of the image
// This gives us better coverage than a single sample
sampleRegions := []string{
"crop=10:10:iw/4:ih/4", // Top-left quadrant
"crop=10:10:iw*3/4:ih/4", // Top-right quadrant
"crop=10:10:iw/4:ih*3/4", // Bottom-left quadrant
"crop=10:10:iw*3/4:ih*3/4", // Bottom-right quadrant
"crop=10:10:iw/2:ih/2", // Center
}
for _, region := range sampleRegions {
cmd := exec.Command("ffmpeg",
"-v", "error",
"-i", filePath,
"-vf", fmt.Sprintf("%s,scale=1:1", region),
"-f", "rawvideo",
"-pix_fmt", "gbrpf32le",
"-",
)
output, err := cmd.Output()
if err != nil {
continue // Skip this region if sampling fails
}
// Parse the float32 values (4 bytes per float, 3 channels RGB)
if len(output) >= 12 { // At least 3 floats (RGB) = 12 bytes
for i := 0; i < len(output)-11; i += 12 {
// Read RGB values (little-endian float32)
r := float32FromBytes(output[i : i+4])
g := float32FromBytes(output[i+4 : i+8])
b := float32FromBytes(output[i+8 : i+12])
// Check if any channel exceeds 1.0 (SDR range)
if r > 1.0 || g > 1.0 || b > 1.0 {
maxVal := max(r, max(g, b))
ctx.Info(fmt.Sprintf("Detected HDR content in EXR file: %s (max value: %.2f)", filepath.Base(filePath), maxVal))
return true
}
}
}
}
// If we sampled multiple regions and none exceed 1.0, it's likely SDR content
// But since it's 32-bit float format, user can still manually enable HDR if needed
return false
}
// float32FromBytes converts 4 bytes (little-endian) to float32
func float32FromBytes(bytes []byte) float32 {
if len(bytes) < 4 {
return 0
}
bits := uint32(bytes[0]) | uint32(bytes[1])<<8 | uint32(bytes[2])<<16 | uint32(bytes[3])<<24
return math.Float32frombits(bits)
}
// max returns the maximum of two float32 values
func max(a, b float32) float32 {
if a > b {
return a
}
return b
}
func extractFrameNumber(filename string) int {
parts := strings.Split(filepath.Base(filename), "_")
if len(parts) < 2 {
return 0
}
framePart := strings.Split(parts[1], ".")[0]
var frameNum int
fmt.Sscanf(framePart, "%d", &frameNum)
return frameNum
}
func checkFFmpegSizeError(output string) error {
outputLower := strings.ToLower(output)
if strings.Contains(outputLower, "hardware does not support encoding at size") {
constraintsMatch := regexp.MustCompile(`constraints:\s*width\s+(\d+)-(\d+)\s+height\s+(\d+)-(\d+)`).FindStringSubmatch(output)
if len(constraintsMatch) == 5 {
return fmt.Errorf("video frame size is outside hardware encoder limits. Hardware requires: width %s-%s, height %s-%s",
constraintsMatch[1], constraintsMatch[2], constraintsMatch[3], constraintsMatch[4])
}
return fmt.Errorf("video frame size is outside hardware encoder limits")
}
if strings.Contains(outputLower, "picture size") && strings.Contains(outputLower, "is invalid") {
sizeMatch := regexp.MustCompile(`picture size\s+(\d+)x(\d+)`).FindStringSubmatch(output)
if len(sizeMatch) == 3 {
return fmt.Errorf("invalid video frame size: %sx%s", sizeMatch[1], sizeMatch[2])
}
return fmt.Errorf("invalid video frame size")
}
if strings.Contains(outputLower, "error while opening encoder") &&
(strings.Contains(outputLower, "width") || strings.Contains(outputLower, "height") || strings.Contains(outputLower, "size")) {
sizeMatch := regexp.MustCompile(`at size\s+(\d+)x(\d+)`).FindStringSubmatch(output)
if len(sizeMatch) == 3 {
return fmt.Errorf("hardware encoder cannot encode frame size %sx%s", sizeMatch[1], sizeMatch[2])
}
return fmt.Errorf("hardware encoder error: frame size may be invalid")
}
if strings.Contains(outputLower, "invalid") &&
(strings.Contains(outputLower, "width") || strings.Contains(outputLower, "height") || strings.Contains(outputLower, "dimension")) {
return fmt.Errorf("invalid frame dimensions detected")
}
return nil
}
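
A small table-driven test sketch for checkFFmpegSizeError above (illustrative only; the sample ffmpeg messages are invented to exercise each branch and are not taken from real ffmpeg output).

package tasks

import "testing"

func TestCheckFFmpegSizeError(t *testing.T) {
	cases := []struct {
		name    string
		output  string
		wantErr bool
	}{
		{"hardware size limit", "Hardware does not support encoding at size 8000x8000 constraints: width 64-4096 height 64-4096", true},
		{"invalid picture size", "picture size 0x0 is invalid", true},
		{"unrelated failure", "Conversion failed!", false},
	}
	for _, tc := range cases {
		err := checkFFmpegSizeError(tc.output)
		if (err != nil) != tc.wantErr {
			t.Errorf("%s: got err=%v, wantErr=%v", tc.name, err, tc.wantErr)
		}
	}
}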

View File

@@ -0,0 +1,156 @@
// Package tasks provides task processing implementations.
package tasks
import (
"jiggablend/internal/runner/api"
"jiggablend/internal/runner/blender"
"jiggablend/internal/runner/encoding"
"jiggablend/internal/runner/workspace"
"jiggablend/pkg/executils"
"jiggablend/pkg/types"
)
// Processor handles a specific task type.
type Processor interface {
Process(ctx *Context) error
}
// Context provides task execution context.
type Context struct {
TaskID int64
JobID int64
JobName string
Frame int
TaskType string
WorkDir string
JobToken string
Metadata *types.BlendMetadata
Manager *api.ManagerClient
JobConn *api.JobConnection
Workspace *workspace.Manager
Blender *blender.Manager
Encoder *encoding.Selector
Processes *executils.ProcessTracker
}
// NewContext creates a new task context.
func NewContext(
taskID, jobID int64,
jobName string,
frame int,
taskType string,
workDir string,
jobToken string,
metadata *types.BlendMetadata,
manager *api.ManagerClient,
jobConn *api.JobConnection,
ws *workspace.Manager,
blenderMgr *blender.Manager,
encoder *encoding.Selector,
processes *executils.ProcessTracker,
) *Context {
return &Context{
TaskID: taskID,
JobID: jobID,
JobName: jobName,
Frame: frame,
TaskType: taskType,
WorkDir: workDir,
JobToken: jobToken,
Metadata: metadata,
Manager: manager,
JobConn: jobConn,
Workspace: ws,
Blender: blenderMgr,
Encoder: encoder,
Processes: processes,
}
}
// Log sends a log entry to the manager.
func (c *Context) Log(level types.LogLevel, message string) {
if c.JobConn != nil {
c.JobConn.Log(c.TaskID, level, message)
}
}
// Info logs an info message.
func (c *Context) Info(message string) {
c.Log(types.LogLevelInfo, message)
}
// Warn logs a warning message.
func (c *Context) Warn(message string) {
c.Log(types.LogLevelWarn, message)
}
// Error logs an error message.
func (c *Context) Error(message string) {
c.Log(types.LogLevelError, message)
}
// Progress sends a progress update.
func (c *Context) Progress(progress float64) {
if c.JobConn != nil {
c.JobConn.Progress(c.TaskID, progress)
}
}
// OutputUploaded notifies that an output file was uploaded.
func (c *Context) OutputUploaded(fileName string) {
if c.JobConn != nil {
c.JobConn.OutputUploaded(c.TaskID, fileName)
}
}
// Complete sends task completion.
func (c *Context) Complete(success bool, errorMsg error) {
if c.JobConn != nil {
c.JobConn.Complete(c.TaskID, success, errorMsg)
}
}
// GetOutputFormat returns the output format from metadata or default.
func (c *Context) GetOutputFormat() string {
if c.Metadata != nil && c.Metadata.RenderSettings.OutputFormat != "" {
return c.Metadata.RenderSettings.OutputFormat
}
return "PNG"
}
// GetFrameRate returns the frame rate from metadata or default.
func (c *Context) GetFrameRate() float64 {
if c.Metadata != nil && c.Metadata.RenderSettings.FrameRate > 0 {
return c.Metadata.RenderSettings.FrameRate
}
return 24.0
}
// GetBlenderVersion returns the Blender version from metadata.
func (c *Context) GetBlenderVersion() string {
if c.Metadata != nil {
return c.Metadata.BlenderVersion
}
return ""
}
// ShouldUnhideObjects returns whether to unhide objects.
func (c *Context) ShouldUnhideObjects() bool {
return c.Metadata != nil && c.Metadata.UnhideObjects != nil && *c.Metadata.UnhideObjects
}
// ShouldEnableExecution returns whether to enable auto-execution.
func (c *Context) ShouldEnableExecution() bool {
return c.Metadata != nil && c.Metadata.EnableExecution != nil && *c.Metadata.EnableExecution
}
// ShouldPreserveHDR returns whether to preserve HDR range for EXR encoding.
func (c *Context) ShouldPreserveHDR() bool {
return c.Metadata != nil && c.Metadata.PreserveHDR != nil && *c.Metadata.PreserveHDR
}
// ShouldPreserveAlpha returns whether to preserve alpha channel for EXR encoding.
func (c *Context) ShouldPreserveAlpha() bool {
return c.Metadata != nil && c.Metadata.PreserveAlpha != nil && *c.Metadata.PreserveAlpha
}
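
An illustrative sketch (not in this commit) of how a new task type could plug into the Processor and Context API above: a hypothetical "noop" processor that only exercises the logging and progress helpers. The type name is an assumption for the example.

package tasks

import "fmt"

// NoopProcessor is a hypothetical processor used only to show the contract.
type NoopProcessor struct{}

// Compile-time check that NoopProcessor satisfies the Processor interface.
var _ Processor = (*NoopProcessor)(nil)

// NewNoopProcessor creates a new no-op processor.
func NewNoopProcessor() *NoopProcessor {
	return &NoopProcessor{}
}

// Process reports progress and succeeds without doing any work. Completion is
// left to the runner's executeJob, which calls ctx.Complete for all task types.
func (p *NoopProcessor) Process(ctx *Context) error {
	ctx.Info(fmt.Sprintf("noop task for job %d starting", ctx.JobID))
	for i := 1; i <= 4; i++ {
		ctx.Progress(float64(i) * 25.0) // 25%, 50%, 75%, 100%
	}
	ctx.Info("noop task finished")
	return nil
}

In Runner.Register such a type would be wired up alongside the existing processors, e.g. r.processors["noop"] = tasks.NewNoopProcessor().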

View File

@@ -0,0 +1,301 @@
package tasks
import (
"bufio"
"encoding/json"
"errors"
"fmt"
"log"
"os"
"os/exec"
"path/filepath"
"strings"
"jiggablend/internal/runner/blender"
"jiggablend/internal/runner/workspace"
"jiggablend/pkg/scripts"
"jiggablend/pkg/types"
)
// RenderProcessor handles render tasks.
type RenderProcessor struct{}
// NewRenderProcessor creates a new render processor.
func NewRenderProcessor() *RenderProcessor {
return &RenderProcessor{}
}
// Process executes a render task.
func (p *RenderProcessor) Process(ctx *Context) error {
ctx.Info(fmt.Sprintf("Starting task: job %d, frame %d, format: %s",
ctx.JobID, ctx.Frame, ctx.GetOutputFormat()))
log.Printf("Processing task %d: job %d, frame %d", ctx.TaskID, ctx.JobID, ctx.Frame)
// Find .blend file
blendFile, err := workspace.FindFirstBlendFile(ctx.WorkDir)
if err != nil {
return fmt.Errorf("failed to find blend file: %w", err)
}
// Get Blender binary
blenderBinary := "blender"
if version := ctx.GetBlenderVersion(); version != "" {
ctx.Info(fmt.Sprintf("Job requires Blender %s", version))
binaryPath, err := ctx.Blender.GetBinaryPath(version)
if err != nil {
ctx.Warn(fmt.Sprintf("Could not get Blender %s, using system blender: %v", version, err))
} else {
blenderBinary = binaryPath
ctx.Info(fmt.Sprintf("Using Blender binary: %s", blenderBinary))
}
} else {
ctx.Info("No Blender version specified, using system blender")
}
// Create output directory
outputDir := filepath.Join(ctx.WorkDir, "output")
if err := os.MkdirAll(outputDir, 0755); err != nil {
return fmt.Errorf("failed to create output directory: %w", err)
}
// Create home directory for Blender inside workspace
blenderHome := filepath.Join(ctx.WorkDir, "home")
if err := os.MkdirAll(blenderHome, 0755); err != nil {
return fmt.Errorf("failed to create Blender home directory: %w", err)
}
// Determine render format
outputFormat := ctx.GetOutputFormat()
renderFormat := outputFormat
if outputFormat == "EXR_264_MP4" || outputFormat == "EXR_AV1_MP4" || outputFormat == "EXR_VP9_WEBM" {
renderFormat = "EXR" // Use EXR for maximum quality
}
// Create render script
if err := p.createRenderScript(ctx, renderFormat); err != nil {
return err
}
// Render
ctx.Info(fmt.Sprintf("Starting Blender render for frame %d...", ctx.Frame))
if err := p.runBlender(ctx, blenderBinary, blendFile, outputDir, renderFormat, blenderHome); err != nil {
ctx.Error(fmt.Sprintf("Blender render failed: %v", err))
return err
}
// Verify output
if _, err := p.findOutputFile(ctx, outputDir, renderFormat); err != nil {
ctx.Error(fmt.Sprintf("Output verification failed: %v", err))
return err
}
ctx.Info(fmt.Sprintf("Blender render completed for frame %d", ctx.Frame))
return nil
}
func (p *RenderProcessor) createRenderScript(ctx *Context, renderFormat string) error {
formatFilePath := filepath.Join(ctx.WorkDir, "output_format.txt")
renderSettingsFilePath := filepath.Join(ctx.WorkDir, "render_settings.json")
// Build unhide code conditionally
unhideCode := ""
if ctx.ShouldUnhideObjects() {
unhideCode = scripts.UnhideObjects
}
// Load template and replace placeholders
scriptContent := scripts.RenderBlenderTemplate
scriptContent = strings.ReplaceAll(scriptContent, "{{UNHIDE_CODE}}", unhideCode)
scriptContent = strings.ReplaceAll(scriptContent, "{{FORMAT_FILE_PATH}}", fmt.Sprintf("%q", formatFilePath))
scriptContent = strings.ReplaceAll(scriptContent, "{{RENDER_SETTINGS_FILE}}", fmt.Sprintf("%q", renderSettingsFilePath))
scriptPath := filepath.Join(ctx.WorkDir, "enable_gpu.py")
if err := os.WriteFile(scriptPath, []byte(scriptContent), 0644); err != nil {
errMsg := fmt.Sprintf("failed to create GPU enable script: %v", err)
ctx.Error(errMsg)
return errors.New(errMsg)
}
// Write output format
outputFormat := ctx.GetOutputFormat()
ctx.Info(fmt.Sprintf("Writing output format '%s' to format file", outputFormat))
if err := os.WriteFile(formatFilePath, []byte(outputFormat), 0644); err != nil {
errMsg := fmt.Sprintf("failed to create format file: %v", err)
ctx.Error(errMsg)
return errors.New(errMsg)
}
// Write render settings if available
if ctx.Metadata != nil && ctx.Metadata.RenderSettings.EngineSettings != nil {
settingsJSON, err := json.Marshal(ctx.Metadata.RenderSettings)
if err == nil {
if err := os.WriteFile(renderSettingsFilePath, settingsJSON, 0644); err != nil {
ctx.Warn(fmt.Sprintf("Failed to write render settings file: %v", err))
}
}
}
return nil
}
func (p *RenderProcessor) runBlender(ctx *Context, blenderBinary, blendFile, outputDir, renderFormat, blenderHome string) error {
scriptPath := filepath.Join(ctx.WorkDir, "enable_gpu.py")
args := []string{"-b", blendFile, "--python", scriptPath}
if ctx.ShouldEnableExecution() {
args = append(args, "--enable-autoexec")
}
// Output pattern
outputPattern := filepath.Join(outputDir, fmt.Sprintf("frame_####.%s", strings.ToLower(renderFormat)))
outputAbsPattern, _ := filepath.Abs(outputPattern)
args = append(args, "-o", outputAbsPattern)
args = append(args, "-f", fmt.Sprintf("%d", ctx.Frame))
// Wrap with xvfb-run
xvfbArgs := []string{"-a", "-s", "-screen 0 800x600x24", blenderBinary}
xvfbArgs = append(xvfbArgs, args...)
cmd := exec.Command("xvfb-run", xvfbArgs...)
cmd.Dir = ctx.WorkDir
// Set up environment with custom HOME directory
env := os.Environ()
// Remove existing HOME if present and add our custom one
newEnv := make([]string, 0, len(env)+1)
for _, e := range env {
if !strings.HasPrefix(e, "HOME=") {
newEnv = append(newEnv, e)
}
}
newEnv = append(newEnv, fmt.Sprintf("HOME=%s", blenderHome))
cmd.Env = newEnv
// Set up pipes
stdoutPipe, err := cmd.StdoutPipe()
if err != nil {
return fmt.Errorf("failed to create stdout pipe: %w", err)
}
stderrPipe, err := cmd.StderrPipe()
if err != nil {
return fmt.Errorf("failed to create stderr pipe: %w", err)
}
if err := cmd.Start(); err != nil {
return fmt.Errorf("failed to start blender: %w", err)
}
// Track process
ctx.Processes.Track(ctx.TaskID, cmd)
defer ctx.Processes.Untrack(ctx.TaskID)
// Stream stdout
stdoutDone := make(chan bool)
go func() {
defer close(stdoutDone)
scanner := bufio.NewScanner(stdoutPipe)
for scanner.Scan() {
line := scanner.Text()
if line != "" {
shouldFilter, logLevel := blender.FilterLog(line)
if !shouldFilter {
ctx.Log(logLevel, line)
}
}
}
}()
// Stream stderr
stderrDone := make(chan bool)
go func() {
defer close(stderrDone)
scanner := bufio.NewScanner(stderrPipe)
for scanner.Scan() {
line := scanner.Text()
if line != "" {
shouldFilter, logLevel := blender.FilterLog(line)
if !shouldFilter {
if logLevel == types.LogLevelInfo {
logLevel = types.LogLevelWarn
}
ctx.Log(logLevel, line)
}
}
}
}()
// Wait for completion
err = cmd.Wait()
<-stdoutDone
<-stderrDone
if err != nil {
if exitErr, ok := err.(*exec.ExitError); ok {
if exitErr.ExitCode() == 137 {
return errors.New("Blender was killed due to excessive memory usage (OOM)")
}
}
return fmt.Errorf("blender failed: %w", err)
}
return nil
}
func (p *RenderProcessor) findOutputFile(ctx *Context, outputDir, renderFormat string) (string, error) {
entries, err := os.ReadDir(outputDir)
if err != nil {
return "", fmt.Errorf("failed to read output directory: %w", err)
}
ctx.Info("Checking output directory for files...")
// Try exact match first
expectedFile := filepath.Join(outputDir, fmt.Sprintf("frame_%04d.%s", ctx.Frame, strings.ToLower(renderFormat)))
if _, err := os.Stat(expectedFile); err == nil {
ctx.Info(fmt.Sprintf("Found output file: %s", filepath.Base(expectedFile)))
return expectedFile, nil
}
// Try without zero padding
altFile := filepath.Join(outputDir, fmt.Sprintf("frame_%d.%s", ctx.Frame, strings.ToLower(renderFormat)))
if _, err := os.Stat(altFile); err == nil {
ctx.Info(fmt.Sprintf("Found output file: %s", filepath.Base(altFile)))
return altFile, nil
}
// Try just frame number
altFile2 := filepath.Join(outputDir, fmt.Sprintf("%04d.%s", ctx.Frame, strings.ToLower(renderFormat)))
if _, err := os.Stat(altFile2); err == nil {
ctx.Info(fmt.Sprintf("Found output file: %s", filepath.Base(altFile2)))
return altFile2, nil
}
// Search through all files
for _, entry := range entries {
if !entry.IsDir() {
fileName := entry.Name()
if strings.Contains(fileName, "%04d") || strings.Contains(fileName, "%d") {
ctx.Warn(fmt.Sprintf("Skipping file with literal pattern: %s", fileName))
continue
}
frameStr := fmt.Sprintf("%d", ctx.Frame)
frameStrPadded := fmt.Sprintf("%04d", ctx.Frame)
if strings.Contains(fileName, frameStrPadded) ||
(strings.Contains(fileName, frameStr) && strings.HasSuffix(strings.ToLower(fileName), strings.ToLower(renderFormat))) {
outputFile := filepath.Join(outputDir, fileName)
ctx.Info(fmt.Sprintf("Found output file: %s", fileName))
return outputFile, nil
}
}
}
// Not found
fileList := []string{}
for _, entry := range entries {
if !entry.IsDir() {
fileList = append(fileList, entry.Name())
}
}
return "", fmt.Errorf("output file not found: %s\nFiles in output directory: %v", expectedFile, fileList)
}
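
A quick test sketch (illustrative, not part of the commit) for the filename fallbacks in findOutputFile: depending on the output pattern, Blender may emit "frame_0042.png", "frame_42.png", or "0042.png", and all three should resolve for frame 42.

package tasks

import (
	"os"
	"path/filepath"
	"testing"
)

func TestFindOutputFile_Fallbacks(t *testing.T) {
	p := &RenderProcessor{}
	for _, name := range []string{"frame_0042.png", "frame_42.png", "0042.png"} {
		dir := t.TempDir()
		if err := os.WriteFile(filepath.Join(dir, name), []byte("x"), 0644); err != nil {
			t.Fatal(err)
		}
		// A Context with a nil JobConn is safe here: the logging helpers no-op.
		ctx := &Context{Frame: 42}
		got, err := p.findOutputFile(ctx, dir, "PNG")
		if err != nil {
			t.Errorf("%s: unexpected error: %v", name, err)
			continue
		}
		if filepath.Base(got) != name {
			t.Errorf("%s: resolved to %s", name, filepath.Base(got))
		}
	}
}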

View File

@@ -0,0 +1,146 @@
package workspace
import (
"archive/tar"
"fmt"
"io"
"log"
"os"
"path/filepath"
"strings"
)
// ExtractTar extracts a tar archive from a reader to a directory.
func ExtractTar(reader io.Reader, destDir string) error {
if err := os.MkdirAll(destDir, 0755); err != nil {
return fmt.Errorf("failed to create destination directory: %w", err)
}
tarReader := tar.NewReader(reader)
for {
header, err := tarReader.Next()
if err == io.EOF {
break
}
if err != nil {
return fmt.Errorf("failed to read tar header: %w", err)
}
// Sanitize path to prevent directory traversal
targetPath := filepath.Join(destDir, header.Name)
if !strings.HasPrefix(filepath.Clean(targetPath), filepath.Clean(destDir)+string(os.PathSeparator)) {
return fmt.Errorf("invalid file path in tar: %s", header.Name)
}
switch header.Typeflag {
case tar.TypeDir:
if err := os.MkdirAll(targetPath, os.FileMode(header.Mode)); err != nil {
return fmt.Errorf("failed to create directory: %w", err)
}
case tar.TypeReg:
if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil {
return fmt.Errorf("failed to create parent directory: %w", err)
}
outFile, err := os.Create(targetPath)
if err != nil {
return fmt.Errorf("failed to create file: %w", err)
}
if _, err := io.Copy(outFile, tarReader); err != nil {
outFile.Close()
return fmt.Errorf("failed to write file: %w", err)
}
outFile.Close()
if err := os.Chmod(targetPath, os.FileMode(header.Mode)); err != nil {
log.Printf("Warning: failed to set file permissions: %v", err)
}
}
}
return nil
}
// ExtractTarStripPrefix extracts a tar archive, stripping the top-level directory.
// Useful for Blender archives like "blender-4.2.3-linux-x64/".
func ExtractTarStripPrefix(reader io.Reader, destDir string) error {
if err := os.MkdirAll(destDir, 0755); err != nil {
return err
}
tarReader := tar.NewReader(reader)
stripPrefix := ""
for {
header, err := tarReader.Next()
if err == io.EOF {
break
}
if err != nil {
return err
}
// Determine strip prefix from first entry (e.g., "blender-4.2.3-linux-x64/")
if stripPrefix == "" {
parts := strings.SplitN(header.Name, "/", 2)
if len(parts) > 0 {
stripPrefix = parts[0] + "/"
}
}
// Strip the top-level directory
name := strings.TrimPrefix(header.Name, stripPrefix)
if name == "" {
continue
}
targetPath := filepath.Join(destDir, name)
switch header.Typeflag {
case tar.TypeDir:
if err := os.MkdirAll(targetPath, os.FileMode(header.Mode)); err != nil {
return err
}
case tar.TypeReg:
if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil {
return err
}
outFile, err := os.OpenFile(targetPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, os.FileMode(header.Mode))
if err != nil {
return err
}
if _, err := io.Copy(outFile, tarReader); err != nil {
outFile.Close()
return err
}
outFile.Close()
case tar.TypeSymlink:
if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil {
return err
}
os.Remove(targetPath) // Remove existing symlink if present
if err := os.Symlink(header.Linkname, targetPath); err != nil {
return err
}
}
}
return nil
}
// ExtractTarFile extracts a tar file to a directory.
func ExtractTarFile(tarPath, destDir string) error {
file, err := os.Open(tarPath)
if err != nil {
return fmt.Errorf("failed to open tar file: %w", err)
}
defer file.Close()
return ExtractTar(file, destDir)
}
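
An illustrative round-trip sketch for ExtractTarStripPrefix: build a tiny archive in memory with a top-level "blender-4.2.3-linux-x64/" directory, then confirm the prefix is stripped on extraction. The archive contents are invented for the example.

package workspace

import (
	"archive/tar"
	"bytes"
	"os"
	"path/filepath"
	"testing"
)

func TestExtractTarStripPrefix_TopLevelDir(t *testing.T) {
	var buf bytes.Buffer
	tw := tar.NewWriter(&buf)
	content := []byte("#!/bin/sh\n")
	if err := tw.WriteHeader(&tar.Header{
		Name:     "blender-4.2.3-linux-x64/blender",
		Typeflag: tar.TypeReg,
		Mode:     0755,
		Size:     int64(len(content)),
	}); err != nil {
		t.Fatal(err)
	}
	if _, err := tw.Write(content); err != nil {
		t.Fatal(err)
	}
	if err := tw.Close(); err != nil {
		t.Fatal(err)
	}

	dest := t.TempDir()
	if err := ExtractTarStripPrefix(&buf, dest); err != nil {
		t.Fatal(err)
	}
	// The top-level directory should be stripped, leaving dest/blender.
	if _, err := os.Stat(filepath.Join(dest, "blender")); err != nil {
		t.Fatalf("expected stripped path to exist: %v", err)
	}
}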

View File

@@ -0,0 +1,217 @@
// Package workspace manages runner workspace directories.
package workspace
import (
"fmt"
"log"
"os"
"path/filepath"
"strings"
)
// Manager handles workspace directory operations.
type Manager struct {
baseDir string
runnerName string
}
// NewManager creates a new workspace manager.
func NewManager(runnerName string) *Manager {
m := &Manager{
runnerName: sanitizeName(runnerName),
}
m.init()
return m
}
func sanitizeName(name string) string {
name = strings.ReplaceAll(name, " ", "_")
name = strings.ReplaceAll(name, "/", "_")
name = strings.ReplaceAll(name, "\\", "_")
name = strings.ReplaceAll(name, ":", "_")
return name
}
func (m *Manager) init() {
// Prefer current directory if writable, otherwise use temp
baseDir := os.TempDir()
if cwd, err := os.Getwd(); err == nil {
baseDir = cwd
}
m.baseDir = filepath.Join(baseDir, "jiggablend-workspaces", m.runnerName)
if err := os.MkdirAll(m.baseDir, 0755); err != nil {
log.Printf("Warning: Failed to create workspace directory %s: %v", m.baseDir, err)
// Fallback to temp directory
m.baseDir = filepath.Join(os.TempDir(), "jiggablend-workspaces", m.runnerName)
if err := os.MkdirAll(m.baseDir, 0755); err != nil {
log.Printf("Error: Failed to create fallback workspace directory: %v", err)
// Last resort
m.baseDir = filepath.Join(os.TempDir(), "jiggablend-runner")
os.MkdirAll(m.baseDir, 0755)
}
}
log.Printf("Runner workspace initialized at: %s", m.baseDir)
}
// BaseDir returns the base workspace directory.
func (m *Manager) BaseDir() string {
return m.baseDir
}
// JobDir returns the directory for a specific job.
func (m *Manager) JobDir(jobID int64) string {
return filepath.Join(m.baseDir, fmt.Sprintf("job-%d", jobID))
}
// VideoDir returns the directory for encoding.
func (m *Manager) VideoDir(jobID int64) string {
return filepath.Join(m.baseDir, fmt.Sprintf("job-%d-video", jobID))
}
// BlenderDir returns the directory for Blender installations.
func (m *Manager) BlenderDir() string {
return filepath.Join(m.baseDir, "blender-versions")
}
// CreateJobDir creates and returns the job directory.
func (m *Manager) CreateJobDir(jobID int64) (string, error) {
dir := m.JobDir(jobID)
if err := os.MkdirAll(dir, 0755); err != nil {
return "", fmt.Errorf("failed to create job directory: %w", err)
}
return dir, nil
}
// CreateVideoDir creates and returns the encode directory.
func (m *Manager) CreateVideoDir(jobID int64) (string, error) {
dir := m.VideoDir(jobID)
if err := os.MkdirAll(dir, 0755); err != nil {
return "", fmt.Errorf("failed to create video directory: %w", err)
}
return dir, nil
}
// CleanupJobDir removes a job directory.
func (m *Manager) CleanupJobDir(jobID int64) error {
dir := m.JobDir(jobID)
return os.RemoveAll(dir)
}
// CleanupVideoDir removes an encode directory.
func (m *Manager) CleanupVideoDir(jobID int64) error {
dir := m.VideoDir(jobID)
return os.RemoveAll(dir)
}
// Cleanup removes the entire workspace directory.
func (m *Manager) Cleanup() {
if m.baseDir != "" {
log.Printf("Cleaning up workspace directory: %s", m.baseDir)
if err := os.RemoveAll(m.baseDir); err != nil {
log.Printf("Warning: Failed to remove workspace directory %s: %v", m.baseDir, err)
} else {
log.Printf("Successfully removed workspace directory: %s", m.baseDir)
}
}
// Also clean up any orphaned jiggablend directories
cleanupOrphanedWorkspaces()
}
// cleanupOrphanedWorkspaces removes any jiggablend workspace directories
// that might be left behind from previous runs or crashes.
func cleanupOrphanedWorkspaces() {
log.Printf("Cleaning up orphaned jiggablend workspace directories...")
dirsToCheck := []string{".", os.TempDir()}
for _, baseDir := range dirsToCheck {
workspaceDir := filepath.Join(baseDir, "jiggablend-workspaces")
if _, err := os.Stat(workspaceDir); err == nil {
log.Printf("Removing orphaned workspace directory: %s", workspaceDir)
if err := os.RemoveAll(workspaceDir); err != nil {
log.Printf("Warning: Failed to remove workspace directory %s: %v", workspaceDir, err)
} else {
log.Printf("Successfully removed workspace directory: %s", workspaceDir)
}
}
}
}
// FindBlendFiles finds all .blend files in a directory.
func FindBlendFiles(dir string) ([]string, error) {
var blendFiles []string
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".blend") {
// Check it's not a Blender save file (.blend1, .blend2, etc.)
lower := strings.ToLower(info.Name())
idx := strings.LastIndex(lower, ".blend")
if idx != -1 {
suffix := lower[idx+len(".blend"):]
isSaveFile := false
if len(suffix) > 0 {
isSaveFile = true
for _, r := range suffix {
if r < '0' || r > '9' {
isSaveFile = false
break
}
}
}
if !isSaveFile {
relPath, _ := filepath.Rel(dir, path)
blendFiles = append(blendFiles, relPath)
}
}
}
return nil
})
return blendFiles, err
}
// FindFirstBlendFile finds the first .blend file in a directory.
func FindFirstBlendFile(dir string) (string, error) {
var blendFile string
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".blend") {
lower := strings.ToLower(info.Name())
idx := strings.LastIndex(lower, ".blend")
if idx != -1 {
suffix := lower[idx+len(".blend"):]
isSaveFile := false
if len(suffix) > 0 {
isSaveFile = true
for _, r := range suffix {
if r < '0' || r > '9' {
isSaveFile = false
break
}
}
}
if !isSaveFile {
blendFile = path
return filepath.SkipAll
}
}
}
return nil
})
if err != nil {
return "", err
}
if blendFile == "" {
return "", fmt.Errorf("no .blend file found in %s", dir)
}
return blendFile, nil
}
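
A quick sketch (illustrative, not part of the commit) of the blend-file discovery helpers above: regular .blend files are found recursively, while Blender save files (.blend1, .blend2, ...) are ignored. The file names are invented for the example.

package workspace

import (
	"os"
	"path/filepath"
	"testing"
)

func TestFindBlendFiles_SkipsSaveFiles(t *testing.T) {
	dir := t.TempDir()
	if err := os.MkdirAll(filepath.Join(dir, "textures"), 0755); err != nil {
		t.Fatal(err)
	}
	for _, name := range []string{"scene.blend", "scene.blend1", filepath.Join("textures", "props.blend")} {
		if err := os.WriteFile(filepath.Join(dir, name), []byte("BLENDER"), 0644); err != nil {
			t.Fatal(err)
		}
	}

	files, err := FindBlendFiles(dir)
	if err != nil {
		t.Fatal(err)
	}
	if len(files) != 2 {
		t.Fatalf("expected 2 blend files, got %d: %v", len(files), files)
	}

	first, err := FindFirstBlendFile(dir)
	if err != nil {
		t.Fatal(err)
	}
	if filepath.Base(first) != "scene.blend" {
		t.Errorf("expected scene.blend first, got %s", first)
	}
}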