diff --git a/.gitea/workflows/test-pr.yaml b/.gitea/workflows/test-pr.yaml index 6da108e..ca7b4f5 100644 --- a/.gitea/workflows/test-pr.yaml +++ b/.gitea/workflows/test-pr.yaml @@ -12,6 +12,5 @@ jobs: go-version-file: 'go.mod' - uses: FedericoCarboni/setup-ffmpeg@v3 - run: go mod tidy - - run: cd web && npm install && npm run build - run: go build ./... - run: go test -race -v -shuffle=on ./... \ No newline at end of file diff --git a/.gitignore b/.gitignore index a30a403..eb208e1 100644 --- a/.gitignore +++ b/.gitignore @@ -43,16 +43,6 @@ runner-secrets-*.json jiggablend-storage/ jiggablend-workspaces/ -# Node.js -web/node_modules/ -web/dist/ -web/.vite/ -npm-debug.log* -yarn-debug.log* -yarn-error.log* -pnpm-debug.log* -lerna-debug.log* - # IDE .vscode/ .idea/ diff --git a/.goreleaser.yaml b/.goreleaser.yaml index 3c8db37..eb8cf3d 100644 --- a/.goreleaser.yaml +++ b/.goreleaser.yaml @@ -3,7 +3,6 @@ version: 2 before: hooks: - go mod tidy -v - - sh -c "cd web && npm install && npm run build" builds: - id: default diff --git a/Makefile b/Makefile index 0f9b421..d1e6d98 100644 --- a/Makefile +++ b/Makefile @@ -5,11 +5,8 @@ build: @echo "Building with GoReleaser..." goreleaser build --clean --snapshot --single-target @mkdir -p bin - @find dist -name jiggablend -type f -exec cp {} bin/jiggablend \; - -# Build web UI -build-web: clean-web - cd web && npm install && npm run build + @find dist -name jiggablend -type f -exec cp {} bin/jiggablend.new \; + @mv -f bin/jiggablend.new bin/jiggablend # Cleanup manager logs cleanup-manager: @@ -63,7 +60,7 @@ clean-bin: # Clean web build artifacts clean-web: - rm -rf web/dist/ + @echo "No generated web artifacts to clean." 
# Run tests test: @@ -75,7 +72,7 @@ help: @echo "" @echo "Build targets:" @echo " build - Build jiggablend binary with embedded web UI" - @echo " build-web - Build web UI only" + @echo " build-web - Validate web UI assets (no build required)" @echo "" @echo "Run targets:" @echo " run - Run manager and runner in parallel (for testing)" @@ -90,7 +87,7 @@ help: @echo "" @echo "Other targets:" @echo " clean-bin - Clean build artifacts" - @echo " clean-web - Clean web build artifacts" + @echo " clean-web - Clean generated web artifacts (currently none)" @echo " test - Run Go tests" @echo " help - Show this help" @echo "" diff --git a/README.md b/README.md index 8a06a4f..4153ec8 100644 --- a/README.md +++ b/README.md @@ -12,20 +12,20 @@ Both manager and runner are part of a single binary (`jiggablend`) with subcomma ## Features - **Authentication**: OAuth (Google and Discord) and local authentication with user management -- **Web UI**: Modern React-based interface for job submission and monitoring +- **Web UI**: Server-rendered Go templates with HTMX fragments for job submission and monitoring - **Distributed Rendering**: Scale across multiple runners with automatic job distribution -- **Real-time Updates**: WebSocket-based progress tracking and job status updates -- **Video Encoding**: Automatic video encoding from EXR/PNG sequences with multiple codec support: - - H.264 (MP4) - SDR and HDR support - - AV1 (MP4) - With alpha channel support - - VP9 (WebM) - With alpha channel and HDR support -- **Output Formats**: PNG, JPEG, EXR, and video formats (MP4, WebM) +- **Real-time Updates**: Polling-based UI updates with lightweight HTMX refreshes +- **Video Encoding**: Automatic video encoding from EXR sequences only. EXR→video always uses HDR (HLG, 10-bit); no option to disable. Codecs: + - H.264 (MP4) - HDR (HLG) + - AV1 (MP4) - Alpha channel support, HDR + - VP9 (WebM) - Alpha channel and HDR +- **Output Formats**: EXR frame sequence only, or EXR + video (H.264, AV1, VP9). 
Blender always renders EXR. - **Blender Version Management**: Support for multiple Blender versions with automatic detection - **Metadata Extraction**: Automatic extraction of scene metadata from Blender files - **Admin Panel**: User and runner management interface - **Runner Management**: API key-based authentication for runners with health monitoring -- **HDR Support**: Preserve HDR range in video encoding with HLG transfer function -- **Alpha Channel**: Preserve alpha channel in video encoding (AV1 and VP9) +- **HDR**: EXR→video is always encoded as HDR (HLG, 10-bit). There is no option to turn it off; for SDR-only output, download the EXR frames and encode locally. +- **Alpha**: Alpha is always preserved in EXR frames. In video, alpha is preserved when present in the EXR for AV1 and VP9; H.264 MP4 does not support alpha. ## Prerequisites @@ -158,6 +158,15 @@ bin/jiggablend runner --manager http://localhost:8080 --name my-runner --api-key JIGGABLEND_MANAGER=http://localhost:8080 JIGGABLEND_API_KEY= bin/jiggablend runner ``` +### Render Chunk Size Note + +For one heavy production scene/profile, chunked rendering (`frames 800-804` in one Blender process) was much slower than one-frame tasks: + +- Chunked task (`800-804`): `27m49s` end-to-end (`Task assigned` -> last `Saved`) +- Single-frame tasks (`800`, `801`, `802`, `803`, `804`): `15m04s` wall clock total + +In that test, any chunk size greater than `1` caused a major slowdown after the first frame. Fresh installs should already have it set to `1`, but if you see similar performance degradation, try forcing one frame per task (hard reset Blender each frame): `jiggablend manager config set frames-per-render-task 1`. If `1` is worse on your scene/hardware, benchmark and use a higher chunk size instead. 
+ ### Running Both (for Testing) ```bash @@ -217,9 +226,9 @@ jiggablend/ │ ├── executils/ # Execution utilities │ ├── scripts/ # Python scripts for Blender │ └── types/ # Shared types and models -├── web/ # React web UI -│ ├── src/ # Source files -│ └── dist/ # Built files (embedded in binary) +├── web/ # Embedded templates + static assets +│ ├── templates/ # Go HTML templates and partials +│ └── static/ # CSS/JS assets ├── go.mod └── Makefile ``` @@ -266,29 +275,25 @@ jiggablend/ - `GET /api/admin/stats` - System statistics ### WebSocket -- `WS /api/ws` - WebSocket connection for real-time updates - - Subscribe to job channels: `job:{jobId}` - - Receive job status updates, progress, and logs +- `WS /api/jobs/ws` - Optional API channel for advanced clients +- The default web UI uses polling + HTMX for status updates and task views. ## Output Formats -The system supports the following output formats: +The system supports the following output formats. Blender always renders EXR (linear); the chosen format is the deliverable (frames only or frames + video). -### Image Formats -- **PNG** - Standard PNG output -- **JPEG** - JPEG output -- **EXR** - OpenEXR format (HDR) +### Deliverable Formats +- **EXR** - EXR frame sequence only (no video) +- **EXR_264_MP4** - EXR frames + H.264 MP4 (always HDR, HLG) +- **EXR_AV1_MP4** - EXR frames + AV1 MP4 (alpha support, always HDR) +- **EXR_VP9_WEBM** - EXR frames + VP9 WebM (alpha and HDR) -### Video Formats -- **EXR_264_MP4** - H.264 encoded MP4 from EXR sequence (SDR or HDR) -- **EXR_AV1_MP4** - AV1 encoded MP4 from EXR sequence (with alpha channel support) -- **EXR_VP9_WEBM** - VP9 encoded WebM from EXR sequence (with alpha channel and HDR support) +Video encoding (EXR→video) is always HDR (HLG, 10-bit); there is no option to output SDR video. For SDR-only, download the EXR frames and encode locally. 
Video encoding features: - 2-pass encoding for optimal quality -HDR preservation using HLG transfer function +EXR→video only (no PNG source); always HLG (HDR), 10-bit, full range - Alpha channel preservation (AV1 and VP9 only) -Automatic detection of source format (EXR or PNG) - Software encoding (libx264, libaom-av1, libvpx-vp9) ## Storage Structure @@ -320,16 +325,8 @@ go test ./... -timeout 30s ### Web UI Development -The web UI is built with React and Vite. To develop the UI: - -```bash -cd web -npm install -npm run dev # Development server -npm run build # Build for production -``` - -The built files are embedded in the Go binary using `embed.FS`. +The web UI is server-rendered from embedded templates and static assets in `web/templates` and `web/static`. +No Node/Vite build step is required. ## License diff --git a/cmd/jiggablend/cmd/managerconfig.go b/cmd/jiggablend/cmd/managerconfig.go index 5bf3b4c..62d345b 100644 --- a/cmd/jiggablend/cmd/managerconfig.go +++ b/cmd/jiggablend/cmd/managerconfig.go @@ -8,6 +8,7 @@ import ( "encoding/hex" "fmt" "os" + "strconv" "strings" "jiggablend/internal/config" @@ -381,6 +382,25 @@ var setGoogleOAuthCmd = &cobra.Command{ var setDiscordOAuthRedirectURL string +var setFramesPerRenderTaskCmd = &cobra.Command{ + Use: "frames-per-render-task <n>", + Short: "Set number of frames per render task (min 1)", + Long: `Set how many frames to batch into each render task. Job frame range is divided into chunks of this size.
Default is 1.`, + Args: cobra.ExactArgs(1), + Run: func(cmd *cobra.Command, args []string) { + n, err := strconv.Atoi(args[0]) + if err != nil || n < 1 { + exitWithError("frames-per-render-task must be a positive integer") + } + withConfig(func(cfg *config.Config, db *database.DB) { + if err := cfg.SetInt(config.KeyFramesPerRenderTask, n); err != nil { + exitWithError("Failed to set frames_per_render_task: %v", err) + } + fmt.Printf("Frames per render task set to %d\n", n) + }) + }, +} + var setDiscordOAuthCmd = &cobra.Command{ Use: "discord-oauth ", Short: "Set Discord OAuth credentials", @@ -558,6 +578,7 @@ func init() { configCmd.AddCommand(setCmd) setCmd.AddCommand(setFixedAPIKeyCmd) setCmd.AddCommand(setAllowedOriginsCmd) + setCmd.AddCommand(setFramesPerRenderTaskCmd) setCmd.AddCommand(setGoogleOAuthCmd) setCmd.AddCommand(setDiscordOAuthCmd) diff --git a/examples/frame_0800.png b/examples/frame_0800.png deleted file mode 100644 index 6b84436..0000000 Binary files a/examples/frame_0800.png and /dev/null differ diff --git a/internal/config/config.go b/internal/config/config.go index 652ee8f..c66a0b6 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -21,7 +21,8 @@ const ( KeyFixedAPIKey = "fixed_api_key" KeyRegistrationEnabled = "registration_enabled" KeyProductionMode = "production_mode" - KeyAllowedOrigins = "allowed_origins" + KeyAllowedOrigins = "allowed_origins" + KeyFramesPerRenderTask = "frames_per_render_task" ) // Config manages application configuration stored in the database @@ -301,3 +302,12 @@ func (c *Config) AllowedOrigins() string { return c.GetWithDefault(KeyAllowedOrigins, "") }
+func (c *Config) GetFramesPerRenderTask() int { + n := c.GetIntWithDefault(KeyFramesPerRenderTask, 1) + if n < 1 { + return 1 + } + return n +} + diff --git a/internal/database/migrations/000002_add_task_frame_end.down.sql b/internal/database/migrations/000002_add_task_frame_end.down.sql new file mode 100644 index 0000000..1c331a4 --- /dev/null +++ b/internal/database/migrations/000002_add_task_frame_end.down.sql @@ -0,0 +1,31 @@ +-- SQLite does not support DROP COLUMN directly; recreate table without frame_end +CREATE TABLE tasks_new ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + job_id INTEGER NOT NULL, + runner_id INTEGER, + frame INTEGER NOT NULL, + status TEXT NOT NULL DEFAULT 'pending', + output_path TEXT, + task_type TEXT NOT NULL DEFAULT 'render', + current_step TEXT, + retry_count INTEGER NOT NULL DEFAULT 0, + max_retries INTEGER NOT NULL DEFAULT 3, + runner_failure_count INTEGER NOT NULL DEFAULT 0, + timeout_seconds INTEGER, + condition TEXT, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + started_at TIMESTAMP, + completed_at TIMESTAMP, + error_message TEXT, + FOREIGN KEY (job_id) REFERENCES jobs(id), + FOREIGN KEY (runner_id) REFERENCES runners(id) +); +INSERT INTO tasks_new (id, job_id, runner_id, frame, status, output_path, task_type, current_step, retry_count, max_retries, runner_failure_count, timeout_seconds, condition, created_at, started_at, completed_at, error_message) +SELECT id, job_id, runner_id, frame, status, output_path, task_type, current_step, retry_count, max_retries, runner_failure_count, timeout_seconds, condition, created_at, started_at, completed_at, error_message FROM tasks; +DROP TABLE tasks; +ALTER TABLE tasks_new RENAME TO tasks; +CREATE INDEX idx_tasks_job_id ON tasks(job_id); +CREATE INDEX idx_tasks_runner_id ON tasks(runner_id); +CREATE INDEX idx_tasks_status ON tasks(status); +CREATE INDEX idx_tasks_job_status ON tasks(job_id, status); +CREATE INDEX idx_tasks_started_at ON tasks(started_at); diff --git 
a/internal/database/migrations/000002_add_task_frame_end.up.sql b/internal/database/migrations/000002_add_task_frame_end.up.sql new file mode 100644 index 0000000..6cafeb6 --- /dev/null +++ b/internal/database/migrations/000002_add_task_frame_end.up.sql @@ -0,0 +1,2 @@ +-- Add frame_end to tasks for range-based render tasks (NULL = single frame, same as frame) +ALTER TABLE tasks ADD COLUMN frame_end INTEGER; diff --git a/internal/manager/admin.go b/internal/manager/admin.go index 0211c55..284d1e4 100644 --- a/internal/manager/admin.go +++ b/internal/manager/admin.go @@ -121,37 +121,6 @@ func (s *Manager) handleDeleteRunnerAPIKey(w http.ResponseWriter, r *http.Reques s.respondJSON(w, http.StatusOK, map[string]string{"message": "API key deleted"}) } -// handleVerifyRunner manually verifies a runner -func (s *Manager) handleVerifyRunner(w http.ResponseWriter, r *http.Request) { - runnerID, err := parseID(r, "id") - if err != nil { - s.respondError(w, http.StatusBadRequest, err.Error()) - return - } - - // Check if runner exists - var exists bool - err = s.db.With(func(conn *sql.DB) error { - return conn.QueryRow("SELECT EXISTS(SELECT 1 FROM runners WHERE id = ?)", runnerID).Scan(&exists) - }) - if err != nil || !exists { - s.respondError(w, http.StatusNotFound, "Runner not found") - return - } - - // Mark runner as verified - err = s.db.With(func(conn *sql.DB) error { - _, err := conn.Exec("UPDATE runners SET verified = 1 WHERE id = ?", runnerID) - return err - }) - if err != nil { - s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to verify runner: %v", err)) - return - } - - s.respondJSON(w, http.StatusOK, map[string]string{"message": "Runner verified"}) -} - // handleDeleteRunner removes a runner func (s *Manager) handleDeleteRunner(w http.ResponseWriter, r *http.Request) { runnerID, err := parseID(r, "id") @@ -415,6 +384,12 @@ func (s *Manager) handleSetRegistrationEnabled(w http.ResponseWriter, r *http.Re // handleSetUserAdminStatus sets a 
user's admin status (admin only) func (s *Manager) handleSetUserAdminStatus(w http.ResponseWriter, r *http.Request) { + currentUserID, err := getUserID(r) + if err != nil { + s.respondError(w, http.StatusUnauthorized, err.Error()) + return + } + targetUserID, err := parseID(r, "id") if err != nil { s.respondError(w, http.StatusBadRequest, err.Error()) @@ -429,6 +404,12 @@ func (s *Manager) handleSetUserAdminStatus(w http.ResponseWriter, r *http.Reques return } + // Prevent admins from revoking their own admin status. + if targetUserID == currentUserID && !req.IsAdmin { + s.respondError(w, http.StatusBadRequest, "You cannot revoke your own admin status") + return + } + if err := s.auth.SetUserAdminStatus(targetUserID, req.IsAdmin); err != nil { s.respondError(w, http.StatusBadRequest, err.Error()) return diff --git a/internal/manager/blender.go b/internal/manager/blender.go index 6fad4e3..e6bb55f 100644 --- a/internal/manager/blender.go +++ b/internal/manager/blender.go @@ -331,8 +331,9 @@ func (s *Manager) GetBlenderArchivePath(version *BlenderVersion) (string, error) // Need to download and decompress log.Printf("Downloading Blender %s from %s", version.Full, version.URL) + // 60-minute timeout for large Blender tarballs; stream to disk via io.Copy below client := &http.Client{ - Timeout: 0, // No timeout for large downloads + Timeout: 60 * time.Minute, } resp, err := client.Get(version.URL) if err != nil { diff --git a/internal/manager/jobs.go b/internal/manager/jobs.go index d431475..8189af6 100644 --- a/internal/manager/jobs.go +++ b/internal/manager/jobs.go @@ -2,6 +2,7 @@ package api import ( "archive/tar" + "archive/zip" "crypto/md5" "database/sql" _ "embed" @@ -26,6 +27,7 @@ import ( "jiggablend/pkg/scripts" "jiggablend/pkg/types" + "github.com/google/uuid" "github.com/gorilla/websocket" ) @@ -50,6 +52,50 @@ func isAdminUser(r *http.Request) bool { return authpkg.IsAdmin(r.Context()) } +func uploadSessionPhase(status string) string { + switch status { + 
case "uploading": + return "upload" + case "completed": + return "ready" + case "error": + return "error" + case "select_blend": + return "action_required" + default: + return "processing" + } +} + +func (s *Manager) updateUploadSessionState(sessionID string, progress float64, status, message string) (int64, bool) { + s.uploadSessionsMu.Lock() + defer s.uploadSessionsMu.Unlock() + + session, exists := s.uploadSessions[sessionID] + if !exists { + return 0, false + } + session.Progress = progress + session.Status = status + session.Phase = uploadSessionPhase(status) + session.Message = message + return session.UserID, true +} + +func (s *Manager) failUploadSession(sessionID, errorMessage string) (int64, bool) { + userID, exists := s.updateUploadSessionState(sessionID, 0, "error", errorMessage) + if !exists { + return 0, false + } + + s.uploadSessionsMu.Lock() + if session, ok := s.uploadSessions[sessionID]; ok { + session.ErrorMessage = errorMessage + } + s.uploadSessionsMu.Unlock() + return userID, true +} + // handleCreateJob creates a new job func (s *Manager) handleCreateJob(w http.ResponseWriter, r *http.Request) { userID, err := getUserID(r) @@ -70,10 +116,30 @@ func (s *Manager) handleCreateJob(w http.ResponseWriter, r *http.Request) { return } + req.Name = strings.TrimSpace(req.Name) if req.Name == "" { s.respondError(w, http.StatusBadRequest, "Job name is required") return } + nameTaken := false + err = s.db.With(func(conn *sql.DB) error { + return conn.QueryRow( + `SELECT EXISTS( + SELECT 1 + FROM jobs + WHERE user_id = ? AND LOWER(TRIM(name)) = LOWER(?) 
+ )`, + userID, req.Name, + ).Scan(&nameTaken) + }) + if err != nil { + s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to validate job name: %v", err)) + return + } + if nameTaken { + s.respondError(w, http.StatusBadRequest, "You already have a job with this name") + return + } // Validate render job requirements if req.JobType == types.JobTypeRender { @@ -100,23 +166,26 @@ func (s *Manager) handleCreateJob(w http.ResponseWriter, r *http.Request) { return } if req.OutputFormat == nil || *req.OutputFormat == "" { - defaultFormat := "PNG" + defaultFormat := "EXR" req.OutputFormat = &defaultFormat } + // Blender outputs only EXR; allowed deliverable formats + allowedFormats := map[string]bool{"EXR": true, "EXR_264_MP4": true, "EXR_AV1_MP4": true, "EXR_VP9_WEBM": true} + if *req.OutputFormat != "" && !allowedFormats[*req.OutputFormat] { + s.respondError(w, http.StatusBadRequest, "Invalid output_format. Allowed: EXR, EXR_264_MP4, EXR_AV1_MP4, EXR_VP9_WEBM") + return + } } - // Store render settings, unhide_objects, enable_execution, blender_version, preserve_hdr, and preserve_alpha flags in blend_metadata if provided - // Always include output_format in metadata so tasks can access it + // Store render settings, unhide_objects, enable_execution, and blender_version in blend_metadata if provided. 
var blendMetadataJSON *string - if req.RenderSettings != nil || req.UnhideObjects != nil || req.EnableExecution != nil || req.BlenderVersion != nil || req.OutputFormat != nil || req.PreserveHDR != nil || req.PreserveAlpha != nil { + if req.RenderSettings != nil || req.UnhideObjects != nil || req.EnableExecution != nil || req.BlenderVersion != nil || req.OutputFormat != nil { metadata := types.BlendMetadata{ FrameStart: *req.FrameStart, FrameEnd: *req.FrameEnd, RenderSettings: types.RenderSettings{}, UnhideObjects: req.UnhideObjects, EnableExecution: req.EnableExecution, - PreserveHDR: req.PreserveHDR, - PreserveAlpha: req.PreserveAlpha, } if req.RenderSettings != nil { metadata.RenderSettings = *req.RenderSettings @@ -160,110 +229,114 @@ func (s *Manager) handleCreateJob(w http.ResponseWriter, r *http.Request) { // If upload session ID is provided, move the context archive from temp to job directory if req.UploadSessionID != nil && *req.UploadSessionID != "" { log.Printf("Processing upload session for job %d: %s", jobID, *req.UploadSessionID) - // Session ID is the full temp directory path - tempDir := *req.UploadSessionID - tempContextPath := filepath.Join(tempDir, "context.tar") + var uploadSession *UploadSession + s.uploadSessionsMu.RLock() + uploadSession = s.uploadSessions[*req.UploadSessionID] + s.uploadSessionsMu.RUnlock() - if _, err := os.Stat(tempContextPath); err == nil { - log.Printf("Found context archive at %s, moving to job %d directory", tempContextPath, jobID) - // Move context to job directory - jobPath := s.storage.JobPath(jobID) - if err := os.MkdirAll(jobPath, 0755); err != nil { - log.Printf("ERROR: Failed to create job directory for job %d: %v", jobID, err) - s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to create job directory: %v", err)) - return - } + if uploadSession == nil || uploadSession.UserID != userID { + s.respondError(w, http.StatusBadRequest, "Invalid upload session. 
Please upload the file again.") + return + } + if uploadSession.Status != "completed" { + s.respondError(w, http.StatusBadRequest, "Upload session is not ready yet. Wait for processing to complete.") + return + } + if uploadSession.TempDir == "" { + s.respondError(w, http.StatusBadRequest, "Upload session is missing context data. Please upload again.") + return + } - jobContextPath := filepath.Join(jobPath, "context.tar") - - // Copy file instead of rename (works across filesystems) - srcFile, err := os.Open(tempContextPath) - if err != nil { - log.Printf("ERROR: Failed to open source context archive %s: %v", tempContextPath, err) - s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to open context archive: %v", err)) - return - } - defer srcFile.Close() - - dstFile, err := os.Create(jobContextPath) - if err != nil { - log.Printf("ERROR: Failed to create destination context archive %s: %v", jobContextPath, err) - s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to create context archive: %v", err)) - return - } - defer dstFile.Close() - - _, err = io.Copy(dstFile, srcFile) - if err != nil { - dstFile.Close() - os.Remove(jobContextPath) // Clean up partial file - log.Printf("ERROR: Failed to copy context archive from %s to %s: %v", tempContextPath, jobContextPath, err) - s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to copy context archive: %v", err)) - return - } - - // Close files before deleting source - srcFile.Close() - if err := dstFile.Close(); err != nil { - log.Printf("ERROR: Failed to close destination file: %v", err) - s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to finalize context archive: %v", err)) - return - } - - // Delete source file after successful copy - if err := os.Remove(tempContextPath); err != nil { - log.Printf("Warning: Failed to remove source context archive %s: %v", tempContextPath, err) - // Don't fail the operation if cleanup fails - } - - 
log.Printf("Successfully copied context archive to %s", jobContextPath) - - // Record context archive in database - contextInfo, err := os.Stat(jobContextPath) - if err != nil { - log.Printf("ERROR: Failed to stat context archive after move: %v", err) - s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to verify context archive: %v", err)) - return - } - - var fileID int64 - err = s.db.With(func(conn *sql.DB) error { - result, err := conn.Exec( - `INSERT INTO job_files (job_id, file_type, file_path, file_name, file_size) - VALUES (?, ?, ?, ?, ?)`, - jobID, types.JobFileTypeInput, jobContextPath, filepath.Base(jobContextPath), contextInfo.Size(), - ) - if err != nil { - return err - } - fileID, err = result.LastInsertId() - return err - }) - if err != nil { - log.Printf("ERROR: Failed to record context archive in database for job %d: %v", jobID, err) - s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to record context archive: %v", err)) - return - } - - log.Printf("Successfully recorded context archive in database for job %d (file ID: %d, size: %d bytes)", jobID, fileID, contextInfo.Size()) - - // Broadcast file addition - s.broadcastJobUpdate(jobID, "file_added", map[string]interface{}{ - "file_id": fileID, - "file_type": types.JobFileTypeInput, - "file_name": filepath.Base(jobContextPath), - "file_size": contextInfo.Size(), - }) - - // Clean up temp directory - if err := os.RemoveAll(tempDir); err != nil { - log.Printf("Warning: Failed to clean up temp directory %s: %v", tempDir, err) - } - } else { - log.Printf("ERROR: Context archive not found at %s for session %s: %v", tempContextPath, *req.UploadSessionID, err) + tempContextPath := filepath.Join(uploadSession.TempDir, "context.tar") + if _, statErr := os.Stat(tempContextPath); statErr != nil { + log.Printf("ERROR: Context archive not found at %s for session %s: %v", tempContextPath, *req.UploadSessionID, statErr) s.respondError(w, http.StatusBadRequest, "Context 
archive not found for upload session. Please upload the file again.") return } + + log.Printf("Found context archive at %s, moving to job %d directory", tempContextPath, jobID) + jobPath := s.storage.JobPath(jobID) + if err := os.MkdirAll(jobPath, 0755); err != nil { + log.Printf("ERROR: Failed to create job directory for job %d: %v", jobID, err) + s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to create job directory: %v", err)) + return + } + + jobContextPath := filepath.Join(jobPath, "context.tar") + srcFile, err := os.Open(tempContextPath) + if err != nil { + log.Printf("ERROR: Failed to open source context archive %s: %v", tempContextPath, err) + s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to open context archive: %v", err)) + return + } + defer srcFile.Close() + + dstFile, err := os.Create(jobContextPath) + if err != nil { + log.Printf("ERROR: Failed to create destination context archive %s: %v", jobContextPath, err) + s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to create context archive: %v", err)) + return + } + defer dstFile.Close() + + if _, err = io.Copy(dstFile, srcFile); err != nil { + dstFile.Close() + os.Remove(jobContextPath) + log.Printf("ERROR: Failed to copy context archive from %s to %s: %v", tempContextPath, jobContextPath, err) + s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to copy context archive: %v", err)) + return + } + + srcFile.Close() + if err := dstFile.Close(); err != nil { + log.Printf("ERROR: Failed to close destination file: %v", err) + s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to finalize context archive: %v", err)) + return + } + if err := os.Remove(tempContextPath); err != nil { + log.Printf("Warning: Failed to remove source context archive %s: %v", tempContextPath, err) + } + + contextInfo, err := os.Stat(jobContextPath) + if err != nil { + log.Printf("ERROR: Failed to stat context archive after move: 
%v", err) + s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to verify context archive: %v", err)) + return + } + + var fileID int64 + err = s.db.With(func(conn *sql.DB) error { + result, err := conn.Exec( + `INSERT INTO job_files (job_id, file_type, file_path, file_name, file_size) + VALUES (?, ?, ?, ?, ?)`, + jobID, types.JobFileTypeInput, jobContextPath, filepath.Base(jobContextPath), contextInfo.Size(), + ) + if err != nil { + return err + } + fileID, err = result.LastInsertId() + return err + }) + if err != nil { + log.Printf("ERROR: Failed to record context archive in database for job %d: %v", jobID, err) + s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to record context archive: %v", err)) + return + } + + s.broadcastJobUpdate(jobID, "file_added", map[string]interface{}{ + "file_id": fileID, + "file_type": types.JobFileTypeInput, + "file_name": filepath.Base(jobContextPath), + "file_size": contextInfo.Size(), + }) + + if err := os.RemoveAll(uploadSession.TempDir); err != nil { + log.Printf("Warning: Failed to clean up temp directory %s: %v", uploadSession.TempDir, err) + } + + s.uploadSessionsMu.Lock() + delete(s.uploadSessions, *req.UploadSessionID) + s.uploadSessionsMu.Unlock() } else { log.Printf("Warning: No upload session ID provided for job %d - job created without input files", jobID) } @@ -276,30 +349,42 @@ func (s *Manager) handleCreateJob(w http.ResponseWriter, r *http.Request) { taskTimeout = VideoEncodeTimeout // 24 hours for encoding } - // Create tasks for the job - // Create one task per frame (all tasks are single-frame) + // Create tasks for the job (batch INSERT in a single transaction) + // Chunk job frame range by frames_per_render_task config + framesPerTask := s.cfg.GetFramesPerRenderTask() + totalFrames := *req.FrameEnd - *req.FrameStart + 1 + if framesPerTask > totalFrames { + framesPerTask = totalFrames + } var createdTaskIDs []int64 - for frame := *req.FrameStart; frame <= *req.FrameEnd; 
frame++ { - var taskID int64 - err = s.db.With(func(conn *sql.DB) error { - result, err := conn.Exec( - `INSERT INTO tasks (job_id, frame, task_type, status, timeout_seconds, max_retries) - VALUES (?, ?, ?, ?, ?, ?)`, - jobID, frame, types.TaskTypeRender, types.TaskStatusPending, taskTimeout, 3, - ) + err = s.db.With(func(conn *sql.DB) error { + stmt, err := conn.Prepare(`INSERT INTO tasks (job_id, frame, frame_end, task_type, status, timeout_seconds, max_retries) VALUES (?, ?, ?, ?, ?, ?, ?)`) + if err != nil { + return err + } + defer stmt.Close() + for start := *req.FrameStart; start <= *req.FrameEnd; start += framesPerTask { + end := start + framesPerTask - 1 + if end > *req.FrameEnd { + end = *req.FrameEnd + } + result, err := stmt.Exec(jobID, start, end, types.TaskTypeRender, types.TaskStatusPending, taskTimeout, 3) if err != nil { return err } - taskID, err = result.LastInsertId() - return err - }) - if err != nil { - s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to create tasks: %v", err)) - return + taskID, err := result.LastInsertId() + if err != nil { + return err + } + createdTaskIDs = append(createdTaskIDs, taskID) } - createdTaskIDs = append(createdTaskIDs, taskID) + return nil + }) + if err != nil { + s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to create tasks: %v", err)) + return } - log.Printf("Created %d render tasks for job %d (frames %d-%d)", *req.FrameEnd-*req.FrameStart+1, jobID, *req.FrameStart, *req.FrameEnd) + log.Printf("Created %d render tasks for job %d (frames %d-%d, %d frames per task)", len(createdTaskIDs), jobID, *req.FrameStart, *req.FrameEnd, framesPerTask) // Create encode task immediately if output format requires it // The task will have a condition that prevents it from being assigned until all render tasks are completed @@ -941,13 +1026,7 @@ func (s *Manager) handleCancelJob(w http.ResponseWriter, r *http.Request) { if rowsAffected == 0 { return sql.ErrNoRows } - - // Cancel 
all pending tasks - _, err = conn.Exec( - `UPDATE tasks SET status = ?, runner_id = NULL WHERE job_id = ? AND status = ?`, - types.TaskStatusFailed, jobID, types.TaskStatusPending, - ) - return err + return nil }) if err == sql.ErrNoRows { s.respondError(w, http.StatusNotFound, "Job not found") @@ -958,6 +1037,12 @@ func (s *Manager) handleCancelJob(w http.ResponseWriter, r *http.Request) { return } + // Cancel both pending and running tasks so they cannot be reassigned or keep executing logically. + if err := s.cancelActiveTasksForJob(jobID); err != nil { + s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to cancel active tasks: %v", err)) + return + } + log.Printf("Cancelling job %d (type: %s)", jobID, jobType) s.respondJSON(w, http.StatusOK, map[string]string{"message": "Job cancelled"}) @@ -1482,16 +1567,18 @@ func (s *Manager) handleUploadFileForJobCreation(w http.ResponseWriter, r *http. return } - // Generate session ID (use temp directory path as session ID) - sessionID = tmpDir + // Generate opaque session ID and keep temp path internal. + sessionID = uuid.NewString() // Create upload session s.uploadSessionsMu.Lock() s.uploadSessions[sessionID] = &UploadSession{ SessionID: sessionID, UserID: userID, + TempDir: tmpDir, Progress: 0.0, Status: "uploading", + Phase: uploadSessionPhase("uploading"), Message: "Uploading file...", CreatedAt: time.Now(), } @@ -1581,40 +1668,72 @@ func (s *Manager) handleUploadFileForJobCreation(w http.ResponseWriter, r *http. return } - // Process everything synchronously and return metadata in HTTP response - // Client will show upload progress during upload, then processing progress while waiting filename := header.Filename fileSize := header.Size mainBlendParam := formValues["main_blend_file"] - var processedMainBlendFile string - var processedExtractedFiles []string - var processedMetadata *types.BlendMetadata + // Return immediately so the client gets a quick response and can poll for status. 
+ // ZIP extraction, blend detection, context creation, and metadata extraction run in background. + log.Printf("Upload received for session %s, returning 200 immediately (ZIP/metadata will run in background)", sessionID) + s.updateUploadSessionState(sessionID, 0.2, "processing", "Extracting and preparing...") + s.broadcastUploadProgressSync(userID, sessionID, 0.2, "processing", "Extracting and preparing...") - // Process ZIP extraction if needed + response := map[string]interface{}{ + "session_id": sessionID, + "file_name": filename, + "file_size": fileSize, + "status": "processing", + "phase": uploadSessionPhase("processing"), + } + s.respondJSON(w, http.StatusOK, response) + + go s.runBackgroundUploadProcessing(tmpDir, sessionID, userID, filename, fileSize, mainBlendParam, mainBlendFile) +} + +// runBackgroundUploadProcessing runs ZIP extraction (if needed), blend detection, context creation, and metadata extraction. +// Called in a goroutine after the upload handler returns; updates upload session and broadcasts when done. 
+func (s *Manager) runBackgroundUploadProcessing(tmpDir, sessionID string, userID int64, filename string, fileSize int64, mainBlendParam string, mainBlendFile string) { + defer func() { + if r := recover(); r != nil { + log.Printf("Panic in runBackgroundUploadProcessing: %v", r) + errMsg := fmt.Sprintf("%v", r) + if ownerUserID, ok := s.failUploadSession(sessionID, "Processing failed: "+errMsg); ok { + s.broadcastUploadProgressSync(ownerUserID, sessionID, 0, "error", "Processing failed: "+errMsg) + } + } + }() + + var processedMainBlendFile string + var excludeFiles []string + extractedFilesCount := 0 if strings.HasSuffix(strings.ToLower(filename), ".zip") { + excludeFiles = append(excludeFiles, filename) + s.broadcastUploadProgressSync(userID, sessionID, 0.25, "processing", "Extracting ZIP...") zipPath := filepath.Join(tmpDir, filename) log.Printf("Extracting ZIP file: %s", zipPath) - processedExtractedFiles, err = s.storage.ExtractZip(zipPath, tmpDir) + processedExtractedFiles, err := s.storage.ExtractZip(zipPath, tmpDir) if err != nil { log.Printf("ERROR: Failed to extract ZIP file: %v", err) os.RemoveAll(tmpDir) - s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to extract ZIP file: %v", err)) + if ownerUserID, ok := s.failUploadSession(sessionID, err.Error()); ok { + s.broadcastUploadProgressSync(ownerUserID, sessionID, 0, "error", err.Error()) + } return } - log.Printf("Successfully extracted %d files from ZIP", len(processedExtractedFiles)) + extractedFilesCount = len(processedExtractedFiles) + log.Printf("Successfully extracted %d files from ZIP", extractedFilesCount) - // Find main blend file if mainBlendParam != "" { processedMainBlendFile = filepath.Join(tmpDir, mainBlendParam) if _, err := os.Stat(processedMainBlendFile); err != nil { log.Printf("ERROR: Specified main blend file not found: %s", mainBlendParam) - os.RemoveAll(tmpDir) - s.respondError(w, http.StatusBadRequest, fmt.Sprintf("Specified main blend file not found: %s", 
mainBlendParam)) + errMsg := "Specified main blend file not found: " + mainBlendParam + if ownerUserID, ok := s.failUploadSession(sessionID, errMsg); ok { + s.broadcastUploadProgressSync(ownerUserID, sessionID, 0, "error", errMsg) + } return } } else { - // Auto-detect: find blend files in root directory blendFiles := []string{} err := filepath.Walk(tmpDir, func(path string, info os.FileInfo, err error) error { if err != nil { @@ -1628,25 +1747,34 @@ func (s *Manager) handleUploadFileForJobCreation(w http.ResponseWriter, r *http. } return nil }) - if err == nil && len(blendFiles) == 1 { + if err != nil { + if ownerUserID, ok := s.failUploadSession(sessionID, err.Error()); ok { + s.broadcastUploadProgressSync(ownerUserID, sessionID, 0, "error", err.Error()) + } + return + } + if len(blendFiles) == 1 { processedMainBlendFile = blendFiles[0] } else if len(blendFiles) > 1 { - // Multiple blend files - return response with list for user to select blendFileNames := []string{} for _, f := range blendFiles { rel, _ := filepath.Rel(tmpDir, f) blendFileNames = append(blendFileNames, rel) } - // Return response indicating multiple blend files found - response := map[string]interface{}{ - "session_id": sessionID, - "file_name": filename, - "file_size": fileSize, - "status": "select_blend", - "zip_extracted": true, - "blend_files": blendFileNames, + s.uploadSessionsMu.Lock() + if session, exists := s.uploadSessions[sessionID]; exists { + session.Status = "select_blend" + session.Phase = uploadSessionPhase("select_blend") + session.Message = "Select main blend file" + session.Progress = 0.5 + session.ResultBlendFiles = blendFileNames + session.ResultFileName = filename + session.ResultFileSize = fileSize + session.ResultZipExtracted = true + session.ResultExtractedFilesCnt = extractedFilesCount } - s.respondJSON(w, http.StatusOK, response) + s.uploadSessionsMu.Unlock() + s.broadcastUploadProgressSync(userID, sessionID, 0.5, "select_blend", "Select main blend file") return } } @@ 
-1654,65 +1782,102 @@ func (s *Manager) handleUploadFileForJobCreation(w http.ResponseWriter, r *http. processedMainBlendFile = mainBlendFile } - // Create context archive - var excludeFiles []string - if strings.HasSuffix(strings.ToLower(filename), ".zip") { - excludeFiles = append(excludeFiles, filename) - } - - log.Printf("Creating context archive for session %s", sessionID) + s.broadcastUploadProgressSync(userID, sessionID, 0.4, "creating_context", "Creating context archive...") contextPath := filepath.Join(tmpDir, "context.tar") - contextPath, err = s.createContextFromDir(tmpDir, contextPath, excludeFiles...) + contextPath, err := s.createContextFromDir(tmpDir, contextPath, excludeFiles...) if err != nil { log.Printf("ERROR: Failed to create context archive: %v", err) - os.RemoveAll(tmpDir) - s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to create context archive: %v", err)) + if ownerUserID, ok := s.failUploadSession(sessionID, err.Error()); ok { + s.broadcastUploadProgressSync(ownerUserID, sessionID, 0, "error", err.Error()) + } return } - // Extract metadata from context archive - log.Printf("Extracting metadata from context archive for session %s", sessionID) - processedMetadata, err = s.extractMetadataFromTempContext(contextPath) + s.broadcastUploadProgressSync(userID, sessionID, 0.7, "extracting_metadata", "Extracting metadata...") + processedMetadata, err := s.extractMetadataFromTempContext(contextPath) if err != nil { log.Printf("Warning: Failed to extract metadata: %v", err) - // Continue anyway - user can fill in manually processedMetadata = nil } - // Build response with all results - response := map[string]interface{}{ - "session_id": sessionID, - "file_name": filename, - "file_size": fileSize, - "context_archive": filepath.Base(contextPath), - "status": "completed", + var mainBlendRel string + if processedMainBlendFile != "" { + mainBlendRel, _ = filepath.Rel(tmpDir, processedMainBlendFile) } - - if 
strings.HasSuffix(strings.ToLower(filename), ".zip") { - response["zip_extracted"] = true - response["extracted_files_count"] = len(processedExtractedFiles) - if processedMainBlendFile != "" { - relPath, _ := filepath.Rel(tmpDir, processedMainBlendFile) - response["main_blend_file"] = relPath - } - } else if processedMainBlendFile != "" { - relPath, _ := filepath.Rel(tmpDir, processedMainBlendFile) - response["main_blend_file"] = relPath - } - - if processedMetadata != nil { - response["metadata"] = processedMetadata - response["metadata_extracted"] = true - } else { - response["metadata_extracted"] = false - } - - // Clean up upload session immediately (no longer needed for WebSocket) s.uploadSessionsMu.Lock() - delete(s.uploadSessions, sessionID) + if session, exists := s.uploadSessions[sessionID]; exists { + session.Status = "completed" + session.Phase = uploadSessionPhase("completed") + session.Message = "Ready" + session.Progress = 1.0 + session.ResultContextArchive = filepath.Base(contextPath) + session.ResultMetadata = processedMetadata + session.ResultMainBlendFile = mainBlendRel + session.ResultFileName = filename + session.ResultFileSize = fileSize + session.ResultZipExtracted = strings.HasSuffix(strings.ToLower(filename), ".zip") + session.ResultExtractedFilesCnt = extractedFilesCount + session.ResultMetadataExtracted = processedMetadata != nil + if err != nil { + session.ResultMetadataError = err.Error() + } + } s.uploadSessionsMu.Unlock() + s.broadcastUploadProgressSync(userID, sessionID, 1.0, "completed", "Ready") +} - // Return response with metadata +// handleUploadStatus returns the current status of an upload session (for polling when processing is async). 
+func (s *Manager) handleUploadStatus(w http.ResponseWriter, r *http.Request) { + userID, err := getUserID(r) + if err != nil { + s.respondError(w, http.StatusUnauthorized, err.Error()) + return + } + sessionID := r.URL.Query().Get("session_id") + if sessionID == "" { + s.respondError(w, http.StatusBadRequest, "session_id required") + return + } + s.uploadSessionsMu.RLock() + session, exists := s.uploadSessions[sessionID] + s.uploadSessionsMu.RUnlock() + if !exists || session.UserID != userID { + s.respondError(w, http.StatusNotFound, "Upload session not found") + return + } + phase := session.Phase + if phase == "" { + phase = uploadSessionPhase(session.Status) + } + response := map[string]interface{}{ + "session_id": sessionID, + "status": session.Status, + "phase": phase, + "progress": session.Progress, + "message": session.Message, + } + if session.Status == "completed" { + response["file_name"] = session.ResultFileName + response["file_size"] = session.ResultFileSize + response["context_archive"] = session.ResultContextArchive + response["main_blend_file"] = session.ResultMainBlendFile + response["zip_extracted"] = session.ResultZipExtracted + response["extracted_files_count"] = session.ResultExtractedFilesCnt + response["metadata_extracted"] = session.ResultMetadataExtracted + if session.ResultMetadataError != "" { + response["metadata_error"] = session.ResultMetadataError + } + if session.ResultMetadata != nil { + response["metadata"] = session.ResultMetadata + } + } else if session.Status == "error" { + response["error"] = session.ErrorMessage + } else if session.Status == "select_blend" { + response["zip_extracted"] = true + response["blend_files"] = session.ResultBlendFiles + response["file_name"] = session.ResultFileName + response["file_size"] = session.ResultFileSize + } s.respondJSON(w, http.StatusOK, response) } @@ -1929,181 +2094,7 @@ func (s *Manager) runBlenderMetadataExtraction(blendFile, workDir, blenderVersio // createContextFromDir creates a 
context archive from a source directory to a specific destination path func (s *Manager) createContextFromDir(sourceDir, destPath string, excludeFiles ...string) (string, error) { - // Build set of files to exclude - excludeSet := make(map[string]bool) - for _, excludeFile := range excludeFiles { - excludePath := filepath.Clean(excludeFile) - excludeSet[excludePath] = true - excludeSet[filepath.ToSlash(excludePath)] = true - } - - // Collect all files from source directory - var filesToInclude []string - err := filepath.Walk(sourceDir, func(path string, info os.FileInfo, err error) error { - if err != nil { - return err - } - if info.IsDir() { - return nil - } - - // Skip Blender save files - lower := strings.ToLower(info.Name()) - idx := strings.LastIndex(lower, ".blend") - if idx != -1 { - suffix := lower[idx+len(".blend"):] - if len(suffix) > 0 { - isSaveFile := true - for _, r := range suffix { - if r < '0' || r > '9' { - isSaveFile = false - break - } - } - if isSaveFile { - return nil - } - } - } - - relPath, err := filepath.Rel(sourceDir, path) - if err != nil { - return err - } - cleanRelPath := filepath.Clean(relPath) - if strings.HasPrefix(cleanRelPath, "..") { - return fmt.Errorf("invalid file path: %s", relPath) - } - - if excludeSet[cleanRelPath] || excludeSet[filepath.ToSlash(cleanRelPath)] { - return nil - } - - filesToInclude = append(filesToInclude, path) - return nil - }) - if err != nil { - return "", fmt.Errorf("failed to walk source directory: %w", err) - } - - if len(filesToInclude) == 0 { - return "", fmt.Errorf("no files found to include in context archive") - } - - // Collect relative paths to find common prefix - relPaths := make([]string, 0, len(filesToInclude)) - for _, filePath := range filesToInclude { - relPath, err := filepath.Rel(sourceDir, filePath) - if err != nil { - return "", fmt.Errorf("failed to get relative path: %w", err) - } - relPaths = append(relPaths, relPath) - } - - // Find and strip common leading directory - 
commonPrefix := "" - if len(relPaths) > 0 { - firstComponents := make([]string, 0, len(relPaths)) - for _, path := range relPaths { - parts := strings.Split(filepath.ToSlash(path), "/") - if len(parts) > 0 && parts[0] != "" { - firstComponents = append(firstComponents, parts[0]) - } else { - firstComponents = nil - break - } - } - if len(firstComponents) > 0 { - commonFirst := firstComponents[0] - allSame := true - for _, comp := range firstComponents { - if comp != commonFirst { - allSame = false - break - } - } - if allSame { - commonPrefix = commonFirst + "/" - } - } - } - - // Validate single .blend file at root - blendFilesAtRoot := 0 - for _, relPath := range relPaths { - tarPath := filepath.ToSlash(relPath) - if commonPrefix != "" && strings.HasPrefix(tarPath, commonPrefix) { - tarPath = strings.TrimPrefix(tarPath, commonPrefix) - } - if strings.HasSuffix(strings.ToLower(tarPath), ".blend") && !strings.Contains(tarPath, "/") { - blendFilesAtRoot++ - } - } - - if blendFilesAtRoot == 0 { - return "", fmt.Errorf("no .blend file found at root level in context archive - .blend files must be at the root level of the uploaded archive, not in subdirectories") - } - if blendFilesAtRoot > 1 { - return "", fmt.Errorf("multiple .blend files found at root level in context archive (found %d, expected 1)", blendFilesAtRoot) - } - - // Create the tar file - contextFile, err := os.Create(destPath) - if err != nil { - return "", fmt.Errorf("failed to create context file: %w", err) - } - defer contextFile.Close() - - tarWriter := tar.NewWriter(contextFile) - defer tarWriter.Close() - - // Add each file to the tar archive - for i, filePath := range filesToInclude { - file, err := os.Open(filePath) - if err != nil { - return "", fmt.Errorf("failed to open file: %w", err) - } - - info, err := file.Stat() - if err != nil { - file.Close() - return "", fmt.Errorf("failed to stat file: %w", err) - } - - relPath := relPaths[i] - tarPath := filepath.ToSlash(relPath) - if commonPrefix 
!= "" && strings.HasPrefix(tarPath, commonPrefix) { - tarPath = strings.TrimPrefix(tarPath, commonPrefix) - } - - header, err := tar.FileInfoHeader(info, "") - if err != nil { - file.Close() - return "", fmt.Errorf("failed to create tar header: %w", err) - } - header.Name = tarPath - - if err := tarWriter.WriteHeader(header); err != nil { - file.Close() - return "", fmt.Errorf("failed to write tar header: %w", err) - } - - if _, err := io.Copy(tarWriter, file); err != nil { - file.Close() - return "", fmt.Errorf("failed to write file to tar: %w", err) - } - - file.Close() - } - - if err := tarWriter.Close(); err != nil { - return "", fmt.Errorf("failed to close tar writer: %w", err) - } - if err := contextFile.Close(); err != nil { - return "", fmt.Errorf("failed to close context file: %w", err) - } - - return destPath, nil + return s.storage.CreateContextArchiveFromDirToPath(sourceDir, destPath, excludeFiles...) } // handleListJobFiles lists files for a job with pagination @@ -2606,6 +2597,166 @@ func (s *Manager) handleDownloadJobFile(w http.ResponseWriter, r *http.Request) io.Copy(w, file) } +// handleDownloadEXRZip downloads all EXR output files for a job as a ZIP archive. 
+func (s *Manager) handleDownloadEXRZip(w http.ResponseWriter, r *http.Request) { + userID, err := getUserID(r) + if err != nil { + s.respondError(w, http.StatusUnauthorized, err.Error()) + return + } + + jobID, err := parseID(r, "id") + if err != nil { + s.respondError(w, http.StatusBadRequest, err.Error()) + return + } + + var jobName string + + // Verify job belongs to user (unless admin) + isAdmin := isAdminUser(r) + if !isAdmin { + var jobUserID int64 + err = s.db.With(func(conn *sql.DB) error { + return conn.QueryRow("SELECT user_id, name FROM jobs WHERE id = ?", jobID).Scan(&jobUserID, &jobName) + }) + if err == sql.ErrNoRows { + s.respondError(w, http.StatusNotFound, "Job not found") + return + } + if err != nil { + s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to verify job: %v", err)) + return + } + if jobUserID != userID { + s.respondError(w, http.StatusForbidden, "Access denied") + return + } + } else { + var exists bool + err = s.db.With(func(conn *sql.DB) error { + return conn.QueryRow("SELECT EXISTS(SELECT 1 FROM jobs WHERE id = ?), COALESCE((SELECT name FROM jobs WHERE id = ?), '')", jobID, jobID).Scan(&exists, &jobName) + }) + if err != nil || !exists { + s.respondError(w, http.StatusNotFound, "Job not found") + return + } + } + + type zipFile struct { + Path string + Name string + } + files := make([]zipFile, 0) + err = s.db.With(func(conn *sql.DB) error { + rows, qErr := conn.Query( + `SELECT file_path, file_name + FROM job_files + WHERE job_id = ? AND file_type = ? 
AND LOWER(file_name) LIKE '%.exr' + ORDER BY created_at ASC`, + jobID, types.JobFileTypeOutput, + ) + if qErr != nil { + return qErr + } + defer rows.Close() + for rows.Next() { + var item zipFile + if scanErr := rows.Scan(&item.Path, &item.Name); scanErr != nil { + return scanErr + } + files = append(files, item) + } + return rows.Err() + }) + if err != nil { + s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to query EXR files: %v", err)) + return + } + if len(files) == 0 { + s.respondError(w, http.StatusNotFound, "No EXR output files found for this job") + return + } + + safeJobName := strings.TrimSpace(jobName) + if safeJobName == "" { + safeJobName = fmt.Sprintf("job-%d", jobID) + } + safeJobName = strings.Map(func(r rune) rune { + switch { + case r >= 'a' && r <= 'z': + return r + case r >= 'A' && r <= 'Z': + return r + case r >= '0' && r <= '9': + return r + case r == '-', r == '_', r == '.': + return r + case r == ' ': + return '-' + default: + return '-' + } + }, safeJobName) + for strings.Contains(safeJobName, "--") { + safeJobName = strings.ReplaceAll(safeJobName, "--", "-") + } + safeJobName = strings.Trim(safeJobName, "-_.") + if safeJobName == "" { + safeJobName = fmt.Sprintf("job-%d", jobID) + } + + fileName := fmt.Sprintf("%s-exr.zip", safeJobName) + w.Header().Set("Content-Type", "application/zip") + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s", fileName)) + + zipWriter := zip.NewWriter(w) + defer zipWriter.Close() + + usedNames := make(map[string]int) + for _, item := range files { + file, openErr := s.storage.GetFile(item.Path) + if openErr != nil { + log.Printf("Warning: skipping missing EXR file for job %d: %s (%v)", jobID, item.Path, openErr) + continue + } + + entryName := strings.TrimSpace(item.Name) + if entryName == "" { + entryName = filepath.Base(item.Path) + } + entryName = strings.ReplaceAll(entryName, "\\", "/") + entryName = strings.TrimLeft(entryName, "/") + if entryName == "" || 
strings.Contains(entryName, "..") { + entryName = filepath.Base(item.Path) + } + if entryName == "" { + entryName = fmt.Sprintf("frame-%d.exr", len(usedNames)+1) + } + + baseEntryName := entryName + if n := usedNames[baseEntryName]; n > 0 { + ext := filepath.Ext(entryName) + base := strings.TrimSuffix(entryName, ext) + entryName = fmt.Sprintf("%s (%d)%s", base, n+1, ext) + } + usedNames[baseEntryName]++ + + entryWriter, createErr := zipWriter.Create(entryName) + if createErr != nil { + file.Close() + log.Printf("Warning: skipping EXR file entry for job %d: %s (%v)", jobID, entryName, createErr) + continue + } + if _, copyErr := io.Copy(entryWriter, file); copyErr != nil { + file.Close() + log.Printf("Warning: failed to add EXR file to zip for job %d: %s (%v)", jobID, entryName, copyErr) + continue + } + file.Close() + } +} + // handlePreviewEXR converts an EXR file to PNG for browser preview // Uses ImageMagick to convert with HDR tone mapping and alpha preservation func (s *Manager) handlePreviewEXR(w http.ResponseWriter, r *http.Request) { @@ -2697,42 +2848,21 @@ func (s *Manager) handlePreviewEXR(w http.ResponseWriter, r *http.Request) { tmpFile.Close() defer os.Remove(tmpPath) - // Convert EXR to PNG using ImageMagick - // -colorspace sRGB: Convert from linear RGB to sRGB (matches SDR encoding pipeline) - // -depth 16: Use 16-bit depth for better quality - // -alpha on: Preserve alpha channel - // Note: Removed -auto-level to avoid automatic tone mapping that changes colors - result, err := executils.RunCommand( - "magick", - []string{ - filePath, - "-colorspace", "sRGB", - "-depth", "16", - "-alpha", "on", - tmpPath, - }, - "", // dir - nil, // env - 0, // taskID - nil, // tracker - ) - + // Convert EXR to PNG for in-browser preview. EXR is linear; we convert to sRGB for display. 
+ // -resize 1920x1080>: only shrink if larger, limits preview size and conversion time + // -colorspace sRGB, -depth 16, -alpha on: quality and pipeline match + exrPreviewTimeout := 60 * time.Second + magickArgs := []string{ + filePath, + "-resize", "1920x1080>", + "-colorspace", "sRGB", + "-depth", "16", + "-alpha", "on", + tmpPath, + } + result, err := executils.RunCommandWithTimeout(exrPreviewTimeout, "magick", magickArgs, "", nil, 0, nil) if err != nil { - // Try with 'convert' command (older ImageMagick) - result, err = executils.RunCommand( - "convert", - []string{ - filePath, - "-colorspace", "sRGB", - "-depth", "16", - "-alpha", "on", - tmpPath, - }, - "", // dir - nil, // env - 0, // taskID - nil, // tracker - ) + result, err = executils.RunCommandWithTimeout(exrPreviewTimeout, "convert", magickArgs, "", nil, 0, nil) if err != nil { log.Printf("EXR conversion failed: %v, output: %s %s", err, result.Stdout, result.Stderr) s.respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to convert EXR: %v", err)) @@ -2950,7 +3080,7 @@ func (s *Manager) handleListJobTasks(w http.ResponseWriter, r *http.Request) { } // Build query with filters - query := `SELECT id, job_id, runner_id, frame, status, task_type, + query := `SELECT id, job_id, runner_id, frame, frame_end, status, task_type, current_step, retry_count, max_retries, output_path, created_at, started_at, completed_at, error_message, timeout_seconds FROM tasks WHERE job_id = ?` @@ -3029,6 +3159,7 @@ func (s *Manager) handleListJobTasks(w http.ResponseWriter, r *http.Request) { for rows.Next() { var task types.Task var runnerID sql.NullInt64 + var frameEnd sql.NullInt64 var startedAt, completedAt sql.NullTime var timeoutSeconds sql.NullInt64 var errorMessage sql.NullString @@ -3036,7 +3167,7 @@ func (s *Manager) handleListJobTasks(w http.ResponseWriter, r *http.Request) { var outputPath sql.NullString err := rows.Scan( - &task.ID, &task.JobID, &runnerID, &task.Frame, + &task.ID, &task.JobID, 
&runnerID, &task.Frame, &frameEnd, &task.Status, &task.TaskType, ¤tStep, &task.RetryCount, &task.MaxRetries, &outputPath, &task.CreatedAt, &startedAt, &completedAt, &errorMessage, &timeoutSeconds, @@ -3049,6 +3180,10 @@ func (s *Manager) handleListJobTasks(w http.ResponseWriter, r *http.Request) { if runnerID.Valid { task.RunnerID = &runnerID.Int64 } + if frameEnd.Valid { + fe := int(frameEnd.Int64) + task.FrameEnd = &fe + } if startedAt.Valid { task.StartedAt = &startedAt.Time } @@ -3174,7 +3309,7 @@ func (s *Manager) handleListJobTasksSummary(w http.ResponseWriter, r *http.Reque } // Build query - only select summary fields - query := `SELECT id, frame, status, task_type, runner_id + query := `SELECT id, frame, frame_end, status, task_type, runner_id FROM tasks WHERE job_id = ?` args := []interface{}{jobID} @@ -3236,6 +3371,7 @@ func (s *Manager) handleListJobTasksSummary(w http.ResponseWriter, r *http.Reque type TaskSummary struct { ID int64 `json:"id"` Frame int `json:"frame"` + FrameEnd *int `json:"frame_end,omitempty"` Status string `json:"status"` TaskType string `json:"task_type"` RunnerID *int64 `json:"runner_id,omitempty"` @@ -3245,9 +3381,10 @@ func (s *Manager) handleListJobTasksSummary(w http.ResponseWriter, r *http.Reque for rows.Next() { var summary TaskSummary var runnerID sql.NullInt64 + var frameEnd sql.NullInt64 err := rows.Scan( - &summary.ID, &summary.Frame, + &summary.ID, &summary.Frame, &frameEnd, &summary.Status, &summary.TaskType, &runnerID, ) if err != nil { @@ -3255,6 +3392,10 @@ func (s *Manager) handleListJobTasksSummary(w http.ResponseWriter, r *http.Reque return } + if frameEnd.Valid { + fe := int(frameEnd.Int64) + summary.FrameEnd = &fe + } if runnerID.Valid { summary.RunnerID = &runnerID.Int64 } @@ -3342,7 +3483,7 @@ func (s *Manager) handleBatchGetTasks(w http.ResponseWriter, r *http.Request) { args[i+1] = taskID } - query := fmt.Sprintf(`SELECT id, job_id, runner_id, frame, status, task_type, + query := fmt.Sprintf(`SELECT id, 
job_id, runner_id, frame, frame_end, status, task_type, current_step, retry_count, max_retries, output_path, created_at, started_at, completed_at, error_message, timeout_seconds FROM tasks WHERE job_id = ? AND id IN (%s) ORDER BY frame ASC`, strings.Join(placeholders, ",")) @@ -3363,6 +3504,7 @@ func (s *Manager) handleBatchGetTasks(w http.ResponseWriter, r *http.Request) { for rows.Next() { var task types.Task var runnerID sql.NullInt64 + var frameEnd sql.NullInt64 var startedAt, completedAt sql.NullTime var timeoutSeconds sql.NullInt64 var errorMessage sql.NullString @@ -3370,7 +3512,7 @@ func (s *Manager) handleBatchGetTasks(w http.ResponseWriter, r *http.Request) { var outputPath sql.NullString err := rows.Scan( - &task.ID, &task.JobID, &runnerID, &task.Frame, + &task.ID, &task.JobID, &runnerID, &task.Frame, &frameEnd, &task.Status, &task.TaskType, ¤tStep, &task.RetryCount, &task.MaxRetries, &outputPath, &task.CreatedAt, &startedAt, &completedAt, &errorMessage, &timeoutSeconds, @@ -3383,6 +3525,10 @@ func (s *Manager) handleBatchGetTasks(w http.ResponseWriter, r *http.Request) { if runnerID.Valid { task.RunnerID = &runnerID.Int64 } + if frameEnd.Valid { + fe := int(frameEnd.Int64) + task.FrameEnd = &fe + } if startedAt.Valid { task.StartedAt = &startedAt.Time } @@ -3483,10 +3629,17 @@ func (s *Manager) handleGetTaskLogs(w http.ResponseWriter, r *http.Request) { logLevel := r.URL.Query().Get("log_level") sinceIDStr := r.URL.Query().Get("since_id") limitStr := r.URL.Query().Get("limit") - limit := 100 // default (reduced from 1000) + limit := 100 // default + unlimited := false if limitStr != "" { - if l, err := strconv.Atoi(limitStr); err == nil && l > 0 && l <= 10000 { - limit = l + if l, err := strconv.Atoi(limitStr); err == nil { + if l == 0 { + unlimited = true // explicit no-limit mode + } else if l > 0 && l <= 10000 { + limit = l + } else if l > 10000 { + limit = 10000 + } } } @@ -3511,8 +3664,11 @@ func (s *Manager) handleGetTaskLogs(w 
http.ResponseWriter, r *http.Request) { query += " AND log_level = ?" args = append(args, logLevel) } - query += " ORDER BY id ASC LIMIT ?" - args = append(args, limit) + query += " ORDER BY id ASC" + if !unlimited { + query += " LIMIT ?" + args = append(args, limit) + } var rows *sql.Rows err = s.db.With(func(conn *sql.DB) error { @@ -3553,7 +3709,12 @@ func (s *Manager) handleGetTaskLogs(w http.ResponseWriter, r *http.Request) { response := map[string]interface{}{ "logs": logs, "last_id": lastID, - "limit": limit, + "limit": func() int { + if unlimited { + return 0 + } + return limit + }(), } s.respondJSON(w, http.StatusOK, response) } @@ -4403,7 +4564,7 @@ func (s *Manager) sendInitialState(clientConn *ClientConnection, channel string) // Get and send tasks (no limit - send all) err = s.db.With(func(conn *sql.DB) error { rows, err2 := conn.Query( - `SELECT id, job_id, runner_id, frame, status, task_type, + `SELECT id, job_id, runner_id, frame, frame_end, status, task_type, current_step, retry_count, max_retries, output_path, created_at, started_at, completed_at, error_message, timeout_seconds FROM tasks WHERE job_id = ? 
ORDER BY frame ASC`, @@ -4416,6 +4577,7 @@ func (s *Manager) sendInitialState(clientConn *ClientConnection, channel string) for rows.Next() { var task types.Task var runnerID sql.NullInt64 + var frameEnd sql.NullInt64 var startedAt, completedAt sql.NullTime var timeoutSeconds sql.NullInt64 var errorMessage sql.NullString @@ -4423,7 +4585,7 @@ func (s *Manager) sendInitialState(clientConn *ClientConnection, channel string) var outputPath sql.NullString err := rows.Scan( - &task.ID, &task.JobID, &runnerID, &task.Frame, + &task.ID, &task.JobID, &runnerID, &task.Frame, &frameEnd, &task.Status, &task.TaskType, ¤tStep, &task.RetryCount, &task.MaxRetries, &outputPath, &task.CreatedAt, &startedAt, &completedAt, &errorMessage, &timeoutSeconds, @@ -4435,6 +4597,10 @@ func (s *Manager) sendInitialState(clientConn *ClientConnection, channel string) if runnerID.Valid { task.RunnerID = &runnerID.Int64 } + if frameEnd.Valid { + fe := int(frameEnd.Int64) + task.FrameEnd = &fe + } if startedAt.Valid { task.StartedAt = &startedAt.Time } @@ -5121,22 +5287,11 @@ func (s *Manager) broadcastToAllClients(channel string, msg map[string]interface // This function updates the session synchronously (quick operation) but broadcasts // asynchronously to avoid blocking the upload handler on slow WebSocket writes. 
func (s *Manager) broadcastUploadProgress(sessionID string, progress float64, status, message string) { - s.uploadSessionsMu.RLock() - session, exists := s.uploadSessions[sessionID] - s.uploadSessionsMu.RUnlock() - + userID, exists := s.updateUploadSessionState(sessionID, progress, status, message) if !exists { return } - // Update session synchronously (quick operation - just updating struct fields) - s.uploadSessionsMu.Lock() - session.Progress = progress - session.Status = status - session.Message = message - userID := session.UserID // Capture userID before releasing lock - s.uploadSessionsMu.Unlock() - // Broadcast asynchronously to avoid blocking upload handler on slow WebSocket writes // This prevents the entire HTTP server from freezing during large file uploads go func() { @@ -5152,6 +5307,7 @@ func (s *Manager) broadcastUploadProgress(sessionID string, progress float64, st "data": map[string]interface{}{ "progress": progress, "status": status, + "phase": uploadSessionPhase(status), "message": message, }, "timestamp": time.Now().Unix(), @@ -5168,14 +5324,7 @@ func (s *Manager) broadcastUploadProgress(sessionID string, progress float64, st // broadcastUploadProgressSync sends upload progress synchronously (for completion messages) // This ensures the message is sent immediately and not lost func (s *Manager) broadcastUploadProgressSync(userID int64, sessionID string, progress float64, status, message string) { - // Update session synchronously - s.uploadSessionsMu.Lock() - if session, exists := s.uploadSessions[sessionID]; exists { - session.Progress = progress - session.Status = status - session.Message = message - } - s.uploadSessionsMu.Unlock() + s.updateUploadSessionState(sessionID, progress, status, message) // Determine message type msgType := "upload_progress" @@ -5189,6 +5338,7 @@ func (s *Manager) broadcastUploadProgressSync(userID int64, sessionID string, pr "data": map[string]interface{}{ "progress": progress, "status": status, + "phase": 
uploadSessionPhase(status), "message": message, }, "timestamp": time.Now().Unix(), diff --git a/internal/manager/manager.go b/internal/manager/manager.go index ab77f11..6aaa788 100644 --- a/internal/manager/manager.go +++ b/internal/manager/manager.go @@ -59,6 +59,7 @@ type Manager struct { secrets *authpkg.Secrets storage *storage.Storage router *chi.Mux + ui *uiRenderer // WebSocket connections wsUpgrader websocket.Upgrader @@ -125,10 +126,24 @@ type ClientConnection struct { type UploadSession struct { SessionID string UserID int64 + TempDir string Progress float64 Status string // "uploading", "processing", "extracting_metadata", "creating_context", "completed", "error" + Phase string // "upload", "processing", "ready", "error", "action_required" Message string CreatedAt time.Time + // Result fields set when Status is "completed" (for async processing) + ResultContextArchive string + ResultMetadata interface{} // *types.BlendMetadata when set + ResultMainBlendFile string + ResultFileName string + ResultFileSize int64 + ResultZipExtracted bool + ResultExtractedFilesCnt int + ResultMetadataExtracted bool + ResultMetadataError string // set when Status is "completed" but metadata extraction failed + ErrorMessage string // set when Status is "error" + ResultBlendFiles []string // set when Status is "select_blend" (relative paths for user to pick) } // NewManager creates a new manager server @@ -137,6 +152,10 @@ func NewManager(db *database.DB, cfg *config.Config, auth *authpkg.Auth, storage if err != nil { return nil, fmt.Errorf("failed to initialize secrets: %w", err) } + ui, err := newUIRenderer() + if err != nil { + return nil, fmt.Errorf("failed to initialize UI renderer: %w", err) + } s := &Manager{ db: db, @@ -145,6 +164,7 @@ func NewManager(db *database.DB, cfg *config.Config, auth *authpkg.Auth, storage secrets: secrets, storage: storage, router: chi.NewRouter(), + ui: ui, startTime: time.Now(), wsUpgrader: websocket.Upgrader{ CheckOrigin: 
checkWebSocketOrigin, @@ -450,6 +470,7 @@ func (w *gzipResponseWriter) WriteHeader(statusCode int) { func (s *Manager) setupRoutes() { // Health check endpoint (unauthenticated) s.router.Get("/api/health", s.handleHealthCheck) + s.setupUIRoutes() // Public routes (with stricter rate limiting for auth endpoints) s.router.Route("/api/auth", func(r chi.Router) { @@ -477,6 +498,7 @@ func (s *Manager) setupRoutes() { }) r.Post("/", s.handleCreateJob) r.Post("/upload", s.handleUploadFileForJobCreation) // Upload before job creation + r.Get("/upload/status", s.handleUploadStatus) // Poll upload processing status (session_id query param) r.Get("/", s.handleListJobs) r.Get("/summary", s.handleListJobsSummary) r.Post("/batch", s.handleBatchGetJobs) @@ -487,6 +509,7 @@ func (s *Manager) setupRoutes() { r.Get("/{id}/files", s.handleListJobFiles) r.Get("/{id}/files/count", s.handleGetJobFilesCount) r.Get("/{id}/context", s.handleListContextArchive) + r.Get("/{id}/files/exr-zip", s.handleDownloadEXRZip) r.Get("/{id}/files/{fileId}/download", s.handleDownloadJobFile) r.Get("/{id}/files/{fileId}/preview-exr", s.handlePreviewEXR) r.Get("/{id}/video", s.handleStreamVideo) @@ -522,7 +545,6 @@ func (s *Manager) setupRoutes() { r.Delete("/{id}", s.handleDeleteRunnerAPIKey) }) r.Get("/", s.handleListRunnersAdmin) - r.Post("/{id}/verify", s.handleVerifyRunner) r.Delete("/{id}", s.handleDeleteRunner) }) r.Route("/users", func(r chi.Router) { @@ -555,6 +577,7 @@ func (s *Manager) setupRoutes() { return http.HandlerFunc(s.runnerAuthMiddleware(next.ServeHTTP)) }) r.Get("/blender/download", s.handleDownloadBlender) + r.Get("/jobs/{jobId}/status", s.handleGetJobStatusForRunner) r.Get("/jobs/{jobId}/files", s.handleGetJobFilesForRunner) r.Get("/jobs/{jobId}/metadata", s.handleGetJobMetadataForRunner) r.Get("/files/{jobId}/{fileName}", s.handleDownloadFileForRunner) @@ -564,8 +587,8 @@ func (s *Manager) setupRoutes() { // Blender versions API (public, for job submission page) 
s.router.Get("/api/blender/versions", s.handleGetBlenderVersions) - // Serve static files (embedded React app with SPA fallback) - s.router.Handle("/*", web.SPAHandler()) + // Static assets for server-rendered UI. + s.router.Handle("/assets/*", web.StaticHandler()) } // ServeHTTP implements http.Handler diff --git a/internal/manager/renderer.go b/internal/manager/renderer.go new file mode 100644 index 0000000..03a9f02 --- /dev/null +++ b/internal/manager/renderer.go @@ -0,0 +1,104 @@ +package api + +import ( + "fmt" + "html/template" + "net/http" + "strings" + "time" + + authpkg "jiggablend/internal/auth" + "jiggablend/web" +) + +type uiRenderer struct { + templates *template.Template +} + +type pageData struct { + Title string + CurrentPath string + ContentTemplate string + PageScript string + User *authpkg.Session + Error string + Notice string + Data interface{} +} + +func newUIRenderer() (*uiRenderer, error) { + tpl, err := template.New("base").Funcs(template.FuncMap{ + "formatTime": func(t time.Time) string { + if t.IsZero() { + return "-" + } + return t.Local().Format("2006-01-02 15:04:05") + }, + "statusClass": func(status string) string { + switch status { + case "completed": + return "status-completed" + case "running": + return "status-running" + case "failed": + return "status-failed" + case "cancelled": + return "status-cancelled" + case "online": + return "status-online" + case "offline": + return "status-offline" + case "busy": + return "status-busy" + default: + return "status-pending" + } + }, + "progressInt": func(v float64) int { + if v < 0 { + return 0 + } + if v > 100 { + return 100 + } + return int(v) + }, + "derefInt": func(v *int) string { + if v == nil { + return "" + } + return fmt.Sprintf("%d", *v) + }, + "derefString": func(v *string) string { + if v == nil { + return "" + } + return *v + }, + "hasSuffixFold": func(value, suffix string) bool { + return strings.HasSuffix(strings.ToLower(value), strings.ToLower(suffix)) + }, + }).ParseFS( + 
web.GetTemplateFS(), + "templates/*.html", + "templates/partials/*.html", + ) + if err != nil { + return nil, fmt.Errorf("parse templates: %w", err) + } + return &uiRenderer{templates: tpl}, nil +} + +func (r *uiRenderer) render(w http.ResponseWriter, data pageData) { + w.Header().Set("Content-Type", "text/html; charset=utf-8") + if err := r.templates.ExecuteTemplate(w, "base", data); err != nil { + http.Error(w, "template render error", http.StatusInternalServerError) + } +} + +func (r *uiRenderer) renderTemplate(w http.ResponseWriter, templateName string, data interface{}) { + w.Header().Set("Content-Type", "text/html; charset=utf-8") + if err := r.templates.ExecuteTemplate(w, templateName, data); err != nil { + http.Error(w, "template render error", http.StatusInternalServerError) + } +} diff --git a/internal/manager/renderer_test.go b/internal/manager/renderer_test.go new file mode 100644 index 0000000..b82e5f2 --- /dev/null +++ b/internal/manager/renderer_test.go @@ -0,0 +1,13 @@ +package api + +import "testing" + +func TestNewUIRendererParsesTemplates(t *testing.T) { + renderer, err := newUIRenderer() + if err != nil { + t.Fatalf("newUIRenderer returned error: %v", err) + } + if renderer == nil || renderer.templates == nil { + t.Fatalf("renderer/templates should not be nil") + } +} diff --git a/internal/manager/runners.go b/internal/manager/runners.go index 0a55a9f..f6f4f4c 100644 --- a/internal/manager/runners.go +++ b/internal/manager/runners.go @@ -275,7 +275,8 @@ type NextJobTaskInfo struct { TaskID int64 `json:"task_id"` JobID int64 `json:"job_id"` JobName string `json:"job_name"` - Frame int `json:"frame"` + Frame int `json:"frame"` // frame start (inclusive) + FrameEnd int `json:"frame_end"` // frame end (inclusive); same as Frame for single-frame TaskType string `json:"task_type"` Metadata *types.BlendMetadata `json:"metadata,omitempty"` } @@ -376,6 +377,7 @@ func (s *Manager) handleNextJob(w http.ResponseWriter, r *http.Request) { TaskID int64 JobID 
int64 Frame int + FrameEnd sql.NullInt64 TaskType string JobName string JobUserID int64 @@ -385,7 +387,7 @@ func (s *Manager) handleNextJob(w http.ResponseWriter, r *http.Request) { err = s.db.With(func(conn *sql.DB) error { rows, err := conn.Query( - `SELECT t.id, t.job_id, t.frame, t.task_type, + `SELECT t.id, t.job_id, t.frame, t.frame_end, t.task_type, j.name as job_name, j.user_id, j.blend_metadata, t.condition FROM tasks t @@ -403,7 +405,7 @@ func (s *Manager) handleNextJob(w http.ResponseWriter, r *http.Request) { for rows.Next() { var task taskCandidate var condition sql.NullString - err := rows.Scan(&task.TaskID, &task.JobID, &task.Frame, &task.TaskType, + err := rows.Scan(&task.TaskID, &task.JobID, &task.Frame, &task.FrameEnd, &task.TaskType, &task.JobName, &task.JobUserID, &task.BlendMetadata, &condition) if err != nil { continue @@ -549,6 +551,11 @@ func (s *Manager) handleNextJob(w http.ResponseWriter, r *http.Request) { // Update job status s.updateJobStatusFromTasks(selectedTask.JobID) + // Frame end for response: use task range or single frame (NULL frame_end) + frameEnd := selectedTask.Frame + if selectedTask.FrameEnd.Valid { + frameEnd = int(selectedTask.FrameEnd.Int64) + } // Build response response := NextJobResponse{ JobToken: jobToken, @@ -558,6 +565,7 @@ func (s *Manager) handleNextJob(w http.ResponseWriter, r *http.Request) { JobID: selectedTask.JobID, JobName: selectedTask.JobName, Frame: selectedTask.Frame, + FrameEnd: frameEnd, TaskType: selectedTask.TaskType, Metadata: metadata, }, @@ -1959,6 +1967,12 @@ func (s *Manager) updateJobStatusFromTasks(jobID int64) { return } + // Cancellation is terminal from the user's perspective. + // Do not allow asynchronous task updates to revive cancelled jobs. 
+ if currentStatus == string(types.JobStatusCancelled) { + return + } + // Count total tasks and completed tasks var totalTasks, completedTasks int err = s.db.With(func(conn *sql.DB) error { diff --git a/internal/manager/ui.go b/internal/manager/ui.go new file mode 100644 index 0000000..ae9f04f --- /dev/null +++ b/internal/manager/ui.go @@ -0,0 +1,556 @@ +package api + +import ( + "database/sql" + "fmt" + "net/http" + "strconv" + "strings" + "time" + + authpkg "jiggablend/internal/auth" + + "github.com/go-chi/chi/v5" +) + +type uiJobSummary struct { + ID int64 + Name string + Status string + Progress float64 + FrameStart *int + FrameEnd *int + OutputFormat *string + CreatedAt time.Time +} + +type uiTaskSummary struct { + ID int64 + TaskType string + Status string + Frame int + FrameEnd *int + CurrentStep string + RetryCount int + Error string + StartedAt *time.Time + CompletedAt *time.Time +} + +type uiFileSummary struct { + ID int64 + FileName string + FileType string + FileSize int64 + CreatedAt time.Time +} + +func (s *Manager) setupUIRoutes() { + s.router.Get("/", s.handleUIRoot) + s.router.Get("/login", s.handleUILoginPage) + s.router.Post("/logout", s.handleUILogout) + + s.router.Group(func(r chi.Router) { + r.Use(func(next http.Handler) http.Handler { + return http.HandlerFunc(s.auth.Middleware(next.ServeHTTP)) + }) + r.Get("/jobs", s.handleUIJobsPage) + r.Get("/jobs/new", s.handleUINewJobPage) + r.Get("/jobs/{id}", s.handleUIJobDetailPage) + + r.Get("/ui/fragments/jobs", s.handleUIJobsFragment) + r.Get("/ui/fragments/jobs/{id}/tasks", s.handleUIJobTasksFragment) + r.Get("/ui/fragments/jobs/{id}/files", s.handleUIJobFilesFragment) + }) + + s.router.Group(func(r chi.Router) { + r.Use(func(next http.Handler) http.Handler { + return http.HandlerFunc(s.auth.AdminMiddleware(next.ServeHTTP)) + }) + r.Get("/admin", s.handleUIAdminPage) + r.Get("/ui/fragments/admin/runners", s.handleUIAdminRunnersFragment) + r.Get("/ui/fragments/admin/users", 
s.handleUIAdminUsersFragment) + r.Get("/ui/fragments/admin/apikeys", s.handleUIAdminAPIKeysFragment) + }) +} + +func (s *Manager) sessionFromRequest(r *http.Request) (*authpkg.Session, bool) { + cookie, err := r.Cookie("session_id") + if err != nil { + return nil, false + } + return s.auth.GetSession(cookie.Value) +} + +func (s *Manager) handleUIRoot(w http.ResponseWriter, r *http.Request) { + if _, ok := s.sessionFromRequest(r); ok { + http.Redirect(w, r, "/jobs", http.StatusFound) + return + } + http.Redirect(w, r, "/login", http.StatusFound) +} + +func (s *Manager) handleUILoginPage(w http.ResponseWriter, r *http.Request) { + if _, ok := s.sessionFromRequest(r); ok { + http.Redirect(w, r, "/jobs", http.StatusFound) + return + } + + s.ui.render(w, pageData{ + Title: "Login", + CurrentPath: "/login", + ContentTemplate: "page_login", + PageScript: "/assets/login.js", + Data: map[string]interface{}{ + "google_enabled": s.auth.IsGoogleOAuthConfigured(), + "discord_enabled": s.auth.IsDiscordOAuthConfigured(), + "local_enabled": s.auth.IsLocalLoginEnabled(), + "error": r.URL.Query().Get("error"), + }, + }) +} + +func (s *Manager) handleUILogout(w http.ResponseWriter, r *http.Request) { + cookie, err := r.Cookie("session_id") + if err == nil { + s.auth.DeleteSession(cookie.Value) + } + expired := &http.Cookie{ + Name: "session_id", + Value: "", + Path: "/", + MaxAge: -1, + HttpOnly: true, + SameSite: http.SameSiteLaxMode, + } + if s.cfg.IsProductionMode() { + expired.Secure = true + } + http.SetCookie(w, expired) + http.Redirect(w, r, "/login", http.StatusFound) +} + +func (s *Manager) handleUIJobsPage(w http.ResponseWriter, r *http.Request) { + user, _ := s.sessionFromRequest(r) + s.ui.render(w, pageData{ + Title: "Jobs", + CurrentPath: "/jobs", + ContentTemplate: "page_jobs", + PageScript: "/assets/jobs.js", + User: user, + }) +} + +func (s *Manager) handleUINewJobPage(w http.ResponseWriter, r *http.Request) { + user, _ := s.sessionFromRequest(r) + s.ui.render(w, 
pageData{ + Title: "New Job", + CurrentPath: "/jobs/new", + ContentTemplate: "page_jobs_new", + PageScript: "/assets/job_new.js", + User: user, + }) +} + +func (s *Manager) handleUIJobDetailPage(w http.ResponseWriter, r *http.Request) { + userID, err := getUserID(r) + if err != nil { + http.Redirect(w, r, "/login", http.StatusFound) + return + } + isAdmin := authpkg.IsAdmin(r.Context()) + jobID, err := parseID(r, "id") + if err != nil { + http.NotFound(w, r) + return + } + + job, err := s.getUIJob(jobID, userID, isAdmin) + if err != nil { + http.NotFound(w, r) + return + } + user, _ := s.sessionFromRequest(r) + s.ui.render(w, pageData{ + Title: fmt.Sprintf("Job %d", jobID), + CurrentPath: "/jobs", + ContentTemplate: "page_job_show", + PageScript: "/assets/job_show.js", + User: user, + Data: map[string]interface{}{"job": job}, + }) +} + +func (s *Manager) handleUIAdminPage(w http.ResponseWriter, r *http.Request) { + user, _ := s.sessionFromRequest(r) + regEnabled, _ := s.auth.IsRegistrationEnabled() + s.ui.render(w, pageData{ + Title: "Admin", + CurrentPath: "/admin", + ContentTemplate: "page_admin", + PageScript: "/assets/admin.js", + User: user, + Data: map[string]interface{}{ + "registration_enabled": regEnabled, + }, + }) +} + +func (s *Manager) handleUIJobsFragment(w http.ResponseWriter, r *http.Request) { + userID, err := getUserID(r) + if err != nil { + http.Error(w, "unauthorized", http.StatusUnauthorized) + return + } + jobs, err := s.listUIJobSummaries(userID, 50, 0) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + s.ui.renderTemplate(w, "partial_jobs_table", map[string]interface{}{"jobs": jobs}) +} + +func (s *Manager) handleUIJobTasksFragment(w http.ResponseWriter, r *http.Request) { + userID, err := getUserID(r) + if err != nil { + http.Error(w, "unauthorized", http.StatusUnauthorized) + return + } + isAdmin := authpkg.IsAdmin(r.Context()) + jobID, err := parseID(r, "id") + if err != nil { + http.Error(w, 
"invalid job id", http.StatusBadRequest) + return + } + + if _, err := s.getUIJob(jobID, userID, isAdmin); err != nil { + http.Error(w, "job not found", http.StatusNotFound) + return + } + + tasks, err := s.listUITasks(jobID) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + s.ui.renderTemplate(w, "partial_job_tasks", map[string]interface{}{ + "job_id": jobID, + "tasks": tasks, + }) +} + +func (s *Manager) handleUIJobFilesFragment(w http.ResponseWriter, r *http.Request) { + userID, err := getUserID(r) + if err != nil { + http.Error(w, "unauthorized", http.StatusUnauthorized) + return + } + isAdmin := authpkg.IsAdmin(r.Context()) + jobID, err := parseID(r, "id") + if err != nil { + http.Error(w, "invalid job id", http.StatusBadRequest) + return + } + + if _, err := s.getUIJob(jobID, userID, isAdmin); err != nil { + http.Error(w, "job not found", http.StatusNotFound) + return + } + + files, err := s.listUIFiles(jobID, 100) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + outputFiles := make([]uiFileSummary, 0, len(files)) + adminInputFiles := make([]uiFileSummary, 0) + for _, file := range files { + if strings.EqualFold(file.FileType, "output") { + outputFiles = append(outputFiles, file) + continue + } + if isAdmin { + adminInputFiles = append(adminInputFiles, file) + } + } + + s.ui.renderTemplate(w, "partial_job_files", map[string]interface{}{ + "job_id": jobID, + "files": outputFiles, + "is_admin": isAdmin, + "admin_input_files": adminInputFiles, + }) +} + +func (s *Manager) handleUIAdminRunnersFragment(w http.ResponseWriter, r *http.Request) { + var rows *sql.Rows + err := s.db.With(func(conn *sql.DB) error { + var qErr error + rows, qErr = conn.Query(`SELECT id, name, hostname, status, last_heartbeat, priority, created_at FROM runners ORDER BY created_at DESC`) + return qErr + }) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + 
defer rows.Close() + + type runner struct { + ID int64 + Name string + Hostname string + Status string + LastHeartbeat time.Time + Priority int + CreatedAt time.Time + } + all := make([]runner, 0) + for rows.Next() { + var item runner + if scanErr := rows.Scan(&item.ID, &item.Name, &item.Hostname, &item.Status, &item.LastHeartbeat, &item.Priority, &item.CreatedAt); scanErr != nil { + http.Error(w, scanErr.Error(), http.StatusInternalServerError) + return + } + all = append(all, item) + } + s.ui.renderTemplate(w, "partial_admin_runners", map[string]interface{}{"runners": all}) +} + +func (s *Manager) handleUIAdminUsersFragment(w http.ResponseWriter, r *http.Request) { + currentUserID, _ := getUserID(r) + firstUserID, _ := s.auth.GetFirstUserID() + var rows *sql.Rows + err := s.db.With(func(conn *sql.DB) error { + var qErr error + rows, qErr = conn.Query(`SELECT id, email, name, oauth_provider, is_admin, created_at FROM users ORDER BY created_at DESC`) + return qErr + }) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer rows.Close() + + type user struct { + ID int64 + Email string + Name string + OAuthProvider string + IsAdmin bool + IsFirstUser bool + CreatedAt time.Time + } + all := make([]user, 0) + for rows.Next() { + var item user + if scanErr := rows.Scan(&item.ID, &item.Email, &item.Name, &item.OAuthProvider, &item.IsAdmin, &item.CreatedAt); scanErr != nil { + http.Error(w, scanErr.Error(), http.StatusInternalServerError) + return + } + item.IsFirstUser = item.ID == firstUserID + all = append(all, item) + } + s.ui.renderTemplate(w, "partial_admin_users", map[string]interface{}{ + "users": all, + "current_user_id": currentUserID, + }) +} + +func (s *Manager) handleUIAdminAPIKeysFragment(w http.ResponseWriter, r *http.Request) { + keys, err := s.secrets.ListRunnerAPIKeys() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + type item struct { + ID int64 + Name string + 
Scope string + Key string + IsActive bool + CreatedAt time.Time + } + out := make([]item, 0, len(keys)) + for _, key := range keys { + out = append(out, item{ + ID: key.ID, + Name: key.Name, + Scope: key.Scope, + Key: key.Key, + IsActive: key.IsActive, + CreatedAt: key.CreatedAt, + }) + } + s.ui.renderTemplate(w, "partial_admin_apikeys", map[string]interface{}{"keys": out}) +} + +func (s *Manager) listUIJobSummaries(userID int64, limit int, offset int) ([]uiJobSummary, error) { + rows := &sql.Rows{} + err := s.db.With(func(conn *sql.DB) error { + var qErr error + rows, qErr = conn.Query( + `SELECT id, name, status, progress, frame_start, frame_end, output_format, created_at + FROM jobs WHERE user_id = ? ORDER BY created_at DESC LIMIT ? OFFSET ?`, + userID, limit, offset, + ) + return qErr + }) + if err != nil { + return nil, err + } + defer rows.Close() + + out := make([]uiJobSummary, 0) + for rows.Next() { + var item uiJobSummary + var frameStart, frameEnd sql.NullInt64 + var outputFormat sql.NullString + if scanErr := rows.Scan(&item.ID, &item.Name, &item.Status, &item.Progress, &frameStart, &frameEnd, &outputFormat, &item.CreatedAt); scanErr != nil { + return nil, scanErr + } + if frameStart.Valid { + v := int(frameStart.Int64) + item.FrameStart = &v + } + if frameEnd.Valid { + v := int(frameEnd.Int64) + item.FrameEnd = &v + } + if outputFormat.Valid { + item.OutputFormat = &outputFormat.String + } + out = append(out, item) + } + return out, nil +} + +func (s *Manager) getUIJob(jobID int64, userID int64, isAdmin bool) (uiJobSummary, error) { + var item uiJobSummary + var frameStart, frameEnd sql.NullInt64 + var outputFormat sql.NullString + + err := s.db.With(func(conn *sql.DB) error { + if isAdmin { + return conn.QueryRow( + `SELECT id, name, status, progress, frame_start, frame_end, output_format, created_at + FROM jobs WHERE id = ?`, + jobID, + ).Scan(&item.ID, &item.Name, &item.Status, &item.Progress, &frameStart, &frameEnd, &outputFormat, &item.CreatedAt) + 
} + return conn.QueryRow( + `SELECT id, name, status, progress, frame_start, frame_end, output_format, created_at + FROM jobs WHERE id = ? AND user_id = ?`, + jobID, userID, + ).Scan(&item.ID, &item.Name, &item.Status, &item.Progress, &frameStart, &frameEnd, &outputFormat, &item.CreatedAt) + }) + if err != nil { + return uiJobSummary{}, err + } + if frameStart.Valid { + v := int(frameStart.Int64) + item.FrameStart = &v + } + if frameEnd.Valid { + v := int(frameEnd.Int64) + item.FrameEnd = &v + } + if outputFormat.Valid { + item.OutputFormat = &outputFormat.String + } + return item, nil +} + +func (s *Manager) listUITasks(jobID int64) ([]uiTaskSummary, error) { + var rows *sql.Rows + err := s.db.With(func(conn *sql.DB) error { + var qErr error + rows, qErr = conn.Query( + `SELECT id, task_type, status, frame, frame_end, current_step, retry_count, error_message, started_at, completed_at + FROM tasks WHERE job_id = ? ORDER BY id ASC`, + jobID, + ) + return qErr + }) + if err != nil { + return nil, err + } + defer rows.Close() + + out := make([]uiTaskSummary, 0) + for rows.Next() { + var item uiTaskSummary + var frameEnd sql.NullInt64 + var currentStep sql.NullString + var errMsg sql.NullString + var startedAt, completedAt sql.NullTime + if scanErr := rows.Scan( + &item.ID, &item.TaskType, &item.Status, &item.Frame, &frameEnd, + &currentStep, &item.RetryCount, &errMsg, &startedAt, &completedAt, + ); scanErr != nil { + return nil, scanErr + } + if frameEnd.Valid { + v := int(frameEnd.Int64) + item.FrameEnd = &v + } + if currentStep.Valid { + item.CurrentStep = currentStep.String + } + if errMsg.Valid { + item.Error = errMsg.String + } + if startedAt.Valid { + item.StartedAt = &startedAt.Time + } + if completedAt.Valid { + item.CompletedAt = &completedAt.Time + } + out = append(out, item) + } + return out, nil +} + +func (s *Manager) listUIFiles(jobID int64, limit int) ([]uiFileSummary, error) { + var rows *sql.Rows + err := s.db.With(func(conn *sql.DB) error { + var qErr 
error + rows, qErr = conn.Query( + `SELECT id, file_name, file_type, file_size, created_at + FROM job_files WHERE job_id = ? ORDER BY created_at DESC LIMIT ?`, + jobID, limit, + ) + return qErr + }) + if err != nil { + return nil, err + } + defer rows.Close() + + out := make([]uiFileSummary, 0) + for rows.Next() { + var item uiFileSummary + if scanErr := rows.Scan(&item.ID, &item.FileName, &item.FileType, &item.FileSize, &item.CreatedAt); scanErr != nil { + return nil, scanErr + } + out = append(out, item) + } + return out, nil +} + +func parseBoolForm(r *http.Request, key string) bool { + v := strings.TrimSpace(strings.ToLower(r.FormValue(key))) + return v == "1" || v == "true" || v == "on" || v == "yes" +} + +func parseIntQuery(r *http.Request, key string, fallback int) int { + raw := strings.TrimSpace(r.URL.Query().Get(key)) + if raw == "" { + return fallback + } + v, err := strconv.Atoi(raw) + if err != nil || v < 0 { + return fallback + } + return v +} diff --git a/internal/manager/ui_test.go b/internal/manager/ui_test.go new file mode 100644 index 0000000..946a456 --- /dev/null +++ b/internal/manager/ui_test.go @@ -0,0 +1,37 @@ +package api + +import ( + "net/http/httptest" + "testing" +) + +func TestParseBoolForm(t *testing.T) { + req := httptest.NewRequest("POST", "/?flag=true", nil) + req.ParseForm() + req.Form.Set("enabled", "true") + if !parseBoolForm(req, "enabled") { + t.Fatalf("expected true for enabled=true") + } + + req.Form.Set("enabled", "no") + if parseBoolForm(req, "enabled") { + t.Fatalf("expected false for enabled=no") + } +} + +func TestParseIntQuery(t *testing.T) { + req := httptest.NewRequest("GET", "/?limit=42", nil) + if got := parseIntQuery(req, "limit", 10); got != 42 { + t.Fatalf("expected 42, got %d", got) + } + + req = httptest.NewRequest("GET", "/?limit=-1", nil) + if got := parseIntQuery(req, "limit", 10); got != 10 { + t.Fatalf("expected fallback 10, got %d", got) + } + + req = httptest.NewRequest("GET", "/?limit=abc", nil) + if 
got := parseIntQuery(req, "limit", 10); got != 10 { + t.Fatalf("expected fallback 10, got %d", got) + } +} diff --git a/internal/runner/api/manager.go b/internal/runner/api/manager.go index 9d1c437..c735dc3 100644 --- a/internal/runner/api/manager.go +++ b/internal/runner/api/manager.go @@ -196,7 +196,8 @@ type NextJobTaskInfo struct { TaskID int64 `json:"task_id"` JobID int64 `json:"job_id"` JobName string `json:"job_name"` - Frame int `json:"frame"` + Frame int `json:"frame"` // frame start (inclusive) + FrameEnd int `json:"frame_end"` // frame end (inclusive); same as Frame for single-frame TaskType string `json:"task_type"` Metadata *types.BlendMetadata `json:"metadata,omitempty"` } @@ -315,6 +316,28 @@ func (m *ManagerClient) GetJobMetadata(jobID int64) (*types.BlendMetadata, error return &metadata, nil } +// GetJobStatus retrieves the current status of a job. +func (m *ManagerClient) GetJobStatus(jobID int64) (types.JobStatus, error) { + path := fmt.Sprintf("/api/runner/jobs/%d/status?runner_id=%d", jobID, m.runnerID) + resp, err := m.Request("GET", path, nil) + if err != nil { + return "", err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(resp.Body) + return "", fmt.Errorf("failed to get job status: %s", string(body)) + } + + var job types.Job + if err := json.NewDecoder(resp.Body).Decode(&job); err != nil { + return "", err + } + + return job.Status, nil +} + // JobFile represents a file associated with a job. 
type JobFile struct { ID int64 `json:"id"` diff --git a/internal/runner/encoding/encoder.go b/internal/runner/encoding/encoder.go index dfdae9e..3d1f2aa 100644 --- a/internal/runner/encoding/encoder.go +++ b/internal/runner/encoding/encoder.go @@ -20,10 +20,8 @@ type EncodeConfig struct { StartFrame int // Starting frame number FrameRate float64 // Frame rate WorkDir string // Working directory - UseAlpha bool // Whether to preserve alpha channel - TwoPass bool // Whether to use 2-pass encoding - SourceFormat string // Source format: "exr" or "png" (defaults to "exr") - PreserveHDR bool // Whether to preserve HDR range for EXR (uses HLG with bt709 primaries) + UseAlpha bool // Whether to preserve alpha channel + TwoPass bool // Whether to use 2-pass encoding } // Selector selects the software encoder. diff --git a/internal/runner/encoding/encoders.go b/internal/runner/encoding/encoders.go index 49106f7..f24af6b 100644 --- a/internal/runner/encoding/encoders.go +++ b/internal/runner/encoding/encoders.go @@ -1,5 +1,7 @@ package encoding +// Pipeline: Blender outputs only EXR (linear). Encode is EXR only: linear -> sRGB -> HLG (video), 10-bit, full range. 
+ import ( "fmt" "log" @@ -56,97 +58,34 @@ func (e *SoftwareEncoder) Available() bool { } func (e *SoftwareEncoder) BuildCommand(config *EncodeConfig) *exec.Cmd { - // Use HDR pixel formats for EXR, SDR for PNG - var pixFmt string - var colorPrimaries, colorTrc, colorspace string - if config.SourceFormat == "png" { - // PNG: SDR format - pixFmt = "yuv420p" - if config.UseAlpha { - pixFmt = "yuva420p" - } - colorPrimaries = "bt709" - colorTrc = "bt709" - colorspace = "bt709" - } else { - // EXR: Use HDR encoding if PreserveHDR is true, otherwise SDR (like PNG) - if config.PreserveHDR { - // HDR: Use HLG transfer with bt709 primaries to preserve HDR range while matching PNG color - pixFmt = "yuv420p10le" // 10-bit to preserve HDR range - if config.UseAlpha { - pixFmt = "yuva420p10le" - } - colorPrimaries = "bt709" // bt709 primaries to match PNG color appearance - colorTrc = "arib-std-b67" // HLG transfer function - preserves HDR range, works on SDR displays - colorspace = "bt709" // bt709 colorspace to match PNG - } else { - // SDR: Treat as SDR (like PNG) - encode as bt709 - pixFmt = "yuv420p" - if config.UseAlpha { - pixFmt = "yuva420p" - } - colorPrimaries = "bt709" - colorTrc = "bt709" - colorspace = "bt709" - } + // EXR only: HDR path (HLG, 10-bit, full range) + pixFmt := "yuv420p10le" + if config.UseAlpha { + pixFmt = "yuva420p10le" } + colorPrimaries, colorTrc, colorspace, colorRange := "bt709", "arib-std-b67", "bt709", "pc" var codecArgs []string switch e.codec { case "libaom-av1": codecArgs = []string{"-crf", strconv.Itoa(CRFAV1), "-b:v", "0", "-tiles", "2x2", "-g", "240"} case "libvpx-vp9": - // VP9 supports alpha and HDR, use good quality settings codecArgs = []string{"-crf", strconv.Itoa(CRFVP9), "-b:v", "0", "-row-mt", "1", "-g", "240"} default: - // H.264: Use High 10 profile for HDR EXR (10-bit), High profile for SDR - if config.SourceFormat != "png" && config.PreserveHDR { - codecArgs = []string{"-preset", "veryslow", "-crf", strconv.Itoa(CRFH264), 
"-profile:v", "high10", "-level", "5.2", "-tune", "film", "-keyint_min", "24", "-g", "240", "-bf", "2", "-refs", "4"} - } else { - codecArgs = []string{"-preset", "veryslow", "-crf", strconv.Itoa(CRFH264), "-profile:v", "high", "-level", "5.2", "-tune", "film", "-keyint_min", "24", "-g", "240", "-bf", "2", "-refs", "4"} - } + codecArgs = []string{"-preset", "veryslow", "-crf", strconv.Itoa(CRFH264), "-profile:v", "high10", "-level", "5.2", "-tune", "film", "-keyint_min", "24", "-g", "240", "-bf", "2", "-refs", "4"} } - args := []string{ - "-y", - "-f", "image2", - "-start_number", fmt.Sprintf("%d", config.StartFrame), - "-framerate", fmt.Sprintf("%.2f", config.FrameRate), - "-i", config.InputPattern, - "-c:v", e.codec, - "-pix_fmt", pixFmt, - "-r", fmt.Sprintf("%.2f", config.FrameRate), - "-color_primaries", colorPrimaries, - "-color_trc", colorTrc, - "-colorspace", colorspace, - "-color_range", "tv", - } + args := []string{"-y", "-f", "image2", "-start_number", fmt.Sprintf("%d", config.StartFrame), "-framerate", fmt.Sprintf("%.2f", config.FrameRate), + "-color_trc", "linear", "-color_primaries", "bt709"} + args = append(args, "-i", config.InputPattern, "-c:v", e.codec, "-pix_fmt", pixFmt, "-r", fmt.Sprintf("%.2f", config.FrameRate), "-color_primaries", colorPrimaries, "-color_trc", colorTrc, "-colorspace", colorspace, "-color_range", colorRange) - // Add video filter for EXR: convert linear RGB based on HDR setting - // PNG doesn't need any filter as it's already in sRGB - if config.SourceFormat != "png" { - var vf string - if config.PreserveHDR { - // HDR: Convert linear RGB -> sRGB -> HLG with bt709 primaries - // This preserves HDR range while matching PNG color appearance - vf = "format=gbrpf32le,zscale=transferin=8:transfer=13:primariesin=1:primaries=1:matrixin=0:matrix=1:rangein=full:range=full,zscale=transferin=13:transfer=18:primariesin=1:primaries=1:matrixin=1:matrix=1:rangein=full:range=full" - if config.UseAlpha { - vf += ",format=yuva420p10le" - } else 
{ - vf += ",format=yuv420p10le" - } - } else { - // SDR: Convert linear RGB (EXR) to sRGB (bt709) - simple conversion like Krita does - // zscale: linear (8) -> sRGB (13) with bt709 primaries/matrix - vf = "format=gbrpf32le,zscale=transferin=8:transfer=13:primariesin=1:primaries=1:matrixin=0:matrix=1:rangein=full:range=full" - if config.UseAlpha { - vf += ",format=yuva420p" - } else { - vf += ",format=yuv420p" - } - } - args = append(args, "-vf", vf) + vf := "format=gbrpf32le,zscale=transferin=8:transfer=13:primariesin=1:primaries=1:matrixin=0:matrix=1:rangein=full:range=full,zscale=transferin=13:transfer=18:primariesin=1:primaries=1:matrixin=1:matrix=1:rangein=full:range=full" + if config.UseAlpha { + vf += ",format=yuva420p10le" + } else { + vf += ",format=yuv420p10le" } + args = append(args, "-vf", vf) args = append(args, codecArgs...) if config.TwoPass { @@ -168,97 +107,33 @@ func (e *SoftwareEncoder) BuildCommand(config *EncodeConfig) *exec.Cmd { // BuildPass1Command builds the first pass command for 2-pass encoding. 
func (e *SoftwareEncoder) BuildPass1Command(config *EncodeConfig) *exec.Cmd { - // Use HDR pixel formats for EXR, SDR for PNG - var pixFmt string - var colorPrimaries, colorTrc, colorspace string - if config.SourceFormat == "png" { - // PNG: SDR format - pixFmt = "yuv420p" - if config.UseAlpha { - pixFmt = "yuva420p" - } - colorPrimaries = "bt709" - colorTrc = "bt709" - colorspace = "bt709" - } else { - // EXR: Use HDR encoding if PreserveHDR is true, otherwise SDR (like PNG) - if config.PreserveHDR { - // HDR: Use HLG transfer with bt709 primaries to preserve HDR range while matching PNG color - pixFmt = "yuv420p10le" // 10-bit to preserve HDR range - if config.UseAlpha { - pixFmt = "yuva420p10le" - } - colorPrimaries = "bt709" // bt709 primaries to match PNG color appearance - colorTrc = "arib-std-b67" // HLG transfer function - preserves HDR range, works on SDR displays - colorspace = "bt709" // bt709 colorspace to match PNG - } else { - // SDR: Treat as SDR (like PNG) - encode as bt709 - pixFmt = "yuv420p" - if config.UseAlpha { - pixFmt = "yuva420p" - } - colorPrimaries = "bt709" - colorTrc = "bt709" - colorspace = "bt709" - } + pixFmt := "yuv420p10le" + if config.UseAlpha { + pixFmt = "yuva420p10le" } + colorPrimaries, colorTrc, colorspace, colorRange := "bt709", "arib-std-b67", "bt709", "pc" var codecArgs []string switch e.codec { case "libaom-av1": codecArgs = []string{"-crf", strconv.Itoa(CRFAV1), "-b:v", "0", "-tiles", "2x2", "-g", "240"} case "libvpx-vp9": - // VP9 supports alpha and HDR, use good quality settings codecArgs = []string{"-crf", strconv.Itoa(CRFVP9), "-b:v", "0", "-row-mt", "1", "-g", "240"} default: - // H.264: Use High 10 profile for HDR EXR (10-bit), High profile for SDR - if config.SourceFormat != "png" && config.PreserveHDR { - codecArgs = []string{"-preset", "veryslow", "-crf", strconv.Itoa(CRFH264), "-profile:v", "high10", "-level", "5.2", "-tune", "film", "-keyint_min", "24", "-g", "240", "-bf", "2", "-refs", "4"} - } else { - 
codecArgs = []string{"-preset", "veryslow", "-crf", strconv.Itoa(CRFH264), "-profile:v", "high", "-level", "5.2", "-tune", "film", "-keyint_min", "24", "-g", "240", "-bf", "2", "-refs", "4"} - } + codecArgs = []string{"-preset", "veryslow", "-crf", strconv.Itoa(CRFH264), "-profile:v", "high10", "-level", "5.2", "-tune", "film", "-keyint_min", "24", "-g", "240", "-bf", "2", "-refs", "4"} } - args := []string{ - "-y", - "-f", "image2", - "-start_number", fmt.Sprintf("%d", config.StartFrame), - "-framerate", fmt.Sprintf("%.2f", config.FrameRate), - "-i", config.InputPattern, - "-c:v", e.codec, - "-pix_fmt", pixFmt, - "-r", fmt.Sprintf("%.2f", config.FrameRate), - "-color_primaries", colorPrimaries, - "-color_trc", colorTrc, - "-colorspace", colorspace, - "-color_range", "tv", - } + args := []string{"-y", "-f", "image2", "-start_number", fmt.Sprintf("%d", config.StartFrame), "-framerate", fmt.Sprintf("%.2f", config.FrameRate), + "-color_trc", "linear", "-color_primaries", "bt709"} + args = append(args, "-i", config.InputPattern, "-c:v", e.codec, "-pix_fmt", pixFmt, "-r", fmt.Sprintf("%.2f", config.FrameRate), "-color_primaries", colorPrimaries, "-color_trc", colorTrc, "-colorspace", colorspace, "-color_range", colorRange) - // Add video filter for EXR: convert linear RGB based on HDR setting - // PNG doesn't need any filter as it's already in sRGB - if config.SourceFormat != "png" { - var vf string - if config.PreserveHDR { - // HDR: Convert linear RGB -> sRGB -> HLG with bt709 primaries - // This preserves HDR range while matching PNG color appearance - vf = "format=gbrpf32le,zscale=transferin=8:transfer=13:primariesin=1:primaries=1:matrixin=0:matrix=1:rangein=full:range=full,zscale=transferin=13:transfer=18:primariesin=1:primaries=1:matrixin=1:matrix=1:rangein=full:range=full" - if config.UseAlpha { - vf += ",format=yuva420p10le" - } else { - vf += ",format=yuv420p10le" - } - } else { - // SDR: Convert linear RGB (EXR) to sRGB (bt709) - simple conversion like Krita 
does - // zscale: linear (8) -> sRGB (13) with bt709 primaries/matrix - vf = "format=gbrpf32le,zscale=transferin=8:transfer=13:primariesin=1:primaries=1:matrixin=0:matrix=1:rangein=full:range=full" - if config.UseAlpha { - vf += ",format=yuva420p" - } else { - vf += ",format=yuv420p" - } - } - args = append(args, "-vf", vf) + vf := "format=gbrpf32le,zscale=transferin=8:transfer=13:primariesin=1:primaries=1:matrixin=0:matrix=1:rangein=full:range=full,zscale=transferin=13:transfer=18:primariesin=1:primaries=1:matrixin=1:matrix=1:rangein=full:range=full" + if config.UseAlpha { + vf += ",format=yuva420p10le" + } else { + vf += ",format=yuv420p10le" } + args = append(args, "-vf", vf) args = append(args, codecArgs...) args = append(args, "-pass", "1", "-f", "null", "/dev/null") diff --git a/internal/runner/encoding/encoders_test.go b/internal/runner/encoding/encoders_test.go index 9c0c917..8233c21 100644 --- a/internal/runner/encoding/encoders_test.go +++ b/internal/runner/encoding/encoders_test.go @@ -18,7 +18,6 @@ func TestSoftwareEncoder_BuildCommand_H264_EXR(t *testing.T) { WorkDir: "/tmp", UseAlpha: false, TwoPass: true, - SourceFormat: "exr", } cmd := encoder.BuildCommand(config) @@ -37,7 +36,7 @@ func TestSoftwareEncoder_BuildCommand_H264_EXR(t *testing.T) { args := cmd.Args[1:] // Skip "ffmpeg" argsStr := strings.Join(args, " ") - // Check required arguments + // EXR always uses HDR path: 10-bit, HLG, full range checks := []struct { name string expected string @@ -46,18 +45,19 @@ func TestSoftwareEncoder_BuildCommand_H264_EXR(t *testing.T) { {"image2 format", "-f image2"}, {"start number", "-start_number 1"}, {"framerate", "-framerate 24.00"}, + {"input color tag", "-color_trc linear"}, {"input pattern", "-i frame_%04d.exr"}, {"codec", "-c:v libx264"}, - {"pixel format", "-pix_fmt yuv420p"}, // EXR now treated as SDR (like PNG) + {"pixel format", "-pix_fmt yuv420p10le"}, {"frame rate", "-r 24.00"}, - {"color primaries", "-color_primaries bt709"}, // EXR now uses 
bt709 (SDR) - {"color trc", "-color_trc bt709"}, // EXR now uses bt709 (SDR) + {"color primaries", "-color_primaries bt709"}, + {"color trc", "-color_trc arib-std-b67"}, {"colorspace", "-colorspace bt709"}, - {"color range", "-color_range tv"}, + {"color range", "-color_range pc"}, {"video filter", "-vf"}, {"preset", "-preset veryslow"}, {"crf", "-crf 15"}, - {"profile", "-profile:v high"}, // EXR now uses high profile (SDR) + {"profile", "-profile:v high10"}, {"pass 2", "-pass 2"}, {"output path", "output.mp4"}, } @@ -68,40 +68,15 @@ func TestSoftwareEncoder_BuildCommand_H264_EXR(t *testing.T) { } } - // Verify filter is present for EXR (linear RGB to sRGB conversion, like Krita does) + // EXR: linear -> sRGB -> HLG filter if !strings.Contains(argsStr, "format=gbrpf32le") { t.Error("Expected format conversion filter for EXR source, but not found") } if !strings.Contains(argsStr, "zscale=transferin=8:transfer=13") { t.Error("Expected linear to sRGB conversion for EXR source, but not found") } -} - -func TestSoftwareEncoder_BuildCommand_H264_PNG(t *testing.T) { - encoder := &SoftwareEncoder{codec: "libx264"} - config := &EncodeConfig{ - InputPattern: "frame_%04d.png", - OutputPath: "output.mp4", - StartFrame: 1, - FrameRate: 24.0, - WorkDir: "/tmp", - UseAlpha: false, - TwoPass: true, - SourceFormat: "png", - } - - cmd := encoder.BuildCommand(config) - args := cmd.Args[1:] - argsStr := strings.Join(args, " ") - - // PNG should NOT have video filter - if strings.Contains(argsStr, "-vf") { - t.Error("PNG source should not have video filter, but -vf was found") - } - - // Should still have all other required args - if !strings.Contains(argsStr, "-c:v libx264") { - t.Error("Missing codec argument") + if !strings.Contains(argsStr, "transfer=18") { + t.Error("Expected sRGB to HLG conversion for EXR HDR, but not found") } } @@ -113,18 +88,17 @@ func TestSoftwareEncoder_BuildCommand_AV1_WithAlpha(t *testing.T) { StartFrame: 100, FrameRate: 30.0, WorkDir: "/tmp", - UseAlpha: 
true, - TwoPass: true, - SourceFormat: "exr", + UseAlpha: true, + TwoPass: true, } cmd := encoder.BuildCommand(config) args := cmd.Args[1:] argsStr := strings.Join(args, " ") - // Check alpha-specific settings - if !strings.Contains(argsStr, "-pix_fmt yuva420p") { - t.Error("Expected yuva420p pixel format for alpha, but not found") + // EXR with alpha: 10-bit HDR path + if !strings.Contains(argsStr, "-pix_fmt yuva420p10le") { + t.Error("Expected yuva420p10le pixel format for EXR alpha, but not found") } // Check AV1-specific arguments @@ -142,9 +116,9 @@ func TestSoftwareEncoder_BuildCommand_AV1_WithAlpha(t *testing.T) { } } - // Check tonemap filter includes alpha format - if !strings.Contains(argsStr, "format=yuva420p") { - t.Error("Expected tonemap filter to output yuva420p for alpha, but not found") + // Check tonemap filter includes alpha format (10-bit for EXR) + if !strings.Contains(argsStr, "format=yuva420p10le") { + t.Error("Expected tonemap filter to output yuva420p10le for EXR alpha, but not found") } } @@ -156,9 +130,8 @@ func TestSoftwareEncoder_BuildCommand_VP9(t *testing.T) { StartFrame: 1, FrameRate: 24.0, WorkDir: "/tmp", - UseAlpha: true, - TwoPass: true, - SourceFormat: "exr", + UseAlpha: true, + TwoPass: true, } cmd := encoder.BuildCommand(config) @@ -191,7 +164,6 @@ func TestSoftwareEncoder_BuildPass1Command(t *testing.T) { WorkDir: "/tmp", UseAlpha: false, TwoPass: true, - SourceFormat: "exr", } cmd := encoder.BuildPass1Command(config) @@ -227,7 +199,6 @@ func TestSoftwareEncoder_BuildPass1Command_AV1(t *testing.T) { WorkDir: "/tmp", UseAlpha: false, TwoPass: true, - SourceFormat: "exr", } cmd := encoder.BuildPass1Command(config) @@ -273,7 +244,6 @@ func TestSoftwareEncoder_BuildPass1Command_VP9(t *testing.T) { WorkDir: "/tmp", UseAlpha: false, TwoPass: true, - SourceFormat: "exr", } cmd := encoder.BuildPass1Command(config) @@ -319,7 +289,6 @@ func TestSoftwareEncoder_BuildCommand_NoTwoPass(t *testing.T) { WorkDir: "/tmp", UseAlpha: false, 
TwoPass: false, - SourceFormat: "exr", } cmd := encoder.BuildCommand(config) @@ -432,28 +401,6 @@ func TestSoftwareEncoder_Available(t *testing.T) { } } -func TestEncodeConfig_DefaultSourceFormat(t *testing.T) { - config := &EncodeConfig{ - InputPattern: "frame_%04d.exr", - OutputPath: "output.mp4", - StartFrame: 1, - FrameRate: 24.0, - WorkDir: "/tmp", - UseAlpha: false, - TwoPass: false, - // SourceFormat not set, should default to empty string (treated as exr) - } - - encoder := &SoftwareEncoder{codec: "libx264"} - cmd := encoder.BuildCommand(config) - args := strings.Join(cmd.Args[1:], " ") - - // Should still have tonemap filter when SourceFormat is empty (defaults to exr behavior) - if !strings.Contains(args, "-vf") { - t.Error("Empty SourceFormat should default to EXR behavior with tonemap filter") - } -} - func TestCommandOrder(t *testing.T) { encoder := &SoftwareEncoder{codec: "libx264"} config := &EncodeConfig{ @@ -464,7 +411,6 @@ func TestCommandOrder(t *testing.T) { WorkDir: "/tmp", UseAlpha: false, TwoPass: true, - SourceFormat: "exr", } cmd := encoder.BuildCommand(config) @@ -519,20 +465,18 @@ func TestCommand_ColorspaceMetadata(t *testing.T) { WorkDir: "/tmp", UseAlpha: false, TwoPass: false, - SourceFormat: "exr", - PreserveHDR: false, // SDR encoding } cmd := encoder.BuildCommand(config) args := cmd.Args[1:] argsStr := strings.Join(args, " ") - // Verify all SDR colorspace metadata is present for EXR (SDR encoding) + // EXR always uses HDR path: bt709 primaries, HLG, full range colorspaceArgs := []string{ - "-color_primaries bt709", // EXR uses bt709 (SDR) - "-color_trc bt709", // EXR uses bt709 (SDR) + "-color_primaries bt709", + "-color_trc arib-std-b67", "-colorspace bt709", - "-color_range tv", + "-color_range pc", } for _, arg := range colorspaceArgs { @@ -541,17 +485,11 @@ func TestCommand_ColorspaceMetadata(t *testing.T) { } } - // Verify SDR pixel format - if !strings.Contains(argsStr, "-pix_fmt yuv420p") { - t.Error("SDR encoding should 
use yuv420p pixel format") + if !strings.Contains(argsStr, "-pix_fmt yuv420p10le") { + t.Error("EXR encoding should use yuv420p10le pixel format") } - - // Verify H.264 high profile (not high10) - if !strings.Contains(argsStr, "-profile:v high") { - t.Error("SDR encoding should use high profile") - } - if strings.Contains(argsStr, "-profile:v high10") { - t.Error("SDR encoding should not use high10 profile") + if !strings.Contains(argsStr, "-profile:v high10") { + t.Error("EXR encoding should use high10 profile") } } @@ -565,20 +503,18 @@ func TestCommand_HDR_ColorspaceMetadata(t *testing.T) { WorkDir: "/tmp", UseAlpha: false, TwoPass: false, - SourceFormat: "exr", - PreserveHDR: true, // HDR encoding } cmd := encoder.BuildCommand(config) args := cmd.Args[1:] argsStr := strings.Join(args, " ") - // Verify all HDR colorspace metadata is present for EXR (HDR encoding) + // Verify all HDR colorspace metadata is present for EXR (full range to match zscale output) colorspaceArgs := []string{ - "-color_primaries bt709", // bt709 primaries to match PNG color appearance - "-color_trc arib-std-b67", // HLG transfer function for HDR/SDR compatibility - "-colorspace bt709", // bt709 colorspace to match PNG - "-color_range tv", + "-color_primaries bt709", + "-color_trc arib-std-b67", + "-colorspace bt709", + "-color_range pc", } for _, arg := range colorspaceArgs { @@ -656,7 +592,6 @@ func TestIntegration_Encode_EXR_H264(t *testing.T) { WorkDir: tmpDir, UseAlpha: false, TwoPass: false, // Use single pass for faster testing - SourceFormat: "exr", } // Build and run command @@ -687,77 +622,6 @@ func TestIntegration_Encode_EXR_H264(t *testing.T) { } } -func TestIntegration_Encode_PNG_H264(t *testing.T) { - if testing.Short() { - t.Skip("Skipping integration test in short mode") - } - - // Check if example file exists - exampleDir := filepath.Join("..", "..", "..", "examples") - pngFile := filepath.Join(exampleDir, "frame_0800.png") - if _, err := os.Stat(pngFile); 
os.IsNotExist(err) { - t.Skipf("Example file not found: %s", pngFile) - } - - // Get absolute paths - workspaceRoot, err := filepath.Abs(filepath.Join("..", "..", "..")) - if err != nil { - t.Fatalf("Failed to get workspace root: %v", err) - } - exampleDirAbs, err := filepath.Abs(exampleDir) - if err != nil { - t.Fatalf("Failed to get example directory: %v", err) - } - tmpDir := filepath.Join(workspaceRoot, "tmp") - if err := os.MkdirAll(tmpDir, 0755); err != nil { - t.Fatalf("Failed to create tmp directory: %v", err) - } - - encoder := &SoftwareEncoder{codec: "libx264"} - config := &EncodeConfig{ - InputPattern: filepath.Join(exampleDirAbs, "frame_%04d.png"), - OutputPath: filepath.Join(tmpDir, "test_png_h264.mp4"), - StartFrame: 800, - FrameRate: 24.0, - WorkDir: tmpDir, - UseAlpha: false, - TwoPass: false, // Use single pass for faster testing - SourceFormat: "png", - } - - // Build and run command - cmd := encoder.BuildCommand(config) - if cmd == nil { - t.Fatal("BuildCommand returned nil") - } - - // Verify no video filter is used for PNG - argsStr := strings.Join(cmd.Args, " ") - if strings.Contains(argsStr, "-vf") { - t.Error("PNG encoding should not use video filter, but -vf was found in command") - } - - // Run the command - cmdOutput, err := cmd.CombinedOutput() - if err != nil { - t.Errorf("FFmpeg command failed: %v\nCommand output: %s", err, string(cmdOutput)) - return - } - - // Verify output file was created - if _, err := os.Stat(config.OutputPath); os.IsNotExist(err) { - t.Errorf("Output file was not created: %s\nCommand output: %s", config.OutputPath, string(cmdOutput)) - } else { - t.Logf("Successfully created output file: %s", config.OutputPath) - info, _ := os.Stat(config.OutputPath) - if info.Size() == 0 { - t.Error("Output file was created but is empty") - } else { - t.Logf("Output file size: %d bytes", info.Size()) - } - } -} - func TestIntegration_Encode_EXR_VP9(t *testing.T) { if testing.Short() { t.Skip("Skipping integration test in short 
mode") @@ -800,7 +664,6 @@ func TestIntegration_Encode_EXR_VP9(t *testing.T) { WorkDir: tmpDir, UseAlpha: false, TwoPass: false, // Use single pass for faster testing - SourceFormat: "exr", } // Build and run command @@ -873,7 +736,6 @@ func TestIntegration_Encode_EXR_AV1(t *testing.T) { WorkDir: tmpDir, UseAlpha: false, TwoPass: false, - SourceFormat: "exr", } // Build and run command @@ -940,7 +802,6 @@ func TestIntegration_Encode_EXR_VP9_WithAlpha(t *testing.T) { WorkDir: tmpDir, UseAlpha: true, // Test with alpha TwoPass: false, // Use single pass for faster testing - SourceFormat: "exr", } // Build and run command diff --git a/internal/runner/runner.go b/internal/runner/runner.go index cb7e816..0b0ad33 100644 --- a/internal/runner/runner.go +++ b/internal/runner/runner.go @@ -4,6 +4,7 @@ package runner import ( "crypto/sha256" "encoding/hex" + "errors" "fmt" "log" "net" @@ -182,6 +183,24 @@ func (r *Runner) Cleanup() { } } +func (r *Runner) withJobWorkspace(jobID int64, fn func(workDir string) error) error { + workDir, err := r.workspace.CreateJobDir(jobID) + if err != nil { + return fmt.Errorf("failed to create job workspace: %w", err) + } + + defer func() { + if cleanupErr := r.workspace.CleanupJobDir(jobID); cleanupErr != nil { + log.Printf("Warning: failed to cleanup job workspace for job %d: %v", jobID, cleanupErr) + } + if cleanupErr := r.workspace.CleanupVideoDir(jobID); cleanupErr != nil { + log.Printf("Warning: failed to cleanup encode workspace for job %d: %v", jobID, cleanupErr) + } + }() + + return fn(workDir) +} + // executeJob handles a job using per-job WebSocket connection. 
func (r *Runner) executeJob(job *api.NextJobResponse) (err error) { // Recover from panics to prevent runner process crashes during task execution @@ -192,72 +211,82 @@ func (r *Runner) executeJob(job *api.NextJobResponse) (err error) { } }() - // Connect to job WebSocket (no runnerID needed - authentication handles it) - jobConn := api.NewJobConnection() - if err := jobConn.Connect(r.manager.GetBaseURL(), job.JobPath, job.JobToken); err != nil { - return fmt.Errorf("failed to connect job WebSocket: %w", err) - } - defer jobConn.Close() - - log.Printf("Job WebSocket authenticated for task %d", job.Task.TaskID) - - // Create task context - workDir := r.workspace.JobDir(job.Task.JobID) - ctx := tasks.NewContext( - job.Task.TaskID, - job.Task.JobID, - job.Task.JobName, - job.Task.Frame, - job.Task.TaskType, - workDir, - job.JobToken, - job.Task.Metadata, - r.manager, - jobConn, - r.workspace, - r.blender, - r.encoder, - r.processes, - ) - - ctx.Info(fmt.Sprintf("Task assignment received (job: %d, type: %s)", - job.Task.JobID, job.Task.TaskType)) - - // Get processor for task type - processor, ok := r.processors[job.Task.TaskType] - if !ok { - return fmt.Errorf("unknown task type: %s", job.Task.TaskType) - } - - // Process the task - var processErr error - switch job.Task.TaskType { - case "render": // this task has a upload outputs step because the frames are not uploaded by the render task directly we have to do it manually here TODO: maybe we should make it work like the encode task - // Download context - contextPath := job.JobPath + "/context.tar" - if err := r.downloadContext(job.Task.JobID, contextPath, job.JobToken); err != nil { - jobConn.Log(job.Task.TaskID, types.LogLevelError, fmt.Sprintf("Failed to download context: %v", err)) - jobConn.Complete(job.Task.TaskID, false, fmt.Errorf("failed to download context: %v", err)) - return fmt.Errorf("failed to download context: %w", err) + return r.withJobWorkspace(job.Task.JobID, func(workDir string) error { + // 
Connect to job WebSocket (no runnerID needed - authentication handles it) + jobConn := api.NewJobConnection() + if err := jobConn.Connect(r.manager.GetBaseURL(), job.JobPath, job.JobToken); err != nil { + return fmt.Errorf("failed to connect job WebSocket: %w", err) } - processErr = processor.Process(ctx) - if processErr == nil { - processErr = r.uploadOutputs(ctx, job) + defer jobConn.Close() + + log.Printf("Job WebSocket authenticated for task %d", job.Task.TaskID) + + // Create task context (frame range: Frame = start, FrameEnd = end; 0 or missing = single frame) + frameEnd := job.Task.FrameEnd + if frameEnd < job.Task.Frame { + frameEnd = job.Task.Frame } - case "encode": // this task doesn't have a upload outputs step because the video is already uploaded by the encode task - processErr = processor.Process(ctx) - default: - return fmt.Errorf("unknown task type: %s", job.Task.TaskType) - } + ctx := tasks.NewContext( + job.Task.TaskID, + job.Task.JobID, + job.Task.JobName, + job.Task.Frame, + frameEnd, + job.Task.TaskType, + workDir, + job.JobToken, + job.Task.Metadata, + r.manager, + jobConn, + r.workspace, + r.blender, + r.encoder, + r.processes, + ) - if processErr != nil { - ctx.Error(fmt.Sprintf("Task failed: %v", processErr)) - ctx.Complete(false, processErr) - return processErr - } + ctx.Info(fmt.Sprintf("Task assignment received (job: %d, type: %s)", + job.Task.JobID, job.Task.TaskType)) - ctx.Complete(true, nil) - return nil + // Get processor for task type + processor, ok := r.processors[job.Task.TaskType] + if !ok { + return fmt.Errorf("unknown task type: %s", job.Task.TaskType) + } + + // Process the task + var processErr error + switch job.Task.TaskType { + case "render": // this task has a upload outputs step because the frames are not uploaded by the render task directly we have to do it manually here TODO: maybe we should make it work like the encode task + // Download context + contextPath := job.JobPath + "/context.tar" + if err := 
r.downloadContext(job.Task.JobID, contextPath, job.JobToken); err != nil { + jobConn.Log(job.Task.TaskID, types.LogLevelError, fmt.Sprintf("Failed to download context: %v", err)) + jobConn.Complete(job.Task.TaskID, false, fmt.Errorf("failed to download context: %v", err)) + return fmt.Errorf("failed to download context: %w", err) + } + processErr = processor.Process(ctx) + if processErr == nil { + processErr = r.uploadOutputs(ctx, job) + } + case "encode": // this task doesn't have a upload outputs step because the video is already uploaded by the encode task + processErr = processor.Process(ctx) + default: + return fmt.Errorf("unknown task type: %s", job.Task.TaskType) + } + + if processErr != nil { + if errors.Is(processErr, tasks.ErrJobCancelled) { + ctx.Warn("Stopping task early because the job was cancelled") + return nil + } + ctx.Error(fmt.Sprintf("Task failed: %v", processErr)) + ctx.Complete(false, processErr) + return processErr + } + + ctx.Complete(true, nil) + return nil + }) } func (r *Runner) downloadContext(jobID int64, contextPath, jobToken string) error { diff --git a/internal/runner/tasks/encode.go b/internal/runner/tasks/encode.go index f0d6984..590400e 100644 --- a/internal/runner/tasks/encode.go +++ b/internal/runner/tasks/encode.go @@ -12,6 +12,7 @@ import ( "regexp" "sort" "strings" + "sync" "jiggablend/internal/runner/encoding" ) @@ -26,6 +27,10 @@ func NewEncodeProcessor() *EncodeProcessor { // Process executes an encode task. 
func (p *EncodeProcessor) Process(ctx *Context) error { + if err := ctx.CheckCancelled(); err != nil { + return err + } + ctx.Info(fmt.Sprintf("Starting encode task: job %d", ctx.JobID)) log.Printf("Processing encode task %d for job %d", ctx.TaskID, ctx.JobID) @@ -64,23 +69,18 @@ func (p *EncodeProcessor) Process(ctx *Context) error { ctx.Info(fmt.Sprintf("File: %s (type: %s, size: %d)", file.FileName, file.FileType, file.FileSize)) } - // Determine source format based on output format - sourceFormat := "exr" + // Encode from EXR frames only fileExt := ".exr" - - // Find and deduplicate frame files (EXR or PNG) frameFileSet := make(map[string]bool) var frameFilesList []string for _, file := range files { if file.FileType == "output" && strings.HasSuffix(strings.ToLower(file.FileName), fileExt) { - // Deduplicate by filename if !frameFileSet[file.FileName] { frameFileSet[file.FileName] = true frameFilesList = append(frameFilesList, file.FileName) } } } - if len(frameFilesList) == 0 { // Log why no files matched (deduplicate for error reporting) outputFileSet := make(map[string]bool) @@ -103,37 +103,61 @@ func (p *EncodeProcessor) Process(ctx *Context) error { } } } - ctx.Error(fmt.Sprintf("no %s frame files found for encode: found %d total files, %d unique output files, %d unique %s files (with other types)", strings.ToUpper(fileExt[1:]), len(files), len(outputFiles), len(frameFilesOtherType), strings.ToUpper(fileExt[1:]))) + ctx.Error(fmt.Sprintf("no EXR frame files found for encode: found %d total files, %d unique output files, %d unique EXR files (with other types)", len(files), len(outputFiles), len(frameFilesOtherType))) if len(outputFiles) > 0 { ctx.Error(fmt.Sprintf("Output files found: %v", outputFiles)) } if len(frameFilesOtherType) > 0 { - ctx.Error(fmt.Sprintf("%s files with wrong type: %v", strings.ToUpper(fileExt[1:]), frameFilesOtherType)) + ctx.Error(fmt.Sprintf("EXR files with wrong type: %v", frameFilesOtherType)) } - err := fmt.Errorf("no %s frame 
files found for encode", strings.ToUpper(fileExt[1:])) + err := fmt.Errorf("no EXR frame files found for encode") return err } - ctx.Info(fmt.Sprintf("Found %d %s frames for encode", len(frameFilesList), strings.ToUpper(fileExt[1:]))) + ctx.Info(fmt.Sprintf("Found %d EXR frames for encode", len(frameFilesList))) - // Download frames - ctx.Info(fmt.Sprintf("Downloading %d %s frames for encode...", len(frameFilesList), strings.ToUpper(fileExt[1:]))) + // Download frames with bounded parallelism (8 concurrent downloads) + const downloadWorkers = 8 + ctx.Info(fmt.Sprintf("Downloading %d EXR frames for encode...", len(frameFilesList))) + + type result struct { + path string + err error + } + results := make([]result, len(frameFilesList)) + var wg sync.WaitGroup + sem := make(chan struct{}, downloadWorkers) + for i, fileName := range frameFilesList { + wg.Add(1) + go func(i int, fileName string) { + defer wg.Done() + sem <- struct{}{} + defer func() { <-sem }() + framePath := filepath.Join(workDir, fileName) + err := ctx.Manager.DownloadFrame(ctx.JobID, fileName, framePath) + if err != nil { + ctx.Error(fmt.Sprintf("Failed to download EXR frame %s: %v", fileName, err)) + log.Printf("Failed to download EXR frame for encode %s: %v", fileName, err) + results[i] = result{"", err} + return + } + results[i] = result{framePath, nil} + }(i, fileName) + } + wg.Wait() var frameFiles []string - for i, fileName := range frameFilesList { - ctx.Info(fmt.Sprintf("Downloading frame %d/%d: %s", i+1, len(frameFilesList), fileName)) - framePath := filepath.Join(workDir, fileName) - if err := ctx.Manager.DownloadFrame(ctx.JobID, fileName, framePath); err != nil { - ctx.Error(fmt.Sprintf("Failed to download %s frame %s: %v", strings.ToUpper(fileExt[1:]), fileName, err)) - log.Printf("Failed to download %s frame for encode %s: %v", strings.ToUpper(fileExt[1:]), fileName, err) - continue + for _, r := range results { + if r.err == nil && r.path != "" { + frameFiles = append(frameFiles, r.path) 
} - ctx.Info(fmt.Sprintf("Successfully downloaded frame %d/%d: %s", i+1, len(frameFilesList), fileName)) - frameFiles = append(frameFiles, framePath) + } + if err := ctx.CheckCancelled(); err != nil { + return err } if len(frameFiles) == 0 { - err := fmt.Errorf("failed to download any %s frames for encode", strings.ToUpper(fileExt[1:])) + err := fmt.Errorf("failed to download any EXR frames for encode") ctx.Error(err.Error()) return err } @@ -141,11 +165,9 @@ func (p *EncodeProcessor) Process(ctx *Context) error { sort.Strings(frameFiles) ctx.Info(fmt.Sprintf("Downloaded %d frames", len(frameFiles))) - // Check if EXR files have alpha channel and HDR content (only for EXR source format) + // Check if EXR files have alpha channel (for encode decision) hasAlpha := false - hasHDR := false - if sourceFormat == "exr" { - // Check first frame for alpha channel and HDR using ffprobe + { firstFrame := frameFiles[0] hasAlpha = detectAlphaChannel(ctx, firstFrame) if hasAlpha { @@ -153,45 +175,28 @@ func (p *EncodeProcessor) Process(ctx *Context) error { } else { ctx.Info("No alpha channel detected in EXR files") } - - hasHDR = detectHDR(ctx, firstFrame) - if hasHDR { - ctx.Info("Detected HDR content in EXR files") - } else { - ctx.Info("No HDR content detected in EXR files (SDR range)") - } } // Generate video - // Use alpha if: - // 1. User explicitly enabled it OR source has alpha channel AND - // 2. Codec supports alpha (AV1 or VP9) - preserveAlpha := ctx.ShouldPreserveAlpha() - useAlpha := (preserveAlpha || hasAlpha) && (outputFormat == "EXR_AV1_MP4" || outputFormat == "EXR_VP9_WEBM") - if (preserveAlpha || hasAlpha) && outputFormat == "EXR_264_MP4" { - ctx.Warn("Alpha channel requested/detected but H.264 does not support alpha. 
Consider using EXR_AV1_MP4 or EXR_VP9_WEBM to preserve alpha.") - } - if preserveAlpha && !hasAlpha { - ctx.Warn("Alpha preservation requested but no alpha channel detected in EXR files.") + // Use alpha when source EXR has alpha and codec supports it (AV1 or VP9). H.264 does not support alpha. + useAlpha := hasAlpha && (outputFormat == "EXR_AV1_MP4" || outputFormat == "EXR_VP9_WEBM") + if hasAlpha && outputFormat == "EXR_264_MP4" { + ctx.Warn("Alpha channel detected in EXR but H.264 does not support alpha. Use EXR_AV1_MP4 or EXR_VP9_WEBM to preserve alpha in video.") } if useAlpha { - if preserveAlpha && hasAlpha { - ctx.Info("Alpha preservation enabled: Using alpha channel encoding") - } else if hasAlpha { - ctx.Info("Alpha channel detected - automatically enabling alpha encoding") - } + ctx.Info("Alpha channel detected - encoding with alpha (AV1/VP9)") } var outputExt string switch outputFormat { case "EXR_VP9_WEBM": outputExt = "webm" - ctx.Info("Encoding WebM video with VP9 codec (with alpha channel and HDR support)...") + ctx.Info("Encoding WebM video with VP9 codec (alpha, HDR)...") case "EXR_AV1_MP4": outputExt = "mp4" - ctx.Info("Encoding MP4 video with AV1 codec (with alpha channel)...") + ctx.Info("Encoding MP4 video with AV1 codec (alpha, HDR)...") default: outputExt = "mp4" - ctx.Info("Encoding MP4 video with H.264 codec...") + ctx.Info("Encoding MP4 video with H.264 codec (HDR, HLG)...") } outputVideo := filepath.Join(workDir, fmt.Sprintf("output_%d.%s", ctx.JobID, outputExt)) @@ -231,11 +236,6 @@ func (p *EncodeProcessor) Process(ctx *Context) error { // Pass 1 ctx.Info("Pass 1/2: Analyzing content for optimal encode...") softEncoder := encoder.(*encoding.SoftwareEncoder) - // Use HDR if: user explicitly enabled it OR HDR content was detected - preserveHDR := (ctx.ShouldPreserveHDR() || hasHDR) && sourceFormat == "exr" - if hasHDR && !ctx.ShouldPreserveHDR() { - ctx.Info("HDR content detected - automatically enabling HDR preservation") - } pass1Cmd 
:= softEncoder.BuildPass1Command(&encoding.EncodeConfig{ InputPattern: patternPath, OutputPath: outputVideo, @@ -244,8 +244,6 @@ func (p *EncodeProcessor) Process(ctx *Context) error { WorkDir: workDir, UseAlpha: useAlpha, TwoPass: true, - SourceFormat: sourceFormat, - PreserveHDR: preserveHDR, }) if err := pass1Cmd.Run(); err != nil { ctx.Warn(fmt.Sprintf("Pass 1 completed (warnings expected): %v", err)) @@ -254,15 +252,6 @@ func (p *EncodeProcessor) Process(ctx *Context) error { // Pass 2 ctx.Info("Pass 2/2: Encoding with optimal quality...") - preserveHDR = (ctx.ShouldPreserveHDR() || hasHDR) && sourceFormat == "exr" - if preserveHDR { - if hasHDR && !ctx.ShouldPreserveHDR() { - ctx.Info("HDR preservation enabled (auto-detected): Using HLG transfer with bt709 primaries") - } else { - ctx.Info("HDR preservation enabled: Using HLG transfer with bt709 primaries") - } - } - config := &encoding.EncodeConfig{ InputPattern: patternPath, OutputPath: outputVideo, @@ -271,8 +260,6 @@ func (p *EncodeProcessor) Process(ctx *Context) error { WorkDir: workDir, UseAlpha: useAlpha, TwoPass: true, // Software encoding always uses 2-pass for quality - SourceFormat: sourceFormat, - PreserveHDR: preserveHDR, } cmd := encoder.BuildCommand(config) @@ -294,6 +281,8 @@ func (p *EncodeProcessor) Process(ctx *Context) error { if err := cmd.Start(); err != nil { return fmt.Errorf("failed to start encode command: %w", err) } + stopMonitor := ctx.StartCancellationMonitor(cmd, "encode") + defer stopMonitor() ctx.Processes.Track(ctx.TaskID, cmd) defer ctx.Processes.Untrack(ctx.TaskID) @@ -329,6 +318,9 @@ func (p *EncodeProcessor) Process(ctx *Context) error { <-stderrDone if err != nil { + if cancelled, checkErr := ctx.IsJobCancelled(); checkErr == nil && cancelled { + return ErrJobCancelled + } var errMsg string if exitErr, ok := err.(*exec.ExitError); ok { if exitErr.ExitCode() == 137 { diff --git a/internal/runner/tasks/processor.go b/internal/runner/tasks/processor.go index 
5b0e32f..83d68ca 100644 --- a/internal/runner/tasks/processor.go +++ b/internal/runner/tasks/processor.go @@ -2,12 +2,17 @@ package tasks import ( + "errors" + "fmt" "jiggablend/internal/runner/api" "jiggablend/internal/runner/blender" "jiggablend/internal/runner/encoding" "jiggablend/internal/runner/workspace" "jiggablend/pkg/executils" "jiggablend/pkg/types" + "os/exec" + "sync" + "time" ) // Processor handles a specific task type. @@ -20,7 +25,8 @@ type Context struct { TaskID int64 JobID int64 JobName string - Frame int + Frame int // frame start (inclusive); kept for backward compat + FrameEnd int // frame end (inclusive); same as Frame for single-frame TaskType string WorkDir string JobToken string @@ -34,11 +40,14 @@ type Context struct { Processes *executils.ProcessTracker } -// NewContext creates a new task context. +// ErrJobCancelled indicates the manager-side job was cancelled during execution. +var ErrJobCancelled = errors.New("job cancelled") + +// NewContext creates a new task context. frameEnd should be >= frame; if 0 or less than frame, it is treated as single-frame (frameEnd = frame). func NewContext( taskID, jobID int64, jobName string, - frame int, + frameStart, frameEnd int, taskType string, workDir string, jobToken string, @@ -50,11 +59,15 @@ func NewContext( encoder *encoding.Selector, processes *executils.ProcessTracker, ) *Context { + if frameEnd < frameStart { + frameEnd = frameStart + } return &Context{ TaskID: taskID, JobID: jobID, JobName: jobName, - Frame: frame, + Frame: frameStart, + FrameEnd: frameEnd, TaskType: taskType, WorkDir: workDir, JobToken: jobToken, @@ -145,12 +158,65 @@ func (c *Context) ShouldEnableExecution() bool { return c.Metadata != nil && c.Metadata.EnableExecution != nil && *c.Metadata.EnableExecution } -// ShouldPreserveHDR returns whether to preserve HDR range for EXR encoding. 
-func (c *Context) ShouldPreserveHDR() bool { - return c.Metadata != nil && c.Metadata.PreserveHDR != nil && *c.Metadata.PreserveHDR +// IsJobCancelled checks whether the manager marked this job as cancelled. +func (c *Context) IsJobCancelled() (bool, error) { + if c.Manager == nil { + return false, nil + } + status, err := c.Manager.GetJobStatus(c.JobID) + if err != nil { + return false, err + } + return status == types.JobStatusCancelled, nil } -// ShouldPreserveAlpha returns whether to preserve alpha channel for EXR encoding. -func (c *Context) ShouldPreserveAlpha() bool { - return c.Metadata != nil && c.Metadata.PreserveAlpha != nil && *c.Metadata.PreserveAlpha +// CheckCancelled returns ErrJobCancelled if the job was cancelled. +func (c *Context) CheckCancelled() error { + cancelled, err := c.IsJobCancelled() + if err != nil { + return fmt.Errorf("failed to check job status: %w", err) + } + if cancelled { + return ErrJobCancelled + } + return nil +} + +// StartCancellationMonitor polls manager status and kills cmd if job is cancelled. +// Caller must invoke returned stop function when cmd exits. 
+func (c *Context) StartCancellationMonitor(cmd *exec.Cmd, taskLabel string) func() { + stop := make(chan struct{}) + var once sync.Once + + go func() { + ticker := time.NewTicker(2 * time.Second) + defer ticker.Stop() + + for { + select { + case <-stop: + return + case <-ticker.C: + cancelled, err := c.IsJobCancelled() + if err != nil { + c.Warn(fmt.Sprintf("Could not check cancellation for %s task: %v", taskLabel, err)) + continue + } + if !cancelled { + continue + } + c.Warn(fmt.Sprintf("Job %d was cancelled, stopping %s task early", c.JobID, taskLabel)) + if cmd != nil && cmd.Process != nil { + _ = cmd.Process.Kill() + } + return + } + } + }() + + return func() { + once.Do(func() { + close(stop) + }) + } } diff --git a/internal/runner/tasks/render.go b/internal/runner/tasks/render.go index a7d5b48..4e9460f 100644 --- a/internal/runner/tasks/render.go +++ b/internal/runner/tasks/render.go @@ -27,9 +27,19 @@ func NewRenderProcessor() *RenderProcessor { // Process executes a render task. 
func (p *RenderProcessor) Process(ctx *Context) error { - ctx.Info(fmt.Sprintf("Starting task: job %d, frame %d, format: %s", - ctx.JobID, ctx.Frame, ctx.GetOutputFormat())) - log.Printf("Processing task %d: job %d, frame %d", ctx.TaskID, ctx.JobID, ctx.Frame) + if err := ctx.CheckCancelled(); err != nil { + return err + } + + if ctx.FrameEnd > ctx.Frame { + ctx.Info(fmt.Sprintf("Starting task: job %d, frames %d-%d, format: %s", + ctx.JobID, ctx.Frame, ctx.FrameEnd, ctx.GetOutputFormat())) + log.Printf("Processing task %d: job %d, frames %d-%d", ctx.TaskID, ctx.JobID, ctx.Frame, ctx.FrameEnd) + } else { + ctx.Info(fmt.Sprintf("Starting task: job %d, frame %d, format: %s", + ctx.JobID, ctx.Frame, ctx.GetOutputFormat())) + log.Printf("Processing task %d: job %d, frame %d", ctx.TaskID, ctx.JobID, ctx.Frame) + } // Find .blend file blendFile, err := workspace.FindFirstBlendFile(ctx.WorkDir) @@ -64,12 +74,8 @@ func (p *RenderProcessor) Process(ctx *Context) error { return fmt.Errorf("failed to create Blender home directory: %w", err) } - // Determine render format - outputFormat := ctx.GetOutputFormat() - renderFormat := outputFormat - if outputFormat == "EXR_264_MP4" || outputFormat == "EXR_AV1_MP4" || outputFormat == "EXR_VP9_WEBM" { - renderFormat = "EXR" // Use EXR for maximum quality - } + // We always render EXR (linear) for VFX accuracy; job output_format is the deliverable (EXR sequence or video). 
+ renderFormat := "EXR" // Create render script if err := p.createRenderScript(ctx, renderFormat); err != nil { @@ -77,18 +83,30 @@ func (p *RenderProcessor) Process(ctx *Context) error { } // Render - ctx.Info(fmt.Sprintf("Starting Blender render for frame %d...", ctx.Frame)) + if ctx.FrameEnd > ctx.Frame { + ctx.Info(fmt.Sprintf("Starting Blender render for frames %d-%d...", ctx.Frame, ctx.FrameEnd)) + } else { + ctx.Info(fmt.Sprintf("Starting Blender render for frame %d...", ctx.Frame)) + } if err := p.runBlender(ctx, blenderBinary, blendFile, outputDir, renderFormat, blenderHome); err != nil { + if errors.Is(err, ErrJobCancelled) { + ctx.Warn("Render stopped because job was cancelled") + return err + } ctx.Error(fmt.Sprintf("Blender render failed: %v", err)) return err } - // Verify output - if _, err := p.findOutputFile(ctx, outputDir, renderFormat); err != nil { + // Verify output (range or single frame) + if err := p.verifyOutputRange(ctx, outputDir, renderFormat); err != nil { ctx.Error(fmt.Sprintf("Output verification failed: %v", err)) return err } - ctx.Info(fmt.Sprintf("Blender render completed for frame %d", ctx.Frame)) + if ctx.FrameEnd > ctx.Frame { + ctx.Info(fmt.Sprintf("Blender render completed for frames %d-%d", ctx.Frame, ctx.FrameEnd)) + } else { + ctx.Info(fmt.Sprintf("Blender render completed for frame %d", ctx.Frame)) + } return nil } @@ -116,10 +134,9 @@ func (p *RenderProcessor) createRenderScript(ctx *Context, renderFormat string) return errors.New(errMsg) } - // Write output format - outputFormat := ctx.GetOutputFormat() - ctx.Info(fmt.Sprintf("Writing output format '%s' to format file", outputFormat)) - if err := os.WriteFile(formatFilePath, []byte(outputFormat), 0644); err != nil { + // Write EXR to format file so Blender script sets OPEN_EXR (job output_format is for downstream deliverable only). 
+ ctx.Info("Writing output format 'EXR' to format file") + if err := os.WriteFile(formatFilePath, []byte("EXR"), 0644); err != nil { errMsg := fmt.Sprintf("failed to create format file: %v", err) ctx.Error(errMsg) return errors.New(errMsg) @@ -151,7 +168,12 @@ func (p *RenderProcessor) runBlender(ctx *Context, blenderBinary, blendFile, out outputAbsPattern, _ := filepath.Abs(outputPattern) args = append(args, "-o", outputAbsPattern) - args = append(args, "-f", fmt.Sprintf("%d", ctx.Frame)) + // Render single frame or range: -f N for one frame, -s start -e end -a for range + if ctx.FrameEnd > ctx.Frame { + args = append(args, "-s", fmt.Sprintf("%d", ctx.Frame), "-e", fmt.Sprintf("%d", ctx.FrameEnd), "-a") + } else { + args = append(args, "-f", fmt.Sprintf("%d", ctx.Frame)) + } // Wrap with xvfb-run xvfbArgs := []string{"-a", "-s", "-screen 0 800x600x24", blenderBinary} @@ -185,6 +207,8 @@ func (p *RenderProcessor) runBlender(ctx *Context, blenderBinary, blendFile, out if err := cmd.Start(); err != nil { return fmt.Errorf("failed to start blender: %w", err) } + stopMonitor := ctx.StartCancellationMonitor(cmd, "render") + defer stopMonitor() // Track process ctx.Processes.Track(ctx.TaskID, cmd) @@ -231,6 +255,9 @@ func (p *RenderProcessor) runBlender(ctx *Context, blenderBinary, blendFile, out <-stderrDone if err != nil { + if cancelled, checkErr := ctx.IsJobCancelled(); checkErr == nil && cancelled { + return ErrJobCancelled + } if exitErr, ok := err.(*exec.ExitError); ok { if exitErr.ExitCode() == 137 { return errors.New("Blender was killed due to excessive memory usage (OOM)") @@ -242,60 +269,64 @@ func (p *RenderProcessor) runBlender(ctx *Context, blenderBinary, blendFile, out return nil } -func (p *RenderProcessor) findOutputFile(ctx *Context, outputDir, renderFormat string) (string, error) { +// verifyOutputRange checks that output files exist for the task's frame range (first and last at minimum). 
+func (p *RenderProcessor) verifyOutputRange(ctx *Context, outputDir, renderFormat string) error { entries, err := os.ReadDir(outputDir) if err != nil { - return "", fmt.Errorf("failed to read output directory: %w", err) + return fmt.Errorf("failed to read output directory: %w", err) } ctx.Info("Checking output directory for files...") + ext := strings.ToLower(renderFormat) - // Try exact match first - expectedFile := filepath.Join(outputDir, fmt.Sprintf("frame_%04d.%s", ctx.Frame, strings.ToLower(renderFormat))) - if _, err := os.Stat(expectedFile); err == nil { - ctx.Info(fmt.Sprintf("Found output file: %s", filepath.Base(expectedFile))) - return expectedFile, nil + // Check first and last frame in range (minimum required for range; single frame = one check) + framesToCheck := []int{ctx.Frame} + if ctx.FrameEnd > ctx.Frame { + framesToCheck = append(framesToCheck, ctx.FrameEnd) } - - // Try without zero padding - altFile := filepath.Join(outputDir, fmt.Sprintf("frame_%d.%s", ctx.Frame, strings.ToLower(renderFormat))) - if _, err := os.Stat(altFile); err == nil { - ctx.Info(fmt.Sprintf("Found output file: %s", filepath.Base(altFile))) - return altFile, nil - } - - // Try just frame number - altFile2 := filepath.Join(outputDir, fmt.Sprintf("%04d.%s", ctx.Frame, strings.ToLower(renderFormat))) - if _, err := os.Stat(altFile2); err == nil { - ctx.Info(fmt.Sprintf("Found output file: %s", filepath.Base(altFile2))) - return altFile2, nil - } - - // Search through all files - for _, entry := range entries { - if !entry.IsDir() { - fileName := entry.Name() - if strings.Contains(fileName, "%04d") || strings.Contains(fileName, "%d") { - ctx.Warn(fmt.Sprintf("Skipping file with literal pattern: %s", fileName)) - continue - } - frameStr := fmt.Sprintf("%d", ctx.Frame) - frameStrPadded := fmt.Sprintf("%04d", ctx.Frame) - if strings.Contains(fileName, frameStrPadded) || - (strings.Contains(fileName, frameStr) && strings.HasSuffix(strings.ToLower(fileName), 
strings.ToLower(renderFormat))) { - outputFile := filepath.Join(outputDir, fileName) - ctx.Info(fmt.Sprintf("Found output file: %s", fileName)) - return outputFile, nil + for _, frame := range framesToCheck { + found := false + // Try frame_0001.ext, frame_1.ext, 0001.ext + for _, name := range []string{ + fmt.Sprintf("frame_%04d.%s", frame, ext), + fmt.Sprintf("frame_%d.%s", frame, ext), + fmt.Sprintf("%04d.%s", frame, ext), + } { + if _, err := os.Stat(filepath.Join(outputDir, name)); err == nil { + found = true + ctx.Info(fmt.Sprintf("Found output file: %s", name)) + break } } - } - - // Not found - fileList := []string{} - for _, entry := range entries { - if !entry.IsDir() { - fileList = append(fileList, entry.Name()) + if !found { + // Search entries for this frame number + frameStr := fmt.Sprintf("%d", frame) + frameStrPadded := fmt.Sprintf("%04d", frame) + for _, entry := range entries { + if entry.IsDir() { + continue + } + fileName := entry.Name() + if strings.Contains(fileName, "%04d") || strings.Contains(fileName, "%d") { + continue + } + if (strings.Contains(fileName, frameStrPadded) || + strings.Contains(fileName, frameStr)) && strings.HasSuffix(strings.ToLower(fileName), ext) { + found = true + ctx.Info(fmt.Sprintf("Found output file: %s", fileName)) + break + } + } + } + if !found { + fileList := []string{} + for _, e := range entries { + if !e.IsDir() { + fileList = append(fileList, e.Name()) + } + } + return fmt.Errorf("output file for frame %d not found; files in output directory: %v", frame, fileList) } } - return "", fmt.Errorf("output file not found: %s\nFiles in output directory: %v", expectedFile, fileList) + return nil } diff --git a/internal/storage/storage.go b/internal/storage/storage.go index 464e3fe..c0d8995 100644 --- a/internal/storage/storage.go +++ b/internal/storage/storage.go @@ -60,7 +60,7 @@ func (s *Storage) TempDir(pattern string) (string, error) { if err := os.MkdirAll(s.tempPath(), 0755); err != nil { return "", 
fmt.Errorf("failed to create temp directory: %w", err) } - + // Create temp directory under storage base path return os.MkdirTemp(s.tempPath(), pattern) } @@ -166,12 +166,12 @@ func (s *Storage) GetFileSize(filePath string) (int64, error) { // Returns a list of all extracted file paths func (s *Storage) ExtractZip(zipPath, destDir string) ([]string, error) { log.Printf("Extracting ZIP archive: %s -> %s", zipPath, destDir) - + // Ensure destination directory exists if err := os.MkdirAll(destDir, 0755); err != nil { return nil, fmt.Errorf("failed to create destination directory: %w", err) } - + r, err := zip.OpenReader(zipPath) if err != nil { return nil, fmt.Errorf("failed to open ZIP file: %w", err) @@ -187,7 +187,7 @@ func (s *Storage) ExtractZip(zipPath, destDir string) ([]string, error) { for _, f := range r.File { // Sanitize file path to prevent directory traversal destPath := filepath.Join(destDir, f.Name) - + cleanDestPath := filepath.Clean(destPath) cleanDestDir := filepath.Clean(destDir) if !strings.HasPrefix(cleanDestPath, cleanDestDir+string(os.PathSeparator)) && cleanDestPath != cleanDestDir { @@ -520,7 +520,7 @@ func (s *Storage) CreateJobContextFromDir(sourceDir string, jobID int64, exclude if commonPrefix != "" && strings.HasPrefix(tarPath, commonPrefix) { tarPath = strings.TrimPrefix(tarPath, commonPrefix) } - + // Check if it's a .blend file at root (no path separators after prefix stripping) if strings.HasSuffix(strings.ToLower(tarPath), ".blend") { // Check if it's at root level (no directory separators) @@ -566,7 +566,7 @@ func (s *Storage) CreateJobContextFromDir(sourceDir string, jobID int64, exclude // Get relative path and strip common prefix if present relPath := relPaths[i] tarPath := filepath.ToSlash(relPath) - + // Strip common prefix if found if commonPrefix != "" && strings.HasPrefix(tarPath, commonPrefix) { tarPath = strings.TrimPrefix(tarPath, commonPrefix) @@ -608,3 +608,129 @@ func (s *Storage) CreateJobContextFromDir(sourceDir 
string, jobID int64, exclude return contextPath, nil } +// CreateContextArchiveFromDirToPath creates a context archive from files in sourceDir at destPath. +// This is used for pre-job upload sessions where the archive is staged before a job ID exists. +func (s *Storage) CreateContextArchiveFromDirToPath(sourceDir, destPath string, excludeFiles ...string) (string, error) { + excludeSet := make(map[string]bool) + for _, excludeFile := range excludeFiles { + excludePath := filepath.Clean(excludeFile) + excludeSet[excludePath] = true + excludeSet[filepath.ToSlash(excludePath)] = true + } + + var filesToInclude []string + err := filepath.Walk(sourceDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if info.IsDir() { + return nil + } + if isBlenderSaveFile(info.Name()) { + return nil + } + + relPath, err := filepath.Rel(sourceDir, path) + if err != nil { + return err + } + cleanRelPath := filepath.Clean(relPath) + if strings.HasPrefix(cleanRelPath, "..") { + return fmt.Errorf("invalid file path: %s", relPath) + } + if excludeSet[cleanRelPath] || excludeSet[filepath.ToSlash(cleanRelPath)] { + return nil + } + + filesToInclude = append(filesToInclude, path) + return nil + }) + if err != nil { + return "", fmt.Errorf("failed to walk source directory: %w", err) + } + if len(filesToInclude) == 0 { + return "", fmt.Errorf("no files found to include in context archive") + } + + relPaths := make([]string, 0, len(filesToInclude)) + for _, filePath := range filesToInclude { + relPath, err := filepath.Rel(sourceDir, filePath) + if err != nil { + return "", fmt.Errorf("failed to get relative path: %w", err) + } + relPaths = append(relPaths, relPath) + } + + commonPrefix := findCommonPrefix(relPaths) + blendFilesAtRoot := 0 + for _, relPath := range relPaths { + tarPath := filepath.ToSlash(relPath) + if commonPrefix != "" && strings.HasPrefix(tarPath, commonPrefix) { + tarPath = strings.TrimPrefix(tarPath, commonPrefix) + } + if 
strings.HasSuffix(strings.ToLower(tarPath), ".blend") && !strings.Contains(tarPath, "/") { + blendFilesAtRoot++ + } + } + if blendFilesAtRoot == 0 { + return "", fmt.Errorf("no .blend file found at root level in context archive - .blend files must be at the root level of the uploaded archive, not in subdirectories") + } + if blendFilesAtRoot > 1 { + return "", fmt.Errorf("multiple .blend files found at root level in context archive (found %d, expected 1)", blendFilesAtRoot) + } + + contextFile, err := os.Create(destPath) + if err != nil { + return "", fmt.Errorf("failed to create context file: %w", err) + } + defer contextFile.Close() + + tarWriter := tar.NewWriter(contextFile) + defer tarWriter.Close() + copyBuf := make([]byte, 32*1024) + + for i, filePath := range filesToInclude { + file, err := os.Open(filePath) + if err != nil { + return "", fmt.Errorf("failed to open file: %w", err) + } + + info, err := file.Stat() + if err != nil { + file.Close() + return "", fmt.Errorf("failed to stat file: %w", err) + } + + tarPath := filepath.ToSlash(relPaths[i]) + if commonPrefix != "" && strings.HasPrefix(tarPath, commonPrefix) { + tarPath = strings.TrimPrefix(tarPath, commonPrefix) + } + + header, err := tar.FileInfoHeader(info, "") + if err != nil { + file.Close() + return "", fmt.Errorf("failed to create tar header: %w", err) + } + header.Name = tarPath + + if err := tarWriter.WriteHeader(header); err != nil { + file.Close() + return "", fmt.Errorf("failed to write tar header: %w", err) + } + + if _, err := io.CopyBuffer(tarWriter, file, copyBuf); err != nil { + file.Close() + return "", fmt.Errorf("failed to write file to tar: %w", err) + } + file.Close() + } + + if err := tarWriter.Close(); err != nil { + return "", fmt.Errorf("failed to close tar writer: %w", err) + } + if err := contextFile.Close(); err != nil { + return "", fmt.Errorf("failed to close context file: %w", err) + } + + return destPath, nil +} diff --git a/pkg/executils/exec.go 
b/pkg/executils/exec.go index 3e4c18d..0ff9502 100644 --- a/pkg/executils/exec.go +++ b/pkg/executils/exec.go @@ -2,10 +2,13 @@ package executils import ( "bufio" + "context" "errors" "fmt" + "io" "os" "os/exec" + "strings" "sync" "time" @@ -107,6 +110,78 @@ type CommandResult struct { ExitCode int } +// RunCommandWithTimeout is like RunCommand but kills the process after timeout. +// A zero timeout means no timeout. +func RunCommandWithTimeout( + timeout time.Duration, + cmdPath string, + args []string, + dir string, + env []string, + taskID int64, + tracker *ProcessTracker, +) (*CommandResult, error) { + if timeout <= 0 { + return RunCommand(cmdPath, args, dir, env, taskID, tracker) + } + ctx, cancel := context.WithTimeout(context.Background(), timeout) + defer cancel() + cmd := exec.CommandContext(ctx, cmdPath, args...) + cmd.Dir = dir + if env != nil { + cmd.Env = env + } + stdoutPipe, err := cmd.StdoutPipe() + if err != nil { + return nil, fmt.Errorf("failed to create stdout pipe: %w", err) + } + stderrPipe, err := cmd.StderrPipe() + if err != nil { + return nil, fmt.Errorf("failed to create stderr pipe: %w", err) + } + if err := cmd.Start(); err != nil { + return nil, fmt.Errorf("failed to start command: %w", err) + } + if tracker != nil { + tracker.Track(taskID, cmd) + defer tracker.Untrack(taskID) + } + var stdoutBuf, stderrBuf []byte + var stdoutErr, stderrErr error + var wg sync.WaitGroup + wg.Add(2) + go func() { + defer wg.Done() + stdoutBuf, stdoutErr = readAll(stdoutPipe) + }() + go func() { + defer wg.Done() + stderrBuf, stderrErr = readAll(stderrPipe) + }() + waitErr := cmd.Wait() + wg.Wait() + if stdoutErr != nil && !isBenignPipeReadError(stdoutErr) { + return nil, fmt.Errorf("failed to read stdout: %w", stdoutErr) + } + if stderrErr != nil && !isBenignPipeReadError(stderrErr) { + return nil, fmt.Errorf("failed to read stderr: %w", stderrErr) + } + result := &CommandResult{Stdout: string(stdoutBuf), Stderr: string(stderrBuf)} + if waitErr != nil { 
+ if exitErr, ok := waitErr.(*exec.ExitError); ok { + result.ExitCode = exitErr.ExitCode() + } else { + result.ExitCode = -1 + } + if ctx.Err() == context.DeadlineExceeded { + return result, fmt.Errorf("command timed out after %v: %w", timeout, waitErr) + } + return result, waitErr + } + result.ExitCode = 0 + return result, nil +} + // RunCommand executes a command and returns the output // If tracker is provided, the process will be registered for tracking // This is useful for commands where you need to capture output (like metadata extraction) @@ -164,10 +239,10 @@ func RunCommand( wg.Wait() // Check for read errors - if stdoutErr != nil { + if stdoutErr != nil && !isBenignPipeReadError(stdoutErr) { return nil, fmt.Errorf("failed to read stdout: %w", stdoutErr) } - if stderrErr != nil { + if stderrErr != nil && !isBenignPipeReadError(stderrErr) { return nil, fmt.Errorf("failed to read stderr: %w", stderrErr) } @@ -208,6 +283,18 @@ func readAll(r interface{ Read([]byte) (int, error) }) ([]byte, error) { return buf, nil } +// isBenignPipeReadError treats EOF-like pipe close races as non-fatal. +func isBenignPipeReadError(err error) bool { + if err == nil { + return false + } + if errors.Is(err, io.EOF) || errors.Is(err, os.ErrClosed) || errors.Is(err, io.ErrClosedPipe) { + return true + } + // Some platforms return wrapped messages that don't map cleanly to sentinel errors. 
+ return strings.Contains(strings.ToLower(err.Error()), "file already closed") +} + // LogSender is a function type for sending logs type LogSender func(taskID int, level types.LogLevel, message string, stepName string) diff --git a/pkg/executils/exec_test.go b/pkg/executils/exec_test.go new file mode 100644 index 0000000..8ac1bb8 --- /dev/null +++ b/pkg/executils/exec_test.go @@ -0,0 +1,32 @@ +package executils + +import ( + "errors" + "io" + "os" + "testing" +) + +func TestIsBenignPipeReadError(t *testing.T) { + tests := []struct { + name string + err error + want bool + }{ + {name: "nil", err: nil, want: false}, + {name: "eof", err: io.EOF, want: true}, + {name: "closed", err: os.ErrClosed, want: true}, + {name: "closed pipe", err: io.ErrClosedPipe, want: true}, + {name: "wrapped closed", err: errors.New("read |0: file already closed"), want: true}, + {name: "other", err: errors.New("permission denied"), want: false}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got := isBenignPipeReadError(tc.err) + if got != tc.want { + t.Fatalf("got %v, want %v (err=%v)", got, tc.want, tc.err) + } + }) + } +} diff --git a/pkg/scripts/scripts/render_blender.py.template b/pkg/scripts/scripts/render_blender.py.template index 890f95d..437289c 100644 --- a/pkg/scripts/scripts/render_blender.py.template +++ b/pkg/scripts/scripts/render_blender.py.template @@ -95,31 +95,20 @@ if current_device: print(f"Blend file output format: {current_output_format}") # Override output format if specified -# The format file always takes precedence (it's written specifically for this job) +# Render output is EXR only and must remain linear for the encode pipeline (linear -> sRGB -> HLG). 
if output_format_override: - print(f"Overriding output format from '{current_output_format}' to '{output_format_override}'") - # Map common format names to Blender's format constants - # For video formats, we render as appropriate frame format first - format_to_use = output_format_override.upper() - if format_to_use in ['EXR_264_MP4', 'EXR_AV1_MP4', 'EXR_VP9_WEBM']: - format_to_use = 'EXR' # Render as EXR for EXR video formats - - format_map = { - 'PNG': 'PNG', - 'JPEG': 'JPEG', - 'JPG': 'JPEG', - 'EXR': 'OPEN_EXR', - 'OPEN_EXR': 'OPEN_EXR', - 'TARGA': 'TARGA', - 'TIFF': 'TIFF', - 'BMP': 'BMP', - } - blender_format = format_map.get(format_to_use, format_to_use) + print(f"Overriding output format from '{current_output_format}' to OPEN_EXR (always EXR for pipeline)") try: - scene.render.image_settings.file_format = blender_format - print(f"Successfully set output format to: {blender_format}") + scene.render.image_settings.file_format = 'OPEN_EXR' + # Lock output color space to linear (defense in depth; EXR is linear for encode pipeline) + if getattr(scene.render.image_settings, 'has_linear_colorspace', False) and hasattr(scene.render.image_settings, 'linear_colorspace_settings'): + try: + scene.render.image_settings.linear_colorspace_settings.name = 'Linear' + except Exception as ex: + print(f"Note: Could not set linear output: {ex}") + print("Successfully set output format to: OPEN_EXR") except Exception as e: - print(f"Warning: Could not set output format to {blender_format}: {e}") + print(f"Warning: Could not set output format to OPEN_EXR: {e}") print(f"Using blend file's format: {current_output_format}") else: print(f"Using blend file's output format: {current_output_format}") diff --git a/pkg/types/types.go b/pkg/types/types.go index b13a430..9789268 100644 --- a/pkg/types/types.go +++ b/pkg/types/types.go @@ -93,7 +93,8 @@ type Task struct { ID int64 `json:"id"` JobID int64 `json:"job_id"` RunnerID *int64 `json:"runner_id,omitempty"` - Frame int `json:"frame"` 
+ Frame int `json:"frame"` // frame start (inclusive) for render tasks + FrameEnd *int `json:"frame_end,omitempty"` // frame end (inclusive); nil = single frame TaskType TaskType `json:"task_type"` Status TaskStatus `json:"status"` CurrentStep string `json:"current_step,omitempty"` @@ -138,8 +139,6 @@ type CreateJobRequest struct { UnhideObjects *bool `json:"unhide_objects,omitempty"` // Optional: Enable unhide tweaks for objects/collections EnableExecution *bool `json:"enable_execution,omitempty"` // Optional: Enable auto-execution in Blender (adds --enable-autoexec flag, defaults to false) BlenderVersion *string `json:"blender_version,omitempty"` // Optional: Override Blender version (e.g., "4.2" or "4.2.3") - PreserveHDR *bool `json:"preserve_hdr,omitempty"` // Optional: Preserve HDR range for EXR encoding (uses HLG with bt709 primaries) - PreserveAlpha *bool `json:"preserve_alpha,omitempty"` // Optional: Preserve alpha channel for EXR encoding (requires AV1 or VP9 codec) } // UpdateJobProgressRequest represents a request to update job progress @@ -234,8 +233,6 @@ type BlendMetadata struct { UnhideObjects *bool `json:"unhide_objects,omitempty"` // Enable unhide tweaks for objects/collections EnableExecution *bool `json:"enable_execution,omitempty"` // Enable auto-execution in Blender (adds --enable-autoexec flag, defaults to false) BlenderVersion string `json:"blender_version,omitempty"` // Detected or overridden Blender version (e.g., "4.2" or "4.2.3") - PreserveHDR *bool `json:"preserve_hdr,omitempty"` // Preserve HDR range for EXR encoding (uses HLG with bt709 primaries) - PreserveAlpha *bool `json:"preserve_alpha,omitempty"` // Preserve alpha channel for EXR encoding (requires AV1 or VP9 codec) } // MissingFilesInfo represents information about missing files/addons diff --git a/web/app.js b/web/app.js deleted file mode 100644 index e1569dc..0000000 --- a/web/app.js +++ /dev/null @@ -1,269 +0,0 @@ -const API_BASE = '/api'; - -let currentUser = null; - -// 
Check authentication on load -async function init() { - await checkAuth(); - setupEventListeners(); - if (currentUser) { - showMainPage(); - loadJobs(); - loadRunners(); - } else { - showLoginPage(); - } -} - -async function checkAuth() { - try { - const response = await fetch(`${API_BASE}/auth/me`); - if (response.ok) { - currentUser = await response.json(); - return true; - } - } catch (error) { - console.error('Auth check failed:', error); - } - return false; -} - -function showLoginPage() { - document.getElementById('login-page').classList.remove('hidden'); - document.getElementById('main-page').classList.add('hidden'); -} - -function showMainPage() { - document.getElementById('login-page').classList.add('hidden'); - document.getElementById('main-page').classList.remove('hidden'); - if (currentUser) { - document.getElementById('user-name').textContent = currentUser.name || currentUser.email; - } -} - -function setupEventListeners() { - // Navigation - document.querySelectorAll('.nav-btn').forEach(btn => { - btn.addEventListener('click', (e) => { - const page = e.target.dataset.page; - switchPage(page); - }); - }); - - // Logout - document.getElementById('logout-btn').addEventListener('click', async () => { - await fetch(`${API_BASE}/auth/logout`, { method: 'POST' }); - currentUser = null; - showLoginPage(); - }); - - // Job form - document.getElementById('job-form').addEventListener('submit', async (e) => { - e.preventDefault(); - await submitJob(); - }); -} - -function switchPage(page) { - document.querySelectorAll('.content-page').forEach(p => p.classList.add('hidden')); - document.querySelectorAll('.nav-btn').forEach(b => b.classList.remove('active')); - - document.getElementById(`${page}-page`).classList.remove('hidden'); - document.querySelector(`[data-page="${page}"]`).classList.add('active'); - - if (page === 'jobs') { - loadJobs(); - } else if (page === 'runners') { - loadRunners(); - } -} - -async function submitJob() { - const form = 
document.getElementById('job-form'); - const formData = new FormData(form); - - const jobData = { - name: document.getElementById('job-name').value, - frame_start: parseInt(document.getElementById('frame-start').value), - frame_end: parseInt(document.getElementById('frame-end').value), - output_format: document.getElementById('output-format').value, - }; - - try { - // Create job - const jobResponse = await fetch(`${API_BASE}/jobs`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(jobData), - }); - - if (!jobResponse.ok) { - throw new Error('Failed to create job'); - } - - const job = await jobResponse.json(); - - // Upload file - const fileInput = document.getElementById('blend-file'); - if (fileInput.files.length > 0) { - const fileFormData = new FormData(); - fileFormData.append('file', fileInput.files[0]); - - const fileResponse = await fetch(`${API_BASE}/jobs/${job.id}/upload`, { - method: 'POST', - body: fileFormData, - }); - - if (!fileResponse.ok) { - throw new Error('Failed to upload file'); - } - } - - alert('Job submitted successfully!'); - form.reset(); - switchPage('jobs'); - loadJobs(); - } catch (error) { - alert('Failed to submit job: ' + error.message); - } -} - -async function loadJobs() { - try { - const response = await fetch(`${API_BASE}/jobs`); - if (!response.ok) throw new Error('Failed to load jobs'); - - const jobs = await response.json(); - displayJobs(jobs); - } catch (error) { - console.error('Failed to load jobs:', error); - } -} - -function displayJobs(jobs) { - const container = document.getElementById('jobs-list'); - if (jobs.length === 0) { - container.innerHTML = '

No jobs yet. Submit a job to get started!

'; - return; - } - - container.innerHTML = jobs.map(job => ` -
-

${escapeHtml(job.name)}

-
- Frames: ${job.frame_start}-${job.frame_end} - Format: ${job.output_format} - Created: ${new Date(job.created_at).toLocaleString()} -
-
${job.status}
-
-
-
-
- - ${job.status === 'pending' || job.status === 'running' ? - `` : ''} -
-
- `).join(''); -} - -async function viewJob(jobId) { - try { - const response = await fetch(`${API_BASE}/jobs/${jobId}`); - if (!response.ok) throw new Error('Failed to load job'); - - const job = await response.json(); - - // Load files - const filesResponse = await fetch(`${API_BASE}/jobs/${jobId}/files`); - const files = filesResponse.ok ? await filesResponse.json() : []; - - const outputFiles = files.filter(f => f.file_type === 'output'); - if (outputFiles.length > 0) { - let message = 'Output files:\n'; - outputFiles.forEach(file => { - message += `- ${file.file_name}\n`; - }); - message += '\nWould you like to download them?'; - if (confirm(message)) { - outputFiles.forEach(file => { - window.open(`${API_BASE}/jobs/${jobId}/files/${file.id}/download`, '_blank'); - }); - } - } else { - alert(`Job: ${job.name}\nStatus: ${job.status}\nProgress: ${job.progress.toFixed(1)}%`); - } - } catch (error) { - alert('Failed to load job details: ' + error.message); - } -} - -async function cancelJob(jobId) { - if (!confirm('Are you sure you want to cancel this job?')) return; - - try { - const response = await fetch(`${API_BASE}/jobs/${jobId}`, { - method: 'DELETE', - }); - if (!response.ok) throw new Error('Failed to cancel job'); - loadJobs(); - } catch (error) { - alert('Failed to cancel job: ' + error.message); - } -} - -async function loadRunners() { - try { - const response = await fetch(`${API_BASE}/runners`); - if (!response.ok) throw new Error('Failed to load runners'); - - const runners = await response.json(); - displayRunners(runners); - } catch (error) { - console.error('Failed to load runners:', error); - } -} - -function displayRunners(runners) { - const container = document.getElementById('runners-list'); - if (runners.length === 0) { - container.innerHTML = '

No runners connected.

'; - return; - } - - container.innerHTML = runners.map(runner => { - const lastHeartbeat = new Date(runner.last_heartbeat); - const isOnline = (Date.now() - lastHeartbeat.getTime()) < 60000; // 1 minute - - return ` -
-

${escapeHtml(runner.name)}

-
- Hostname: ${escapeHtml(runner.hostname)} - Last heartbeat: ${lastHeartbeat.toLocaleString()} -
-
- ${isOnline ? 'Online' : 'Offline'} -
-
- `; - }).join(''); -} - -function escapeHtml(text) { - const div = document.createElement('div'); - div.textContent = text; - return div.innerHTML; -} - -// Auto-refresh jobs every 5 seconds -setInterval(() => { - if (currentUser && document.getElementById('jobs-page').classList.contains('hidden') === false) { - loadJobs(); - } -}, 5000); - -// Initialize on load -init(); - diff --git a/web/embed.go b/web/embed.go index f1786ba..fc7fa65 100644 --- a/web/embed.go +++ b/web/embed.go @@ -4,42 +4,26 @@ import ( "embed" "io/fs" "net/http" - "strings" ) -//go:embed dist/* -var distFS embed.FS +//go:embed templates templates/partials static +var uiFS embed.FS -// GetFileSystem returns an http.FileSystem for the embedded web UI files -func GetFileSystem() http.FileSystem { - subFS, err := fs.Sub(distFS, "dist") +// GetStaticFileSystem returns an http.FileSystem for embedded UI assets. +func GetStaticFileSystem() http.FileSystem { + subFS, err := fs.Sub(uiFS, "static") if err != nil { panic(err) } return http.FS(subFS) } -// SPAHandler returns an http.Handler that serves the embedded SPA -// It serves static files if they exist, otherwise falls back to index.html -func SPAHandler() http.Handler { - fsys := GetFileSystem() - fileServer := http.FileServer(fsys) - - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - path := r.URL.Path - - // Try to open the file - f, err := fsys.Open(strings.TrimPrefix(path, "/")) - if err != nil { - // File doesn't exist, serve index.html for SPA routing - r.URL.Path = "/" - fileServer.ServeHTTP(w, r) - return - } - f.Close() - - // File exists, serve it - fileServer.ServeHTTP(w, r) - }) +// StaticHandler serves /assets/* files from embedded static assets. +func StaticHandler() http.Handler { + return http.StripPrefix("/assets/", http.FileServer(GetStaticFileSystem())) } +// GetTemplateFS returns the embedded template filesystem. 
+func GetTemplateFS() fs.FS { + return uiFS +} diff --git a/web/index.html b/web/index.html deleted file mode 100644 index 50f6647..0000000 --- a/web/index.html +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - - JiggaBlend - - -
- - - diff --git a/web/package-lock.json b/web/package-lock.json deleted file mode 100644 index f6bbe29..0000000 --- a/web/package-lock.json +++ /dev/null @@ -1,2677 +0,0 @@ -{ - "name": "jiggablend-web", - "version": "1.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "jiggablend-web", - "version": "1.0.0", - "dependencies": { - "react": "^18.2.0", - "react-dom": "^18.2.0" - }, - "devDependencies": { - "@vitejs/plugin-react": "^4.2.1", - "autoprefixer": "^10.4.16", - "postcss": "^8.4.32", - "tailwindcss": "^3.4.0", - "vite": "^7.2.4" - } - }, - "node_modules/@alloc/quick-lru": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", - "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", - "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.27.1", - "js-tokens": "^4.0.0", - "picocolors": "^1.1.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/compat-data": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz", - "integrity": "sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz", - "integrity": 
"sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.5", - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-module-transforms": "^7.28.3", - "@babel/helpers": "^7.28.4", - "@babel/parser": "^7.28.5", - "@babel/template": "^7.27.2", - "@babel/traverse": "^7.28.5", - "@babel/types": "^7.28.5", - "@jridgewell/remapping": "^2.3.5", - "convert-source-map": "^2.0.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@babel/generator": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", - "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.28.5", - "@babel/types": "^7.28.5", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", - "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.27.2", - "@babel/helper-validator-option": "^7.27.1", - "browserslist": "^4.24.0", - "lru-cache": "^5.1.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-globals": { - "version": "7.28.0", - "resolved": 
"https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", - "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-imports": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", - "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-transforms": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", - "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1", - "@babel/traverse": "^7.28.3" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-plugin-utils": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", - "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-string-parser": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", - "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", - "dev": true, - 
"license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", - "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-option": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", - "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helpers": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", - "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", - "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.28.5" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-transform-react-jsx-self": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", - "integrity": 
"sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-react-jsx-source": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", - "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/template": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", - "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/parser": "^7.27.2", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/traverse": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", - "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.5", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.5", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.5", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/types": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", - 
"integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.28.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.12.tgz", - "integrity": "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.12.tgz", - "integrity": "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.12.tgz", - "integrity": "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.12.tgz", - "integrity": "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - 
"node_modules/@esbuild/darwin-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.12.tgz", - "integrity": "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.12.tgz", - "integrity": "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.12.tgz", - "integrity": "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.12.tgz", - "integrity": "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.12.tgz", - "integrity": "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - 
"optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.12.tgz", - "integrity": "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.12.tgz", - "integrity": "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.12.tgz", - "integrity": "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.12.tgz", - "integrity": "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.12.tgz", - "integrity": 
"sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.12.tgz", - "integrity": "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.12.tgz", - "integrity": "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.12.tgz", - "integrity": "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.12.tgz", - "integrity": "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.12", - "resolved": 
"https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.12.tgz", - "integrity": "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.12.tgz", - "integrity": "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.12.tgz", - "integrity": "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openharmony-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.12.tgz", - "integrity": "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.12.tgz", - "integrity": "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": 
{ - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.12.tgz", - "integrity": "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.12.tgz", - "integrity": "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.12.tgz", - "integrity": "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.13", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", - "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0", - "@jridgewell/trace-mapping": "^0.3.24" - } - }, - "node_modules/@jridgewell/remapping": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", - "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", - "dev": true, - "license": "MIT", - 
"dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", - "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", - "dev": true, - "license": "MIT" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.31", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", - "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - 
"version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@rolldown/pluginutils": { - "version": "1.0.0-beta.27", - "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", - "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", - "dev": true, - "license": "MIT" - }, - "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.53.3.tgz", - "integrity": "sha512-mRSi+4cBjrRLoaal2PnqH82Wqyb+d3HsPUN/W+WslCXsZsyHa9ZeQQX/pQsZaVIWDkPcpV6jJ+3KLbTbgnwv8w==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-android-arm64": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.53.3.tgz", - "integrity": "sha512-CbDGaMpdE9sh7sCmTrTUyllhrg65t6SwhjlMJsLr+J8YjFuPmCEjbBSx4Z/e4SmDyH3aB5hGaJUP2ltV/vcs4w==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.53.3.tgz", - "integrity": "sha512-Nr7SlQeqIBpOV6BHHGZgYBuSdanCXuw09hon14MGOLGmXAFYjx1wNvquVPmpZnl0tLjg25dEdr4IQ6GgyToCUA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.53.3", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.53.3.tgz", - "integrity": "sha512-DZ8N4CSNfl965CmPktJ8oBnfYr3F8dTTNBQkRlffnUarJ2ohudQD17sZBa097J8xhQ26AwhHJ5mvUyQW8ddTsQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.53.3.tgz", - "integrity": "sha512-yMTrCrK92aGyi7GuDNtGn2sNW+Gdb4vErx4t3Gv/Tr+1zRb8ax4z8GWVRfr3Jw8zJWvpGHNpss3vVlbF58DZ4w==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.53.3.tgz", - "integrity": "sha512-lMfF8X7QhdQzseM6XaX0vbno2m3hlyZFhwcndRMw8fbAGUGL3WFMBdK0hbUBIUYcEcMhVLr1SIamDeuLBnXS+Q==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.53.3.tgz", - "integrity": "sha512-k9oD15soC/Ln6d2Wv/JOFPzZXIAIFLp6B+i14KhxAfnq76ajt0EhYc5YPeX6W1xJkAdItcVT+JhKl1QZh44/qw==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.53.3.tgz", - "integrity": "sha512-vTNlKq+N6CK/8UktsrFuc+/7NlEYVxgaEgRXVUVK258Z5ymho29skzW1sutgYjqNnquGwVUObAaxae8rZ6YMhg==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - 
"node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.53.3.tgz", - "integrity": "sha512-RGrFLWgMhSxRs/EWJMIFM1O5Mzuz3Xy3/mnxJp/5cVhZ2XoCAxJnmNsEyeMJtpK+wu0FJFWz+QF4mjCA7AUQ3w==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.53.3.tgz", - "integrity": "sha512-kASyvfBEWYPEwe0Qv4nfu6pNkITLTb32p4yTgzFCocHnJLAHs+9LjUu9ONIhvfT/5lv4YS5muBHyuV84epBo/A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-loong64-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.53.3.tgz", - "integrity": "sha512-JiuKcp2teLJwQ7vkJ95EwESWkNRFJD7TQgYmCnrPtlu50b4XvT5MOmurWNrCj3IFdyjBQ5p9vnrX4JM6I8OE7g==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.53.3.tgz", - "integrity": "sha512-EoGSa8nd6d3T7zLuqdojxC20oBfNT8nexBbB/rkxgKj5T5vhpAQKKnD+h3UkoMuTyXkP5jTjK/ccNRmQrPNDuw==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.53.3.tgz", - "integrity": "sha512-4s+Wped2IHXHPnAEbIB0YWBv7SDohqxobiiPA1FIWZpX+w9o2i4LezzH/NkFUl8LRci/8udci6cLq+jJQlh+0g==", - "cpu": [ - "riscv64" - ], - "dev": true, - 
"license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.53.3.tgz", - "integrity": "sha512-68k2g7+0vs2u9CxDt5ktXTngsxOQkSEV/xBbwlqYcUrAVh6P9EgMZvFsnHy4SEiUl46Xf0IObWVbMvPrr2gw8A==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.53.3.tgz", - "integrity": "sha512-VYsFMpULAz87ZW6BVYw3I6sWesGpsP9OPcyKe8ofdg9LHxSbRMd7zrVrr5xi/3kMZtpWL/wC+UIJWJYVX5uTKg==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.53.3.tgz", - "integrity": "sha512-3EhFi1FU6YL8HTUJZ51imGJWEX//ajQPfqWLI3BQq4TlvHy4X0MOr5q3D2Zof/ka0d5FNdPwZXm3Yyib/UEd+w==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.53.3.tgz", - "integrity": "sha512-eoROhjcc6HbZCJr+tvVT8X4fW3/5g/WkGvvmwz/88sDtSJzO7r/blvoBDgISDiCjDRZmHpwud7h+6Q9JxFwq1Q==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-openharmony-arm64": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.53.3.tgz", - "integrity": 
"sha512-OueLAWgrNSPGAdUdIjSWXw+u/02BRTcnfw9PN41D2vq/JSEPnJnVuBgw18VkN8wcd4fjUs+jFHVM4t9+kBSNLw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ] - }, - "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.53.3.tgz", - "integrity": "sha512-GOFuKpsxR/whszbF/bzydebLiXIHSgsEUp6M0JI8dWvi+fFa1TD6YQa4aSZHtpmh2/uAlj/Dy+nmby3TJ3pkTw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.53.3.tgz", - "integrity": "sha512-iah+THLcBJdpfZ1TstDFbKNznlzoxa8fmnFYK4V67HvmuNYkVdAywJSoteUszvBQ9/HqN2+9AZghbajMsFT+oA==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-x64-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.53.3.tgz", - "integrity": "sha512-J9QDiOIZlZLdcot5NXEepDkstocktoVjkaKUtqzgzpt2yWjGlbYiKyp05rWwk4nypbYUNoFAztEgixoLaSETkg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.53.3.tgz", - "integrity": "sha512-UhTd8u31dXadv0MopwGgNOBpUVROFKWVQgAg5N1ESyCz8AuBcMqm4AuTjrwgQKGDfoFuz02EuMRHQIw/frmYKQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@types/babel__core": { - "version": "7.20.5", - "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", - 
"integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.20.7", - "@babel/types": "^7.20.7", - "@types/babel__generator": "*", - "@types/babel__template": "*", - "@types/babel__traverse": "*" - } - }, - "node_modules/@types/babel__generator": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", - "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.0.0" - } - }, - "node_modules/@types/babel__template": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", - "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.1.0", - "@babel/types": "^7.0.0" - } - }, - "node_modules/@types/babel__traverse": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", - "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.28.2" - } - }, - "node_modules/@types/estree": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", - "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", - "dev": true, - "license": "MIT" - }, - "node_modules/@vitejs/plugin-react": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", - "integrity": 
"sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.28.0", - "@babel/plugin-transform-react-jsx-self": "^7.27.1", - "@babel/plugin-transform-react-jsx-source": "^7.27.1", - "@rolldown/pluginutils": "1.0.0-beta.27", - "@types/babel__core": "^7.20.5", - "react-refresh": "^0.17.0" - }, - "engines": { - "node": "^14.18.0 || >=16.0.0" - }, - "peerDependencies": { - "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" - } - }, - "node_modules/any-promise": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", - "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", - "dev": true, - "license": "MIT" - }, - "node_modules/anymatch": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", - "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", - "dev": true, - "license": "ISC", - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/arg": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", - "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", - "dev": true, - "license": "MIT" - }, - "node_modules/autoprefixer": { - "version": "10.4.22", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.22.tgz", - "integrity": "sha512-ARe0v/t9gO28Bznv6GgqARmVqcWOV3mfgUPn9becPHMiD3o9BwlRgaeccZnwTpZ7Zwqrm+c1sUSsMxIzQzc8Xg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/autoprefixer" - }, - { - "type": "github", - "url": 
"https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "browserslist": "^4.27.0", - "caniuse-lite": "^1.0.30001754", - "fraction.js": "^5.3.4", - "normalize-range": "^0.1.2", - "picocolors": "^1.1.1", - "postcss-value-parser": "^4.2.0" - }, - "bin": { - "autoprefixer": "bin/autoprefixer" - }, - "engines": { - "node": "^10 || ^12 || >=14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/baseline-browser-mapping": { - "version": "2.8.30", - "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.30.tgz", - "integrity": "sha512-aTUKW4ptQhS64+v2d6IkPzymEzzhw+G0bA1g3uBRV3+ntkH+svttKseW5IOR4Ed6NUVKqnY7qT3dKvzQ7io4AA==", - "dev": true, - "license": "Apache-2.0", - "bin": { - "baseline-browser-mapping": "dist/cli.js" - } - }, - "node_modules/binary-extensions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", - "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, - "license": "MIT", - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/browserslist": { - "version": "4.28.0", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.0.tgz", - "integrity": "sha512-tbydkR/CxfMwelN0vwdP/pLkDwyAASZ+VfWm4EOwlB6SWhx1sYnWLqo8N5j0rAzPfzfRaxt0mM/4wPU/Su84RQ==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - 
"url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "baseline-browser-mapping": "^2.8.25", - "caniuse-lite": "^1.0.30001754", - "electron-to-chromium": "^1.5.249", - "node-releases": "^2.0.27", - "update-browserslist-db": "^1.1.4" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/camelcase-css": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", - "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - "node_modules/caniuse-lite": { - "version": "1.0.30001756", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001756.tgz", - "integrity": "sha512-4HnCNKbMLkLdhJz3TToeVWHSnfJvPaq6vu/eRP0Ahub/07n484XHhBF5AJoSGHdVrS8tKFauUQz8Bp9P7LVx7A==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "CC-BY-4.0" - }, - "node_modules/chokidar": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", - "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", - "dev": true, - "license": "MIT", - "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - }, - 
"optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/chokidar/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/commander": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", - "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - "node_modules/convert-source-map": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", - "dev": true, - "license": "MIT" - }, - "node_modules/cssesc": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", - "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", - "dev": true, - "license": "MIT", - "bin": { - "cssesc": "bin/cssesc" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/debug": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", - "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/didyoumean": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", - "integrity": 
"sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/dlv": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", - "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", - "dev": true, - "license": "MIT" - }, - "node_modules/electron-to-chromium": { - "version": "1.5.259", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.259.tgz", - "integrity": "sha512-I+oLXgpEJzD6Cwuwt1gYjxsDmu/S/Kd41mmLA3O+/uH2pFRO/DvOjUyGozL8j3KeLV6WyZ7ssPwELMsXCcsJAQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/esbuild": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.12.tgz", - "integrity": "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.12", - "@esbuild/android-arm": "0.25.12", - "@esbuild/android-arm64": "0.25.12", - "@esbuild/android-x64": "0.25.12", - "@esbuild/darwin-arm64": "0.25.12", - "@esbuild/darwin-x64": "0.25.12", - "@esbuild/freebsd-arm64": "0.25.12", - "@esbuild/freebsd-x64": "0.25.12", - "@esbuild/linux-arm": "0.25.12", - "@esbuild/linux-arm64": "0.25.12", - "@esbuild/linux-ia32": "0.25.12", - "@esbuild/linux-loong64": "0.25.12", - "@esbuild/linux-mips64el": "0.25.12", - "@esbuild/linux-ppc64": "0.25.12", - "@esbuild/linux-riscv64": "0.25.12", - "@esbuild/linux-s390x": "0.25.12", - "@esbuild/linux-x64": "0.25.12", - "@esbuild/netbsd-arm64": "0.25.12", - "@esbuild/netbsd-x64": "0.25.12", - "@esbuild/openbsd-arm64": "0.25.12", - "@esbuild/openbsd-x64": "0.25.12", - "@esbuild/openharmony-arm64": "0.25.12", - "@esbuild/sunos-x64": "0.25.12", - 
"@esbuild/win32-arm64": "0.25.12", - "@esbuild/win32-ia32": "0.25.12", - "@esbuild/win32-x64": "0.25.12" - } - }, - "node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/fast-glob": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", - "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.8" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fast-glob/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/fastq": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", - "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - 
"node_modules/fraction.js": { - "version": "5.3.4", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz", - "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "*" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/rawify" - } - }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": 
"https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dev": true, - "license": "MIT", - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", - "dev": true, - "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": 
"sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/jiti": { - "version": "1.21.7", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", - "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", - "dev": true, - "license": "MIT", - "bin": { - "jiti": "bin/jiti.js" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "license": "MIT" - }, - "node_modules/jsesc": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", - "dev": true, - "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true, - "license": "MIT", - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/lilconfig": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", - "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/antonk52" - } - }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": 
"sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", - "dev": true, - "license": "MIT" - }, - "node_modules/loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "license": "MIT", - "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" - }, - "bin": { - "loose-envify": "cli.js" - } - }, - "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^3.0.2" - } - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true, - "license": "MIT" - }, - "node_modules/mz": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", - "integrity": 
"sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "any-promise": "^1.0.0", - "object-assign": "^4.0.1", - "thenify-all": "^1.0.0" - } - }, - "node_modules/nanoid": { - "version": "3.3.11", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", - "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/node-releases": { - "version": "2.0.27", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", - "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", - "dev": true, - "license": "MIT" - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/normalize-range": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "dev": true, - "license": "MIT", - "engines": 
{ - "node": ">=0.10.0" - } - }, - "node_modules/object-hash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", - "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true, - "license": "MIT" - }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "dev": true, - "license": "ISC" - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pirates": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", - "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - "node_modules/postcss": { - "version": "8.5.6", - "resolved": 
"https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", - "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "nanoid": "^3.3.11", - "picocolors": "^1.1.1", - "source-map-js": "^1.2.1" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/postcss-import": { - "version": "15.1.0", - "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", - "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", - "dev": true, - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.0.0", - "read-cache": "^1.0.0", - "resolve": "^1.1.7" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "postcss": "^8.0.0" - } - }, - "node_modules/postcss-js": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.1.0.tgz", - "integrity": "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "camelcase-css": "^2.0.1" - }, - "engines": { - "node": "^12 || ^14 || >= 16" - }, - "peerDependencies": { - "postcss": "^8.4.21" - } - }, - "node_modules/postcss-load-config": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-6.0.1.tgz", - "integrity": 
"sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "lilconfig": "^3.1.1" - }, - "engines": { - "node": ">= 18" - }, - "peerDependencies": { - "jiti": ">=1.21.0", - "postcss": ">=8.0.9", - "tsx": "^4.8.1", - "yaml": "^2.4.2" - }, - "peerDependenciesMeta": { - "jiti": { - "optional": true - }, - "postcss": { - "optional": true - }, - "tsx": { - "optional": true - }, - "yaml": { - "optional": true - } - } - }, - "node_modules/postcss-nested": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", - "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "postcss-selector-parser": "^6.1.1" - }, - "engines": { - "node": ">=12.0" - }, - "peerDependencies": { - "postcss": "^8.2.14" - } - }, - "node_modules/postcss-selector-parser": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", - "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-value-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", - "integrity": 
"sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/react": { - "version": "18.3.1", - "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", - "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", - "license": "MIT", - "dependencies": { - "loose-envify": "^1.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react-dom": { - "version": "18.3.1", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", - "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", - "license": "MIT", - "dependencies": { - "loose-envify": "^1.1.0", - "scheduler": "^0.23.2" - }, - "peerDependencies": { - "react": "^18.3.1" - } - }, - "node_modules/react-refresh": { - "version": "0.17.0", - "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", - "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/read-cache": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", - "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", 
- "dev": true, - "license": "MIT", - "dependencies": { - "pify": "^2.3.0" - } - }, - "node_modules/readdirp": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "dev": true, - "license": "MIT", - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/resolve": { - "version": "1.22.11", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", - "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-core-module": "^2.16.1", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/reusify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", - "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rollup": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.53.3.tgz", - "integrity": "sha512-w8GmOxZfBmKknvdXU1sdM9NHcoQejwF/4mNgj2JuEEdRaHwwF12K7e9eXn1nLZ07ad+du76mkVsyeb2rKGllsA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "1.0.8" - }, - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=18.0.0", - "npm": ">=8.0.0" - }, - "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.53.3", - "@rollup/rollup-android-arm64": "4.53.3", - "@rollup/rollup-darwin-arm64": "4.53.3", - "@rollup/rollup-darwin-x64": "4.53.3", - 
"@rollup/rollup-freebsd-arm64": "4.53.3", - "@rollup/rollup-freebsd-x64": "4.53.3", - "@rollup/rollup-linux-arm-gnueabihf": "4.53.3", - "@rollup/rollup-linux-arm-musleabihf": "4.53.3", - "@rollup/rollup-linux-arm64-gnu": "4.53.3", - "@rollup/rollup-linux-arm64-musl": "4.53.3", - "@rollup/rollup-linux-loong64-gnu": "4.53.3", - "@rollup/rollup-linux-ppc64-gnu": "4.53.3", - "@rollup/rollup-linux-riscv64-gnu": "4.53.3", - "@rollup/rollup-linux-riscv64-musl": "4.53.3", - "@rollup/rollup-linux-s390x-gnu": "4.53.3", - "@rollup/rollup-linux-x64-gnu": "4.53.3", - "@rollup/rollup-linux-x64-musl": "4.53.3", - "@rollup/rollup-openharmony-arm64": "4.53.3", - "@rollup/rollup-win32-arm64-msvc": "4.53.3", - "@rollup/rollup-win32-ia32-msvc": "4.53.3", - "@rollup/rollup-win32-x64-gnu": "4.53.3", - "@rollup/rollup-win32-x64-msvc": "4.53.3", - "fsevents": "~2.3.2" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/scheduler": { - "version": "0.23.2", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", - "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", - "license": "MIT", - "dependencies": { - "loose-envify": "^1.1.0" - } - }, - "node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": 
"sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/source-map-js": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", - "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/sucrase": { - "version": "3.35.1", - "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.1.tgz", - "integrity": "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.2", - "commander": "^4.0.0", - "lines-and-columns": "^1.1.6", - "mz": "^2.7.0", - "pirates": "^4.0.1", - "tinyglobby": "^0.2.11", - "ts-interface-checker": "^0.1.9" - }, - "bin": { - "sucrase": "bin/sucrase", - "sucrase-node": "bin/sucrase-node" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/tailwindcss": { - "version": "3.4.18", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.18.tgz", - "integrity": "sha512-6A2rnmW5xZMdw11LYjhcI5846rt9pbLSabY5XPxo+XWdxwZaFEn47Go4NzFiHu9sNNmr/kXivP1vStfvMaK1GQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@alloc/quick-lru": "^5.2.0", - "arg": "^5.0.2", - "chokidar": "^3.6.0", - 
"didyoumean": "^1.2.2", - "dlv": "^1.1.3", - "fast-glob": "^3.3.2", - "glob-parent": "^6.0.2", - "is-glob": "^4.0.3", - "jiti": "^1.21.7", - "lilconfig": "^3.1.3", - "micromatch": "^4.0.8", - "normalize-path": "^3.0.0", - "object-hash": "^3.0.0", - "picocolors": "^1.1.1", - "postcss": "^8.4.47", - "postcss-import": "^15.1.0", - "postcss-js": "^4.0.1", - "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", - "postcss-nested": "^6.2.0", - "postcss-selector-parser": "^6.1.2", - "resolve": "^1.22.8", - "sucrase": "^3.35.0" - }, - "bin": { - "tailwind": "lib/cli.js", - "tailwindcss": "lib/cli.js" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/thenify": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", - "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", - "dev": true, - "license": "MIT", - "dependencies": { - "any-promise": "^1.0.0" - } - }, - "node_modules/thenify-all": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", - "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", - "dev": true, - "license": "MIT", - "dependencies": { - "thenify": ">= 3.1.0 < 4" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/tinyglobby": { - "version": "0.2.15", - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", - "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "fdir": "^6.5.0", - "picomatch": "^4.0.3" - }, - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/SuperchupuDev" - } - }, - "node_modules/tinyglobby/node_modules/fdir": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", - "integrity": 
"sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12.0.0" - }, - "peerDependencies": { - "picomatch": "^3 || ^4" - }, - "peerDependenciesMeta": { - "picomatch": { - "optional": true - } - } - }, - "node_modules/tinyglobby/node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/ts-interface-checker": { - "version": "0.1.13", - "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", - "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/update-browserslist-db": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.4.tgz", - "integrity": "sha512-q0SPT4xyU84saUX+tomz1WLkxUbuaJnR1xWt17M7fJtEJigJeWUNGUqrauFXsHnqev9y9JTRGwk13tFBuKby4A==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - 
"license": "MIT", - "dependencies": { - "escalade": "^3.2.0", - "picocolors": "^1.1.1" - }, - "bin": { - "update-browserslist-db": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "dev": true, - "license": "MIT" - }, - "node_modules/vite": { - "version": "7.2.4", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.2.4.tgz", - "integrity": "sha512-NL8jTlbo0Tn4dUEXEsUg8KeyG/Lkmc4Fnzb8JXN/Ykm9G4HNImjtABMJgkQoVjOBN/j2WAwDTRytdqJbZsah7w==", - "dev": true, - "license": "MIT", - "dependencies": { - "esbuild": "^0.25.0", - "fdir": "^6.5.0", - "picomatch": "^4.0.3", - "postcss": "^8.5.6", - "rollup": "^4.43.0", - "tinyglobby": "^0.2.15" - }, - "bin": { - "vite": "bin/vite.js" - }, - "engines": { - "node": "^20.19.0 || >=22.12.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^20.19.0 || >=22.12.0", - "jiti": ">=1.21.0", - "less": "^4.0.0", - "lightningcss": "^1.21.0", - "sass": "^1.70.0", - "sass-embedded": "^1.70.0", - "stylus": ">=0.54.8", - "sugarss": "^5.0.0", - "terser": "^5.16.0", - "tsx": "^4.8.1", - "yaml": "^2.4.2" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "jiti": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - }, - "tsx": { - "optional": true - }, - "yaml": { - "optional": true - } - } - }, - "node_modules/vite/node_modules/fdir": { - "version": "6.5.0", - "resolved": 
"https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", - "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12.0.0" - }, - "peerDependencies": { - "picomatch": "^3 || ^4" - }, - "peerDependenciesMeta": { - "picomatch": { - "optional": true - } - } - }, - "node_modules/vite/node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true, - "license": "ISC" - } - } -} diff --git a/web/package.json b/web/package.json deleted file mode 100644 index 3a14bd3..0000000 --- a/web/package.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "name": "jiggablend-web", - "version": "1.0.0", - "type": "module", - "scripts": { - "dev": "vite", - "build": "vite build", - "preview": "vite preview" - }, - "dependencies": { - "react": "^18.2.0", - "react-dom": "^18.2.0" - }, - "devDependencies": { - "@vitejs/plugin-react": "^4.2.1", - "autoprefixer": "^10.4.16", - "postcss": "^8.4.32", - "tailwindcss": "^3.4.0", - "vite": "^7.2.4" - } -} diff --git a/web/postcss.config.js b/web/postcss.config.js deleted file mode 100644 index b4a6220..0000000 --- a/web/postcss.config.js +++ /dev/null @@ -1,7 +0,0 @@ -export default { - plugins: { - tailwindcss: {}, - autoprefixer: {}, - }, -} - diff --git a/web/src/App.jsx b/web/src/App.jsx deleted file mode 100644 index abd3d76..0000000 --- a/web/src/App.jsx +++ /dev/null @@ 
-1,50 +0,0 @@ -import { useState, useEffect, useMemo } from 'react'; -import { useAuth } from './hooks/useAuth'; -import Login from './components/Login'; -import Layout from './components/Layout'; -import JobList from './components/JobList'; -import JobSubmission from './components/JobSubmission'; -import AdminPanel from './components/AdminPanel'; -import ErrorBoundary from './components/ErrorBoundary'; -import LoadingSpinner from './components/LoadingSpinner'; -import './styles/index.css'; - -function App() { - const { user, loading, refresh } = useAuth(); - const [activeTab, setActiveTab] = useState('jobs'); - - // Memoize login component to ensure it's ready immediately - const loginComponent = useMemo(() => , []); - - if (loading) { - return ( -
- -
- ); - } - - if (!user) { - return loginComponent; - } - - // Wrapper to change tabs - only check auth on mount, not on every navigation - const handleTabChange = (newTab) => { - setActiveTab(newTab); - }; - - return ( - - - {activeTab === 'jobs' && } - {activeTab === 'submit' && ( - handleTabChange('jobs')} /> - )} - {activeTab === 'admin' && } - - - ); -} - -export default App; - diff --git a/web/src/components/AdminPanel.jsx b/web/src/components/AdminPanel.jsx deleted file mode 100644 index 150e73d..0000000 --- a/web/src/components/AdminPanel.jsx +++ /dev/null @@ -1,810 +0,0 @@ -import { useState, useEffect, useRef } from 'react'; -import { admin, jobs, normalizeArrayResponse } from '../utils/api'; -import { wsManager } from '../utils/websocket'; -import UserJobs from './UserJobs'; -import PasswordChange from './PasswordChange'; -import LoadingSpinner from './LoadingSpinner'; - -export default function AdminPanel() { - const [activeSection, setActiveSection] = useState('api-keys'); - const [apiKeys, setApiKeys] = useState([]); - const [runners, setRunners] = useState([]); - const [users, setUsers] = useState([]); - const [loading, setLoading] = useState(false); - const [newAPIKeyName, setNewAPIKeyName] = useState(''); - const [newAPIKeyDescription, setNewAPIKeyDescription] = useState(''); - const [newAPIKeyScope, setNewAPIKeyScope] = useState('user'); // Default to user scope - const [newAPIKey, setNewAPIKey] = useState(null); - const [selectedUser, setSelectedUser] = useState(null); - const [registrationEnabled, setRegistrationEnabled] = useState(true); - const [passwordChangeUser, setPasswordChangeUser] = useState(null); - const listenerIdRef = useRef(null); // Listener ID for shared WebSocket - const subscribedChannelsRef = useRef(new Set()); // Track confirmed subscribed channels - const pendingSubscriptionsRef = useRef(new Set()); // Track pending subscriptions (waiting for confirmation) - - // Connect to shared WebSocket on mount - useEffect(() => { - 
listenerIdRef.current = wsManager.subscribe('adminpanel', { - open: () => { - console.log('AdminPanel: Shared WebSocket connected'); - // Subscribe to runners if already viewing runners section - if (activeSection === 'runners') { - subscribeToRunners(); - } - }, - message: (data) => { - // Handle subscription responses - update both local refs and wsManager - if (data.type === 'subscribed' && data.channel) { - pendingSubscriptionsRef.current.delete(data.channel); - subscribedChannelsRef.current.add(data.channel); - wsManager.confirmSubscription(data.channel); - console.log('Successfully subscribed to channel:', data.channel); - } else if (data.type === 'subscription_error' && data.channel) { - pendingSubscriptionsRef.current.delete(data.channel); - subscribedChannelsRef.current.delete(data.channel); - wsManager.failSubscription(data.channel); - console.error('Subscription failed for channel:', data.channel, data.error); - } - - // Handle runners channel messages - if (data.channel === 'runners' && data.type === 'runner_status') { - // Update runner in list - setRunners(prev => { - const index = prev.findIndex(r => r.id === data.runner_id); - if (index >= 0 && data.data) { - const updated = [...prev]; - updated[index] = { ...updated[index], ...data.data }; - return updated; - } - return prev; - }); - } - }, - error: (error) => { - console.error('AdminPanel: Shared WebSocket error:', error); - }, - close: (event) => { - console.log('AdminPanel: Shared WebSocket closed:', event); - subscribedChannelsRef.current.clear(); - pendingSubscriptionsRef.current.clear(); - } - }); - - // Ensure connection is established - wsManager.connect(); - - return () => { - // Unsubscribe from all channels before unmounting - unsubscribeFromRunners(); - if (listenerIdRef.current) { - wsManager.unsubscribe(listenerIdRef.current); - listenerIdRef.current = null; - } - }; - }, []); - - const subscribeToRunners = () => { - const channel = 'runners'; - // Don't subscribe if already 
subscribed or pending - if (subscribedChannelsRef.current.has(channel) || pendingSubscriptionsRef.current.has(channel)) { - return; - } - wsManager.subscribeToChannel(channel); - subscribedChannelsRef.current.add(channel); - pendingSubscriptionsRef.current.add(channel); - console.log('Subscribing to runners channel'); - }; - - const unsubscribeFromRunners = () => { - const channel = 'runners'; - if (!subscribedChannelsRef.current.has(channel)) { - return; // Not subscribed - } - wsManager.unsubscribeFromChannel(channel); - subscribedChannelsRef.current.delete(channel); - pendingSubscriptionsRef.current.delete(channel); - console.log('Unsubscribed from runners channel'); - }; - - useEffect(() => { - if (activeSection === 'api-keys') { - loadAPIKeys(); - unsubscribeFromRunners(); - } else if (activeSection === 'runners') { - loadRunners(); - subscribeToRunners(); - } else if (activeSection === 'users') { - loadUsers(); - unsubscribeFromRunners(); - } else if (activeSection === 'settings') { - loadSettings(); - unsubscribeFromRunners(); - } - }, [activeSection]); - - const loadAPIKeys = async () => { - setLoading(true); - try { - const data = await admin.listAPIKeys(); - setApiKeys(normalizeArrayResponse(data)); - } catch (error) { - console.error('Failed to load API keys:', error); - setApiKeys([]); - alert('Failed to load API keys'); - } finally { - setLoading(false); - } - }; - - const loadRunners = async () => { - setLoading(true); - try { - const data = await admin.listRunners(); - setRunners(normalizeArrayResponse(data)); - } catch (error) { - console.error('Failed to load runners:', error); - setRunners([]); - alert('Failed to load runners'); - } finally { - setLoading(false); - } - }; - - const loadUsers = async () => { - setLoading(true); - try { - const data = await admin.listUsers(); - setUsers(normalizeArrayResponse(data)); - } catch (error) { - console.error('Failed to load users:', error); - setUsers([]); - alert('Failed to load users'); - } finally { - 
setLoading(false); - } - }; - - const loadSettings = async () => { - setLoading(true); - try { - const data = await admin.getRegistrationEnabled(); - setRegistrationEnabled(data.enabled); - } catch (error) { - console.error('Failed to load settings:', error); - alert('Failed to load settings'); - } finally { - setLoading(false); - } - }; - - const handleToggleRegistration = async () => { - const newValue = !registrationEnabled; - setLoading(true); - try { - await admin.setRegistrationEnabled(newValue); - setRegistrationEnabled(newValue); - alert(`Registration ${newValue ? 'enabled' : 'disabled'}`); - } catch (error) { - console.error('Failed to update registration setting:', error); - alert('Failed to update registration setting'); - } finally { - setLoading(false); - } - }; - - const generateAPIKey = async () => { - if (!newAPIKeyName.trim()) { - alert('API key name is required'); - return; - } - - setLoading(true); - try { - const data = await admin.generateAPIKey(newAPIKeyName.trim(), newAPIKeyDescription.trim() || undefined, newAPIKeyScope); - setNewAPIKey(data); - setNewAPIKeyName(''); - setNewAPIKeyDescription(''); - setNewAPIKeyScope('user'); - await loadAPIKeys(); - } catch (error) { - console.error('Failed to generate API key:', error); - alert('Failed to generate API key'); - } finally { - setLoading(false); - } - }; - - const [deletingKeyId, setDeletingKeyId] = useState(null); - const [deletingRunnerId, setDeletingRunnerId] = useState(null); - - const revokeAPIKey = async (keyId) => { - if (!confirm('Are you sure you want to delete this API key? 
This action cannot be undone.')) { - return; - } - setDeletingKeyId(keyId); - try { - await admin.deleteAPIKey(keyId); - await loadAPIKeys(); - } catch (error) { - console.error('Failed to delete API key:', error); - alert('Failed to delete API key'); - } finally { - setDeletingKeyId(null); - } - }; - - - const deleteRunner = async (runnerId) => { - if (!confirm('Are you sure you want to delete this runner?')) { - return; - } - setDeletingRunnerId(runnerId); - try { - await admin.deleteRunner(runnerId); - await loadRunners(); - } catch (error) { - console.error('Failed to delete runner:', error); - alert('Failed to delete runner'); - } finally { - setDeletingRunnerId(null); - } - }; - - const copyToClipboard = (text) => { - navigator.clipboard.writeText(text); - alert('Copied to clipboard!'); - }; - - const isAPIKeyActive = (isActive) => { - return isActive; - }; - - return ( -
-
- - - - -
- - {activeSection === 'api-keys' && ( -
-
-

Generate API Key

-
-
-
- - setNewAPIKeyName(e.target.value)} - placeholder="e.g., production-runner-01" - className="w-full px-3 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent" - required - /> -
-
- - setNewAPIKeyDescription(e.target.value)} - placeholder="Optional description" - className="w-full px-3 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - -
-
-
- -
-
- - {newAPIKey && ( -
-

New API Key Generated:

-
-
- - {newAPIKey.key} - - -
-
-

Name: {newAPIKey.name}

- {newAPIKey.description &&

Description: {newAPIKey.description}

} -
-

- ⚠️ Save this API key securely. It will not be shown again. -

-
-
- )} -
- -
-

API Keys

- {loading ? ( - - ) : !apiKeys || apiKeys.length === 0 ? ( -

No API keys generated yet.

- ) : ( -
- - - - - - - - - - - - - {apiKeys.map((key) => { - return ( - - - - - - - - - ); - })} - -
- Name - - Scope - - Key Prefix - - Status - - Created At - - Actions -
-
-
{key.name}
- {key.description && ( -
{key.description}
- )} -
-
- - {key.scope === 'manager' ? 'Manager' : 'User'} - - - - {key.key_prefix} - - - {!key.is_active ? ( - - Revoked - - ) : ( - - Active - - )} - - {new Date(key.created_at).toLocaleString()} - - -
-
- )} -
-
- )} - - {activeSection === 'runners' && ( -
-

Runner Management

- {loading ? ( - - ) : !runners || runners.length === 0 ? ( -

No runners registered.

- ) : ( -
- - - - - - - - - - - - - - - {runners.map((runner) => { - const isOnline = new Date(runner.last_heartbeat) > new Date(Date.now() - 60000); - return ( - - - - - - - - - - - ); - })} - -
- Name - - Hostname - - Status - - API Key - - Priority - - Capabilities - - Last Heartbeat - - Actions -
- {runner.name} - - {runner.hostname} - - - {isOnline ? 'Online' : 'Offline'} - - - - jk_r{runner.id % 10}_... - - - {runner.priority} - - {runner.capabilities ? ( - (() => { - try { - const caps = JSON.parse(runner.capabilities); - const enabled = Object.entries(caps) - .filter(([_, v]) => v) - .map(([k, _]) => k) - .join(', '); - return enabled || 'None'; - } catch { - return runner.capabilities; - } - })() - ) : ( - 'None' - )} - - {new Date(runner.last_heartbeat).toLocaleString()} - - -
-
- )} -
- )} - - {activeSection === 'change-password' && passwordChangeUser && ( -
- - { - setPasswordChangeUser(null); - setActiveSection('users'); - }} - /> -
- )} - - {activeSection === 'users' && ( -
- {selectedUser ? ( - setSelectedUser(null)} - /> - ) : ( -
-

User Management

- {loading ? ( - - ) : !users || users.length === 0 ? ( -

No users found.

- ) : ( -
- - - - - - - - - - - - - - {users.map((user) => ( - - - - - - - - - - ))} - -
- Email - - Name - - Provider - - Admin - - Jobs - - Created - - Actions -
- {user.email} - - {user.name} - - {user.oauth_provider} - -
- {user.is_admin ? ( - - Admin - - ) : ( - - User - - )} - -
-
- {user.job_count || 0} - - {new Date(user.created_at).toLocaleString()} - -
- - {user.oauth_provider === 'local' && ( - - )} -
-
-
- )} -
- )} -
- )} - - {activeSection === 'settings' && ( -
- -
-

System Settings

- -
-
-
-

User Registration

-

- {registrationEnabled - ? 'New users can register via OAuth or local login' - : 'Registration is disabled. Only existing users can log in.'} -

-
-
- - {registrationEnabled ? 'Enabled' : 'Disabled'} - - -
-
-
-
-
- )} -
- ); -} - diff --git a/web/src/components/ErrorBoundary.jsx b/web/src/components/ErrorBoundary.jsx deleted file mode 100644 index f090aae..0000000 --- a/web/src/components/ErrorBoundary.jsx +++ /dev/null @@ -1,41 +0,0 @@ -import React from 'react'; - -class ErrorBoundary extends React.Component { - constructor(props) { - super(props); - this.state = { hasError: false, error: null }; - } - - static getDerivedStateFromError(error) { - return { hasError: true, error }; - } - - componentDidCatch(error, errorInfo) { - console.error('ErrorBoundary caught an error:', error, errorInfo); - } - - render() { - if (this.state.hasError) { - return ( -
-

Something went wrong

-

{this.state.error?.message || 'An unexpected error occurred'}

- -
- ); - } - - return this.props.children; - } -} - -export default ErrorBoundary; - diff --git a/web/src/components/ErrorMessage.jsx b/web/src/components/ErrorMessage.jsx deleted file mode 100644 index c7bc9c6..0000000 --- a/web/src/components/ErrorMessage.jsx +++ /dev/null @@ -1,26 +0,0 @@ -import React from 'react'; - -/** - * Shared ErrorMessage component for consistent error display - * Sanitizes error messages to prevent XSS - */ -export default function ErrorMessage({ error, className = '' }) { - if (!error) return null; - - // Sanitize error message - escape HTML entities - const sanitize = (text) => { - const div = document.createElement('div'); - div.textContent = text; - return div.innerHTML; - }; - - const sanitizedError = typeof error === 'string' ? sanitize(error) : sanitize(error.message || 'An error occurred'); - - return ( -
-

Error:

-

-

- ); -} - diff --git a/web/src/components/FileExplorer.jsx b/web/src/components/FileExplorer.jsx deleted file mode 100644 index ade6215..0000000 --- a/web/src/components/FileExplorer.jsx +++ /dev/null @@ -1,191 +0,0 @@ -import { useState } from 'react'; - -export default function FileExplorer({ files, onDownload, onPreview, onVideoPreview, isImageFile }) { - const [expandedPaths, setExpandedPaths] = useState(new Set()); // Root folder collapsed by default - - // Build directory tree from file paths - const buildTree = (files) => { - const tree = {}; - - files.forEach(file => { - const path = file.file_name; - // Handle both paths with slashes and single filenames - const parts = path.includes('/') ? path.split('/').filter(p => p) : [path]; - - // If it's a single file at root (no slashes), treat it specially - if (parts.length === 1 && !path.includes('/')) { - tree[parts[0]] = { - name: parts[0], - isFile: true, - file: file, - children: {}, - path: parts[0] - }; - return; - } - - let current = tree; - parts.forEach((part, index) => { - if (!current[part]) { - current[part] = { - name: part, - isFile: index === parts.length - 1, - file: index === parts.length - 1 ? file : null, - children: {}, - path: parts.slice(0, index + 1).join('/') - }; - } - current = current[part].children; - }); - }); - - return tree; - }; - - const togglePath = (path) => { - const newExpanded = new Set(expandedPaths); - if (newExpanded.has(path)) { - newExpanded.delete(path); - } else { - newExpanded.add(path); - } - setExpandedPaths(newExpanded); - }; - - const renderTree = (node, level = 0, parentPath = '') => { - const items = Object.values(node).sort((a, b) => { - // Directories first, then files - if (a.isFile !== b.isFile) { - return a.isFile ? 1 : -1; - } - return a.name.localeCompare(b.name); - }); - - return items.map((item) => { - const fullPath = parentPath ? 
`${parentPath}/${item.name}` : item.name; - const isExpanded = expandedPaths.has(fullPath); - const indent = level * 20; - - if (item.isFile) { - const file = item.file; - const isImage = isImageFile && isImageFile(file.file_name); - const isVideo = file.file_name.toLowerCase().endsWith('.mp4'); - const sizeMB = (file.file_size / 1024 / 1024).toFixed(2); - const isArchive = file.file_name.endsWith('.tar') || file.file_name.endsWith('.zip'); - - return ( -
-
- {isArchive ? '📦' : isVideo ? '🎬' : '📄'} - - {item.name} - - {sizeMB} MB -
-
- {isVideo && onVideoPreview && ( - - )} - {isImage && onPreview && ( - - )} - {onDownload && file.id && ( - - )} -
-
- ); - } else { - const hasChildren = Object.keys(item.children).length > 0; - return ( -
-
hasChildren && togglePath(fullPath)} - > - - {hasChildren ? (isExpanded ? '▼' : '▶') : '•'} - - - {hasChildren ? (isExpanded ? '📂' : '📁') : '📁'} - - {item.name} - {hasChildren && ( - - ({Object.keys(item.children).length}) - - )} -
- {hasChildren && isExpanded && ( -
- {renderTree(item.children, level + 1, fullPath)} -
- )} -
- ); - } - }); - }; - - const tree = buildTree(files); - - if (Object.keys(tree).length === 0) { - return ( -
- No files -
- ); - } - - // Wrap tree in a root folder - const rootExpanded = expandedPaths.has(''); - - return ( -
-
-
-
togglePath('')} - > - - {rootExpanded ? '▼' : '▶'} - - - {rootExpanded ? '📂' : '📁'} - - Files - - ({Object.keys(tree).length}) - -
- {rootExpanded && ( -
- {renderTree(tree)} -
- )} -
-
-
- ); -} - diff --git a/web/src/components/JobDetails.jsx b/web/src/components/JobDetails.jsx deleted file mode 100644 index d1206a3..0000000 --- a/web/src/components/JobDetails.jsx +++ /dev/null @@ -1,1207 +0,0 @@ -import { useState, useEffect, useRef } from 'react'; -import { jobs, REQUEST_SUPERSEDED } from '../utils/api'; -import { wsManager } from '../utils/websocket'; -import VideoPlayer from './VideoPlayer'; -import FileExplorer from './FileExplorer'; -import ErrorMessage from './ErrorMessage'; -import LoadingSpinner from './LoadingSpinner'; - -export default function JobDetails({ job, onClose, onUpdate }) { - const [jobDetails, setJobDetails] = useState(job); - const [files, setFiles] = useState([]); - const [contextFiles, setContextFiles] = useState([]); - const [tasks, setTasks] = useState([]); - const [loading, setLoading] = useState(true); - // Store steps and logs per task: { taskId: { steps: [], logs: [] } } - const [taskData, setTaskData] = useState({}); - // Track which tasks are expanded - const [expandedTasks, setExpandedTasks] = useState(new Set()); - const [streaming, setStreaming] = useState(false); - const [previewImage, setPreviewImage] = useState(null); // { url, fileName } or null - const [previewVideo, setPreviewVideo] = useState(null); // { url, fileName } or null - const listenerIdRef = useRef(null); // Listener ID for shared WebSocket - const subscribedChannelsRef = useRef(new Set()); // Track confirmed subscribed channels - const pendingSubscriptionsRef = useRef(new Set()); // Track pending subscriptions (waiting for confirmation) - const logContainerRefs = useRef({}); // Refs for each task's log container - const shouldAutoScrollRefs = useRef({}); // Auto-scroll state per task - const abortControllerRef = useRef(null); // AbortController for HTTP requests - - // Sync job prop to state when it changes - useEffect(() => { - if (job) { - setJobDetails(job); - } - }, [job?.id, job?.status, job?.progress]); - - useEffect(() => { - // Guard 
against undefined job or job.id - if (!job || !job.id) { - console.warn('JobDetails: job or job.id is undefined, skipping initialization'); - return; - } - - // Create new AbortController for this effect - abortControllerRef.current = new AbortController(); - - loadDetails(); - // Use shared WebSocket manager for real-time updates - listenerIdRef.current = wsManager.subscribe(`jobdetails_${job.id}`, { - open: () => { - console.log('JobDetails: Shared WebSocket connected for job', job.id); - // Subscribe to job channel - subscribe(`job:${job.id}`); - }, - message: (data) => { - handleWebSocketMessage(data); - }, - error: (error) => { - console.error('JobDetails: Shared WebSocket error:', error); - }, - close: (event) => { - console.log('JobDetails: Shared WebSocket closed:', event); - subscribedChannelsRef.current.clear(); - pendingSubscriptionsRef.current.clear(); - } - }); - - // Ensure connection is established - wsManager.connect(); - - return () => { - // Cancel any pending HTTP requests - if (abortControllerRef.current) { - abortControllerRef.current.abort(); - abortControllerRef.current = null; - } - // Unsubscribe from all channels - unsubscribeAll(); - if (listenerIdRef.current) { - wsManager.unsubscribe(listenerIdRef.current); - listenerIdRef.current = null; - } - }; - }, [job?.id]); - - useEffect(() => { - // Update log subscriptions based on expanded tasks - updateLogSubscriptions(); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [expandedTasks, tasks.length, jobDetails.status]); // Use tasks.length instead of tasks to avoid unnecessary re-runs - - // Auto-scroll logs to bottom when new logs arrive - useEffect(() => { - // Use requestAnimationFrame to ensure DOM has updated - requestAnimationFrame(() => { - Object.keys(logContainerRefs.current).forEach(key => { - const ref = logContainerRefs.current[key]; - if (!ref) return; - - // Initialize auto-scroll to true if not set - if (shouldAutoScrollRefs.current[key] === undefined) { - 
shouldAutoScrollRefs.current[key] = true; - } - - // Always auto-scroll unless user has manually scrolled up - // shouldAutoScrollRefs.current[key] is false only if user scrolled up manually - if (shouldAutoScrollRefs.current[key] !== false) { - // Scroll to bottom - ref.scrollTop = ref.scrollHeight; - } - }); - }); - }, [taskData]); - - // Helper function to load all files with pagination - const loadAllFiles = async (jobId, signal) => { - const allFiles = []; - let offset = 0; - const limit = 100; // Load 100 files per page - let hasMore = true; - - while (hasMore && !signal?.aborted) { - const fileList = await jobs.getFiles(jobId, { limit, offset, signal }); - - // Check for superseded sentinel - if (fileList === REQUEST_SUPERSEDED) { - return REQUEST_SUPERSEDED; - } - - const fileData = fileList?.data || fileList; - const files = Array.isArray(fileData) ? fileData : []; - allFiles.push(...files); - - // Check if there are more files to load - const total = fileList?.total; - if (total !== undefined) { - hasMore = offset + files.length < total; - } else { - // If total is not provided, check if we got a full page (or more) - // Use >= to safely handle edge cases where API returns different amounts - hasMore = files.length >= limit; - } - - offset += files.length; - } - - return allFiles; - }; - - const loadDetails = async () => { - // Guard against undefined job or job.id - if (!job || !job.id) { - console.warn('JobDetails: Cannot load details - job or job.id is undefined'); - return; - } - - try { - setLoading(true); - // Use summary endpoint for tasks initially - much faster - const signal = abortControllerRef.current?.signal; - const [details, allFilesResult, taskListResult] = await Promise.all([ - jobs.get(job.id, { signal }), - loadAllFiles(job.id, signal), // Load all files with pagination - jobs.getTasksSummary(job.id, { sort: 'frame:asc', signal }), // Get all tasks - ]); - - // Check if request was aborted - if (signal?.aborted) { - return; - } - 
setJobDetails(details); - - // Handle paginated file response - check for superseded sentinel - if (allFilesResult === REQUEST_SUPERSEDED) { - return; // Request was superseded, skip this update - } - setFiles(Array.isArray(allFilesResult) ? allFilesResult : []); - - // Handle paginated task summary response - check for superseded sentinel - if (taskListResult === REQUEST_SUPERSEDED) { - return; // Request was superseded, skip this update - } - const taskData = taskListResult?.data || taskListResult; - const taskSummaries = Array.isArray(taskData) ? taskData : []; - - // Convert summaries to task-like objects for display - const tasksForDisplay = taskSummaries.map(summary => ({ - id: summary.id, - job_id: job.id, - frame: summary.frame, - status: summary.status, - task_type: summary.task_type, - runner_id: summary.runner_id, - // These will be loaded on expand - current_step: null, - retry_count: 0, - max_retries: 3, - created_at: new Date().toISOString(), - })); - setTasks(Array.isArray(tasksForDisplay) ? 
tasksForDisplay : []); - - // Fetch context archive contents separately (may not exist for old jobs) - try { - const contextList = await jobs.getContextArchive(job.id, { signal }); - if (signal?.aborted) return; - setContextFiles(contextList || []); - } catch (error) { - if (signal?.aborted) return; - // Context archive may not exist for old jobs - setContextFiles([]); - } - - // Only load task data (logs/steps) for expanded tasks - // Don't auto-load for all tasks - wait for user to expand - if (details.status === 'running') { - // Only load data for tasks that are expanded - tasksForDisplay.forEach(task => { - if (expandedTasks.has(task.id)) { - const existingData = taskData[task.id]; - // Only fetch logs via HTTP if we don't have any logs yet - if (!existingData || !existingData.logs || existingData.logs.length === 0) { - loadTaskData(task.id); - } else if (!existingData.steps || existingData.steps.length === 0) { - loadTaskStepsOnly(task.id); - } - } - }); - } - - } catch (error) { - console.error('Failed to load job details:', error); - } finally { - setLoading(false); - } - }; - - const handleDownload = (fileId, fileName) => { - window.open(jobs.downloadFile(job.id, fileId), '_blank'); - }; - - const loadTaskData = async (taskId) => { - try { - console.log(`Loading task data for task ${taskId}...`); - const signal = abortControllerRef.current?.signal; - const [logsResult, steps] = await Promise.all([ - jobs.getTaskLogs(job.id, taskId, { limit: 1000, signal }), // Increased limit for completed tasks - jobs.getTaskSteps(job.id, taskId, { signal }), - ]); - - // Check if request was aborted - if (signal?.aborted) { - return; - } - - // Check for superseded sentinel - if (logsResult === REQUEST_SUPERSEDED || steps === REQUEST_SUPERSEDED) { - return; // Request was superseded, skip this update - } - - console.log(`Task ${taskId} logs result:`, logsResult); - - // Handle new format with logs, last_id, limit - const logs = logsResult.logs || logsResult; - const 
lastId = logsResult.last_id; - - console.log(`Task ${taskId} - loaded ${Array.isArray(logs) ? logs.length : 0} logs, ${Array.isArray(steps) ? steps.length : 0} steps`); - - setTaskData(prev => { - const current = prev[taskId] || { steps: [], logs: [], lastId: 0 }; - // Merge logs instead of replacing - this preserves WebSocket-streamed logs - // Deduplicate by log ID - const existingLogIds = new Set((current.logs || []).map(l => l.id)); - const newLogs = (Array.isArray(logs) ? logs : []).filter(l => !existingLogIds.has(l.id)); - const mergedLogs = [...(current.logs || []), ...newLogs].sort((a, b) => a.id - b.id); - - return { - ...prev, - [taskId]: { - steps: steps || current.steps, - logs: mergedLogs, - lastId: lastId || current.lastId - } - }; - }); - } catch (error) { - console.error('Failed to load task data:', error); - } - }; - - const loadTaskStepsOnly = async (taskId) => { - try { - const signal = abortControllerRef.current?.signal; - const steps = await jobs.getTaskSteps(job.id, taskId, { signal }); - - // Check if request was aborted - if (signal?.aborted) { - return; - } - - // Check for superseded sentinel - if (steps === REQUEST_SUPERSEDED) { - return; // Request was superseded, skip this update - } - setTaskData(prev => { - const current = prev[taskId] || { steps: [], logs: [] }; - return { - ...prev, - [taskId]: { - steps: steps || current.steps, - logs: current.logs || [] // Preserve existing logs - } - }; - }); - } catch (error) { - console.error('Failed to load task steps:', error); - } - }; - - const subscribe = (channel) => { - // Use wsManager's channel subscription (handles reconnect automatically) - wsManager.subscribeToChannel(channel); - subscribedChannelsRef.current.add(channel); - pendingSubscriptionsRef.current.add(channel); - }; - - const unsubscribe = (channel) => { - // Use wsManager's channel unsubscription - wsManager.unsubscribeFromChannel(channel); - subscribedChannelsRef.current.delete(channel); - 
pendingSubscriptionsRef.current.delete(channel); - }; - - const unsubscribeAll = () => { - subscribedChannelsRef.current.forEach(channel => { - unsubscribe(channel); - }); - }; - - const updateLogSubscriptions = () => { - // Guard against undefined job or job.id - if (!job || !job.id) { - return; - } - - // Determine which log channels should be subscribed - const shouldSubscribe = new Set(); - const isRunning = jobDetails.status === 'running' || jobDetails.status === 'pending'; - - // Subscribe to logs when task is expanded (not when step is expanded) - if (isRunning) { - expandedTasks.forEach(taskId => { - const channel = `logs:${job.id}:${taskId}`; - shouldSubscribe.add(channel); - }); - } - - // Subscribe to new channels - shouldSubscribe.forEach(channel => { - if (!subscribedChannelsRef.current.has(channel)) { - subscribe(channel); - } - }); - - // Unsubscribe from channels that shouldn't be subscribed - subscribedChannelsRef.current.forEach(channel => { - if (channel.startsWith('logs:') && !shouldSubscribe.has(channel)) { - unsubscribe(channel); - } - }); - }; - - const handleWebSocketMessage = (data) => { - try { - console.log('JobDetails: Client WebSocket message received:', data.type, data.channel, data); - - // Handle subscription responses - update both local refs and wsManager - if (data.type === 'subscribed' && data.channel) { - pendingSubscriptionsRef.current.delete(data.channel); - subscribedChannelsRef.current.add(data.channel); - wsManager.confirmSubscription(data.channel); - console.log('Successfully subscribed to channel:', data.channel, 'Total subscriptions:', subscribedChannelsRef.current.size); - } else if (data.type === 'subscription_error' && data.channel) { - pendingSubscriptionsRef.current.delete(data.channel); - subscribedChannelsRef.current.delete(data.channel); - wsManager.failSubscription(data.channel); - console.error('Subscription failed for channel:', data.channel, data.error); - if (job && job.id && data.channel === 
`job:${job.id}`) { - console.error('Failed to subscribe to job channel - job may not exist or access denied'); - } - } - - // Handle job channel messages - // Check both explicit channel and job_id match (for backwards compatibility) - // Guard against undefined job.id - if (!job || !job.id) { - return; - } - const isJobChannel = data.channel === `job:${job.id}` || - (data.job_id === job.id && !data.channel); - if (isJobChannel) { - console.log('Job channel message received:', data.type, data); - if (data.type === 'job_update' && data.data) { - // Update job details - console.log('Updating job details:', data.data); - setJobDetails(prev => { - const updated = { ...prev, ...data.data }; - console.log('Job details updated:', { - old_progress: prev.progress, - new_progress: updated.progress, - old_status: prev.status, - new_status: updated.status - }); - // Notify parent component of update - if (onUpdate) { - onUpdate(data.job_id || job.id, updated); - } - return updated; - }); - } else if (data.type === 'task_update') { - // Handle task_update - data.data contains the update fields - const taskId = data.task_id || (data.data && (data.data.id || data.data.task_id)); - console.log('Task update received:', { task_id: taskId, data: data.data, full_message: data }); - - if (!taskId) { - console.warn('task_update message missing task_id:', data); - return; - } - - if (!data.data) { - console.warn('task_update message missing data:', data); - return; - } - - // Update task in list - setTasks(prev => { - // Ensure prev is always an array - const prevArray = Array.isArray(prev) ? 
prev : []; - const index = prevArray.findIndex(t => t.id === taskId); - - if (index >= 0) { - // Task exists - update it - const updated = [...prevArray]; - const oldTask = updated[index]; - // Create a completely new task object to ensure React detects the change - const newTask = { - ...oldTask, - // Explicitly update each field from data.data to ensure changes are detected - status: data.data.status !== undefined ? data.data.status : oldTask.status, - runner_id: data.data.runner_id !== undefined ? data.data.runner_id : oldTask.runner_id, - started_at: data.data.started_at !== undefined ? data.data.started_at : oldTask.started_at, - completed_at: data.data.completed_at !== undefined ? data.data.completed_at : oldTask.completed_at, - error_message: data.data.error_message !== undefined ? data.data.error_message : oldTask.error_message, - output_path: data.data.output_path !== undefined ? data.data.output_path : oldTask.output_path, - current_step: data.data.current_step !== undefined ? 
data.data.current_step : oldTask.current_step, - // Merge any other fields - ...Object.keys(data.data).reduce((acc, key) => { - if (!['status', 'runner_id', 'started_at', 'completed_at', 'error_message', 'output_path', 'current_step'].includes(key)) { - acc[key] = data.data[key]; - } - return acc; - }, {}) - }; - updated[index] = newTask; - console.log('Updated task at index', index, { - task_id: taskId, - old_status: oldTask.status, - new_status: newTask.status, - old_runner_id: oldTask.runner_id, - new_runner_id: newTask.runner_id, - update_data: data.data, - full_new_task: newTask - }); - return updated; - } - // Task not found - check if data contains full task info (from initial state) - // Check both 'id' and 'task_id' fields - const taskIdFromData = data.data.id || data.data.task_id; - if (data.data && typeof data.data === 'object' && taskIdFromData && taskIdFromData === taskId) { - // This is a full task object from initial state - add it - console.log('Adding new task from initial state:', data.data); - return [...prevArray, { ...data.data, id: taskIdFromData }]; - } - // If task not found and it's a partial update, reload tasks to get the full list - console.log('Task not found in list, reloading tasks...'); - setTimeout(() => { - const reloadTasks = async () => { - try { - const signal = abortControllerRef.current?.signal; - const taskListResult = await jobs.getTasksSummary(job.id, { sort: 'frame:asc', signal }); - - // Check if request was aborted - if (signal?.aborted) { - return; - } - - // Check for superseded sentinel - if (taskListResult === REQUEST_SUPERSEDED) { - return; // Request was superseded, skip this update - } - const taskData = taskListResult.data || taskListResult; - const taskSummaries = Array.isArray(taskData) ? 
taskData : []; - const tasksForDisplay = taskSummaries.map(summary => ({ - id: summary.id, - job_id: job.id, - frame: summary.frame, - status: summary.status, - task_type: summary.task_type, - runner_id: summary.runner_id, - current_step: summary.current_step || null, - retry_count: summary.retry_count || 0, - max_retries: summary.max_retries || 3, - created_at: summary.created_at || new Date().toISOString(), - started_at: summary.started_at, - completed_at: summary.completed_at, - error_message: summary.error_message, - output_path: summary.output_path, - })); - setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []); - } catch (error) { - console.error('Failed to reload tasks:', error); - } - }; - reloadTasks(); - }, 100); - return prevArray; - }); - } else if (data.type === 'task_reset') { - // Handle task_reset - task was reset to pending, steps and logs were cleared - const taskId = data.task_id || (data.data && (data.data.id || data.data.task_id)); - console.log('Task reset received:', { task_id: taskId, data: data.data }); - - if (!taskId) { - console.warn('task_reset message missing task_id:', data); - return; - } - - // Update task in list - setTasks(prev => { - const prevArray = Array.isArray(prev) ? prev : []; - const index = prevArray.findIndex(t => t.id === taskId); - - if (index >= 0) { - const updated = [...prevArray]; - const oldTask = updated[index]; - const newTask = { - ...oldTask, - status: data.data?.status || 'pending', - runner_id: null, - current_step: null, - started_at: null, - error_message: data.data?.error_message || null, - retry_count: data.data?.retry_count !== undefined ? 
data.data.retry_count : oldTask.retry_count, - }; - updated[index] = newTask; - console.log('Reset task at index', index, { task_id: taskId, new_task: newTask }); - return updated; - } - return prevArray; - }); - - // Clear steps and logs for this task if flags indicate they were cleared - if (data.data?.steps_cleared || data.data?.logs_cleared) { - setTaskData(prev => { - const current = prev[taskId]; - if (!current) return prev; - return { - ...prev, - [taskId]: { - steps: data.data?.steps_cleared ? [] : current.steps, - logs: data.data?.logs_cleared ? [] : current.logs, - lastId: 0, - } - }; - }); - } - } else if (data.type === 'task_added' && data.data) { - // New task was added - reload task summaries to get the new task - console.log('task_added message received, reloading tasks...', data); - const reloadTasks = async () => { - try { - const signal = abortControllerRef.current?.signal; - const taskListResult = await jobs.getTasksSummary(job.id, { limit: 100, sort: 'frame:asc', signal }); - - // Check if request was aborted - if (signal?.aborted) { - return; - } - - // Check for superseded sentinel - if (taskListResult === REQUEST_SUPERSEDED) { - return; // Request was superseded, skip this update - } - const taskData = taskListResult.data || taskListResult; - const taskSummaries = Array.isArray(taskData) ? taskData : []; - const tasksForDisplay = taskSummaries.map(summary => ({ - id: summary.id, - job_id: job.id, - frame: summary.frame, - status: summary.status, - task_type: summary.task_type, - runner_id: summary.runner_id, - current_step: null, - retry_count: 0, - max_retries: 3, - created_at: new Date().toISOString(), - })); - setTasks(Array.isArray(tasksForDisplay) ? 
tasksForDisplay : []); - } catch (error) { - console.error('Failed to reload tasks:', error); - // Fallback to full reload - loadDetails(); - } - }; - reloadTasks(); - } else if (data.type === 'tasks_added' && data.data) { - // Multiple new tasks were added - reload task summaries - console.log('tasks_added message received, reloading tasks...', data); - const reloadTasks = async () => { - try { - const signal = abortControllerRef.current?.signal; - const taskListResult = await jobs.getTasksSummary(job.id, { limit: 100, sort: 'frame:asc', signal }); - - // Check if request was aborted - if (signal?.aborted) { - return; - } - - // Check for superseded sentinel - if (taskListResult === REQUEST_SUPERSEDED) { - return; // Request was superseded, skip this update - } - const taskData = taskListResult.data || taskListResult; - const taskSummaries = Array.isArray(taskData) ? taskData : []; - const tasksForDisplay = taskSummaries.map(summary => ({ - id: summary.id, - job_id: job.id, - frame: summary.frame, - status: summary.status, - task_type: summary.task_type, - runner_id: summary.runner_id, - current_step: null, - retry_count: 0, - max_retries: 3, - created_at: new Date().toISOString(), - })); - setTasks(Array.isArray(tasksForDisplay) ? tasksForDisplay : []); - } catch (error) { - console.error('Failed to reload tasks:', error); - // Fallback to full reload - loadDetails(); - } - }; - reloadTasks(); - } else if (data.type === 'file_added' && data.data) { - // New file was added - reload all files - const reloadFiles = async () => { - try { - const signal = abortControllerRef.current?.signal; - const allFilesResult = await loadAllFiles(job.id, signal); - - // Check if request was aborted - if (signal?.aborted) { - return; - } - - // Check for superseded sentinel - if (allFilesResult === REQUEST_SUPERSEDED) { - return; // Request was superseded, skip this update - } - setFiles(Array.isArray(allFilesResult) ? 
allFilesResult : []); - } catch (error) { - console.error('Failed to reload files:', error); - } - }; - reloadFiles(); - } else if (data.type === 'step_update' && data.data && data.task_id) { - // Step was created or updated - update task data - console.log('step_update message received:', data); - setTaskData(prev => { - const taskId = data.task_id; - const current = prev[taskId] || { steps: [], logs: [] }; - const stepData = data.data; - - // Find if step already exists - const existingSteps = current.steps || []; - const stepIndex = existingSteps.findIndex(s => s.step_name === stepData.step_name); - - let updatedSteps; - if (stepIndex >= 0) { - // Update existing step - updatedSteps = [...existingSteps]; - updatedSteps[stepIndex] = { - ...updatedSteps[stepIndex], - ...stepData, - id: stepData.step_id || updatedSteps[stepIndex].id, - }; - } else { - // Add new step - updatedSteps = [...existingSteps, { - id: stepData.step_id, - step_name: stepData.step_name, - status: stepData.status, - duration_ms: stepData.duration_ms, - error_message: stepData.error_message, - }]; - } - - return { - ...prev, - [taskId]: { - ...current, - steps: updatedSteps, - } - }; - }); - } - } else if (data.channel && data.channel.startsWith('logs:')) { - // Handle log channel messages - if (data.type === 'log' && data.data) { - const log = data.data; - // Get task_id from log data or top-level message - const taskId = log.task_id || data.task_id; - if (!taskId) { - console.warn('Log message missing task_id:', data); - return; - } - console.log('Received log for task:', taskId, log); - setTaskData(prev => { - const current = prev[taskId] || { steps: [], logs: [] }; - - // If log has a step_name, ensure the step exists in the steps array - let updatedSteps = current.steps || []; - if (log.step_name) { - const stepExists = updatedSteps.some(s => s.step_name === log.step_name); - if (!stepExists) { - // Create placeholder step for logs that arrive before step_update - console.log('Creating 
placeholder step for:', log.step_name, 'in task:', taskId); - updatedSteps = [...updatedSteps, { - id: null, // Will be updated when step_update arrives - step_name: log.step_name, - status: 'running', // Default to running since we're receiving logs - duration_ms: null, - error_message: null, - }]; - } - } - - // Check if log already exists (avoid duplicates) - if (!current.logs.find(l => l.id === log.id)) { - return { - ...prev, - [taskId]: { - ...current, - steps: updatedSteps, - logs: [...current.logs, log] - } - }; - } - // Even if log is duplicate, update steps if needed - return { - ...prev, - [taskId]: { - ...current, - steps: updatedSteps, - } - }; - }); - } - } else if (data.type === 'connected') { - // Connection established - } - } catch (error) { - console.error('Failed to parse WebSocket message:', error); - } - }; - - // startLogStream is no longer needed - subscriptions are managed by updateLogSubscriptions - - const toggleTask = async (taskId) => { - const newExpanded = new Set(expandedTasks); - if (newExpanded.has(taskId)) { - newExpanded.delete(taskId); - } else { - newExpanded.add(taskId); - // Load full task details if we only have summary - const tasksArray = Array.isArray(tasks) ? tasks : []; - const currentTask = tasksArray.find(t => t.id === taskId); - if (currentTask && !currentTask.created_at) { - // This is a summary - fetch full task details - try { - const signal = abortControllerRef.current?.signal; - const fullTasks = await jobs.getTasks(job.id, { - limit: 1, - signal, - // We can't filter by task ID, so we'll get all and find the one we need - }); - - // Check if request was aborted - if (signal?.aborted) { - return; - } - const taskData = fullTasks.data || fullTasks; - const fullTask = Array.isArray(taskData) ? taskData.find(t => t.id === taskId) : null; - if (fullTask) { - setTasks(prev => { - const prevArray = Array.isArray(prev) ? prev : []; - return prevArray.map(t => t.id === taskId ? 
fullTask : t); - }); - } - } catch (err) { - console.error('Failed to load full task details:', err); - } - } - // Always load logs/steps when expanding a task to ensure we have the latest data - // This is especially important for completed tasks that weren't loaded before - const existingData = taskData[taskId]; - const hasLogs = existingData && existingData.logs && existingData.logs.length > 0; - const hasSteps = existingData && existingData.steps && existingData.steps.length > 0; - - if (!hasLogs || !hasSteps) { - console.log(`Loading task data for task ${taskId} (logs: ${hasLogs}, steps: ${hasSteps})`); - await loadTaskData(taskId); - } else { - console.log(`Task ${taskId} already has ${existingData.logs.length} logs and ${existingData.steps.length} steps, skipping load`); - } - } - setExpandedTasks(newExpanded); - }; - - - const toggleAutoScroll = (taskId, containerName) => { - const key = `${taskId}-${containerName}`; - // Toggle auto-scroll state (default to true if undefined) - const currentState = shouldAutoScrollRefs.current[key] !== false; - shouldAutoScrollRefs.current[key] = !currentState; - // Force re-render to update button state - // We don't have expandedSteps anymore, so just trigger a re-render by updating a dummy state - setExpandedTasks(new Set(expandedTasks)); - }; - - const handleLogWheel = (taskId, containerName) => { - const key = `${taskId}-${containerName}`; - // Turn off auto-scroll when user scrolls with wheel - if (shouldAutoScrollRefs.current[key] !== false) { - shouldAutoScrollRefs.current[key] = false; - // Force re-render to update button state - setExpandedTasks(new Set(expandedTasks)); - } - }; - - const handleLogClick = (taskId, containerName, e) => { - // Pause on left or right click - if (e.button === 0 || e.button === 2) { - const key = `${taskId}-${containerName}`; - if (shouldAutoScrollRefs.current[key] !== false) { - shouldAutoScrollRefs.current[key] = false; - // Force re-render to update button state - 
setExpandedTasks(new Set(expandedTasks)); - } - } - }; - - const getLogLevelColor = (level) => { - switch (level) { - case 'ERROR': - return 'text-red-400'; - case 'WARN': - return 'text-yellow-400'; - case 'DEBUG': - return 'text-gray-500'; - default: - return 'text-gray-200'; - } - }; - - const getStepStatusIcon = (status) => { - switch (status) { - case 'completed': - return '✓'; - case 'failed': - return '✗'; - case 'running': - return '⏳'; - case 'skipped': - return '⏸'; - default: - return '○'; - } - }; - - const getTaskStatusColor = (status) => { - const colors = { - pending: 'bg-yellow-400/20 text-yellow-400', - running: 'bg-orange-400/20 text-orange-400', - completed: 'bg-green-400/20 text-green-400', - failed: 'bg-red-400/20 text-red-400', - }; - return colors[status] || 'bg-gray-500/20 text-gray-400'; - }; - - const handleDelete = async () => { - if (!confirm('Are you sure you want to permanently delete this job? This action cannot be undone.')) return; - try { - await jobs.delete(jobDetails.id); - if (onUpdate) { - onUpdate(); - } - onClose(); - } catch (error) { - alert('Failed to delete job: ' + error.message); - } - }; - - const outputFiles = files.filter((f) => f.file_type === 'output'); - const inputFiles = files.filter((f) => f.file_type === 'input'); - - // Helper to check if a file is a browser-supported image (or EXR which we convert server-side) - const isImageFile = (fileName) => { - // Browser-supported image formats + EXR (converted server-side) - const imageExtensions = [ - '.png', '.jpg', '.jpeg', '.gif', '.webp', '.bmp', '.svg', - '.ico', '.avif', '.apng', '.jfif', '.pjpeg', '.pjp', - '.exr' // EXR files are converted to PNG server-side - ]; - const lowerName = fileName.toLowerCase(); - return imageExtensions.some(ext => lowerName.endsWith(ext)); - }; - - // Helper to check if a file is an EXR file - const isEXRFile = (fileName) => { - return fileName.toLowerCase().endsWith('.exr'); - }; - - return ( - <> - {/* Image Preview Modal */} - 
{previewImage && ( -
setPreviewImage(null)} - > -
e.stopPropagation()} - > -
-

{previewImage.fileName}

- -
-
- {previewImage.fileName} { - e.target.style.display = 'none'; - const errorDiv = e.target.nextSibling; - if (errorDiv) { - errorDiv.style.display = 'block'; - } - }} - /> -
- Failed to load image preview -
-
-
-
- )} - - {/* Video Preview Modal */} - {previewVideo && ( -
setPreviewVideo(null)} - > -
e.stopPropagation()} - > -
-

{previewVideo.fileName}

- -
-
- -
-
-
- )} - -
-
-
-

{jobDetails.name}

-
- {(jobDetails.status === 'completed' || jobDetails.status === 'failed' || jobDetails.status === 'cancelled') && ( - - )} - -
-
- -
- {loading && } - - {!loading && ( - <> -
-
-

Status

-

{jobDetails.status}

-
-
-

Progress

-

- {(jobDetails.progress || 0).toFixed(1)}% -

-
-
-

Frame Range

-

- {jobDetails.frame_start} - {jobDetails.frame_end} -

-
-
-

Output Format

-

- {jobDetails.output_format} -

-
-
- - {contextFiles.length > 0 && ( -
-

- Context Archive -

- ({ - id: 0, // Context files don't have IDs - file_name: f.path || f.name || '', - file_size: f.size || 0, - file_type: 'input' - }))} - onDownload={null} // Context files can't be downloaded individually - isImageFile={isImageFile} - /> -
- )} - - {outputFiles.length > 0 && ( -
-

- Output Files -

- { - // Use EXR preview endpoint for EXR files, regular download for others - const imageUrl = isEXRFile(file.file_name) - ? jobs.previewEXR(job.id, file.id) - : jobs.downloadFile(job.id, file.id); - setPreviewImage({ url: imageUrl, fileName: file.file_name }); - }} - onVideoPreview={(file) => { - setPreviewVideo({ url: jobs.getVideoUrl(job.id), fileName: file.file_name }); - }} - isImageFile={isImageFile} - /> -
- )} - - - -
-

- Tasks {streaming && (streaming)} -

-
- {tasks.length > 0 ? ( - tasks.map((task) => { - const isExpanded = expandedTasks.has(task.id); - const taskInfo = taskData[task.id] || { steps: [], logs: [] }; - const { steps, logs } = taskInfo; - - // Sort all logs chronologically (no grouping by step_name) - const sortedLogs = [...logs].sort((a, b) => new Date(a.created_at) - new Date(b.created_at)); - - return ( -
- {/* Task Header */} -
toggleTask(task.id)} - className="flex items-center justify-between p-3 bg-gray-800 rounded-t-lg cursor-pointer hover:bg-gray-750 transition-colors" - > -
- - {isExpanded ? '▼' : '▶'} - - - {task.status} - - - {task.task_type === 'encode' ? `Encode (${jobDetails.frame_start} - ${jobDetails.frame_end})` : `Frame ${task.frame}`} - - {task.task_type && task.task_type !== 'render' && task.task_type !== 'encode' && ( - ({task.task_type}) - )} -
-
- {task.runner_id && `Runner ${task.runner_id}`} -
-
- - {/* Task Content (Continuous Log Stream) */} - {isExpanded && ( -
- {/* Header with auto-scroll */} -
-
-
- -
- - {/* Logs */} -
{ - if (el) { - logContainerRefs.current[`${task.id}-logs`] = el; - // Initialize auto-scroll to true (follow logs) when ref is first set - if (shouldAutoScrollRefs.current[`${task.id}-logs`] === undefined) { - shouldAutoScrollRefs.current[`${task.id}-logs`] = true; - } - } - }} - onWheel={() => handleLogWheel(task.id, 'logs')} - onMouseDown={(e) => handleLogClick(task.id, 'logs', e)} - onContextMenu={(e) => handleLogClick(task.id, 'logs', e)} - className="bg-black text-green-400 font-mono text-sm p-3 rounded max-h-96 overflow-y-auto" - > - {sortedLogs.length === 0 ? ( -

No logs yet...

- ) : ( - sortedLogs.map((log) => ( -
- - [{new Date(log.created_at).toLocaleTimeString()}] - - {log.message} -
- )) - )} -
-
- )} -
- ); - }) - ) : ( -

No tasks yet...

- )} -
-
- - )} -
-
-
- - ); -} - diff --git a/web/src/components/JobList.jsx b/web/src/components/JobList.jsx deleted file mode 100644 index 6584b29..0000000 --- a/web/src/components/JobList.jsx +++ /dev/null @@ -1,289 +0,0 @@ -import { useState, useEffect, useRef } from 'react'; -import { jobs, normalizeArrayResponse } from '../utils/api'; -import { wsManager } from '../utils/websocket'; -import JobDetails from './JobDetails'; -import LoadingSpinner from './LoadingSpinner'; - -export default function JobList() { - const [jobList, setJobList] = useState([]); - const [loading, setLoading] = useState(true); - const [selectedJob, setSelectedJob] = useState(null); - const [pagination, setPagination] = useState({ total: 0, limit: 50, offset: 0 }); - const [hasMore, setHasMore] = useState(true); - const listenerIdRef = useRef(null); - - useEffect(() => { - loadJobs(); - // Use shared WebSocket manager for real-time updates - listenerIdRef.current = wsManager.subscribe('joblist', { - open: () => { - console.log('JobList: Shared WebSocket connected'); - // Load initial job list via HTTP to get current state - loadJobs(); - }, - message: (data) => { - console.log('JobList: Client WebSocket message received:', data.type, data.channel, data); - // Handle jobs channel messages (always broadcasted) - if (data.channel === 'jobs') { - if (data.type === 'job_update' && data.data) { - console.log('JobList: Updating job:', data.job_id, data.data); - // Update job in list - setJobList(prev => { - const prevArray = Array.isArray(prev) ? 
prev : []; - const index = prevArray.findIndex(j => j.id === data.job_id); - if (index >= 0) { - const updated = [...prevArray]; - updated[index] = { ...updated[index], ...data.data }; - console.log('JobList: Updated job at index', index, updated[index]); - return updated; - } - // If job not in current page, reload to get updated list - if (data.data.status === 'completed' || data.data.status === 'failed') { - loadJobs(); - } - return prevArray; - }); - } else if (data.type === 'job_created' && data.data) { - console.log('JobList: New job created:', data.job_id, data.data); - // New job created - add to list - setJobList(prev => { - const prevArray = Array.isArray(prev) ? prev : []; - // Check if job already exists (avoid duplicates) - if (prevArray.findIndex(j => j.id === data.job_id) >= 0) { - return prevArray; - } - // Add new job at the beginning - return [data.data, ...prevArray]; - }); - } - } else if (data.type === 'connected') { - // Connection established - console.log('JobList: WebSocket connected'); - } - }, - error: (error) => { - console.error('JobList: Shared WebSocket error:', error); - }, - close: (event) => { - console.log('JobList: Shared WebSocket closed:', event); - } - }); - - // Ensure connection is established - wsManager.connect(); - - return () => { - if (listenerIdRef.current) { - wsManager.unsubscribe(listenerIdRef.current); - listenerIdRef.current = null; - } - }; - }, []); - - const loadJobs = async (append = false) => { - try { - const offset = append ? pagination.offset + pagination.limit : 0; - const result = await jobs.listSummary({ - limit: pagination.limit, - offset, - sort: 'created_at:desc' - }); - - // Handle both old format (array) and new format (object with data, total, etc.) - const jobsArray = normalizeArrayResponse(result); - const total = result.total !== undefined ? result.total : jobsArray.length; - - if (append) { - setJobList(prev => { - const prevArray = Array.isArray(prev) ? 
prev : []; - return [...prevArray, ...jobsArray]; - }); - setPagination(prev => ({ ...prev, offset, total })); - } else { - setJobList(jobsArray); - setPagination({ total, limit: result.limit || pagination.limit, offset: result.offset || 0 }); - } - - setHasMore(offset + jobsArray.length < total); - } catch (error) { - console.error('Failed to load jobs:', error); - // Ensure jobList is always an array even on error - if (!append) { - setJobList([]); - } - } finally { - setLoading(false); - } - }; - - const loadMore = () => { - if (!loading && hasMore) { - loadJobs(true); - } - }; - - // Keep selectedJob in sync with the job list when it refreshes - useEffect(() => { - if (selectedJob && jobList.length > 0) { - const freshJob = jobList.find(j => j.id === selectedJob.id); - if (freshJob) { - // Update to the fresh object from the list to keep it in sync - setSelectedJob(freshJob); - } else { - // Job was deleted or no longer exists, clear selection - setSelectedJob(null); - } - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [jobList]); // Only depend on jobList, not selectedJob to avoid infinite loops - - const handleCancel = async (jobId) => { - if (!confirm('Are you sure you want to cancel this job?')) return; - try { - await jobs.cancel(jobId); - loadJobs(); - } catch (error) { - alert('Failed to cancel job: ' + error.message); - } - }; - - const handleDelete = async (jobId) => { - if (!confirm('Are you sure you want to permanently delete this job? This action cannot be undone.')) return; - try { - // Optimistically update the list - setJobList(prev => { - const prevArray = Array.isArray(prev) ? 
prev : []; - return prevArray.filter(j => j.id !== jobId); - }); - if (selectedJob && selectedJob.id === jobId) { - setSelectedJob(null); - } - // Then actually delete - await jobs.delete(jobId); - // Reload to ensure consistency - loadJobs(); - } catch (error) { - // On error, reload to restore correct state - loadJobs(); - alert('Failed to delete job: ' + error.message); - } - }; - - const getStatusColor = (status) => { - const colors = { - pending: 'bg-yellow-400/20 text-yellow-400', - running: 'bg-orange-400/20 text-orange-400', - completed: 'bg-green-400/20 text-green-400', - failed: 'bg-red-400/20 text-red-400', - cancelled: 'bg-gray-500/20 text-gray-400', - }; - return colors[status] || colors.pending; - }; - - if (loading && jobList.length === 0) { - return ; - } - - if (jobList.length === 0) { - return ( -
-

No jobs yet. Submit a job to get started!

-
- ); - } - - return ( - <> -
- {jobList.map((job) => ( -
-
-

{job.name}

- - {job.status} - -
- -
- {job.frame_start !== undefined && job.frame_end !== undefined && ( -

Frames: {job.frame_start} - {job.frame_end}

- )} - {job.output_format &&

Format: {job.output_format}

} -

Created: {new Date(job.created_at).toLocaleString()}

-
- -
-
- Progress - {job.progress.toFixed(1)}% -
-
-
-
-
- -
- - {(job.status === 'pending' || job.status === 'running') && ( - - )} - {(job.status === 'completed' || job.status === 'failed' || job.status === 'cancelled') && ( - - )} -
-
- ))} -
- - {hasMore && ( -
- -
- )} - - {selectedJob && ( - setSelectedJob(null)} - onUpdate={loadJobs} - /> - )} - - ); -} diff --git a/web/src/components/JobSubmission.jsx b/web/src/components/JobSubmission.jsx deleted file mode 100644 index 607a7bb..0000000 --- a/web/src/components/JobSubmission.jsx +++ /dev/null @@ -1,3682 +0,0 @@ -import { useState, useEffect, useRef } from 'react'; -import { jobs } from '../utils/api'; -import { wsManager } from '../utils/websocket'; -import JobDetails from './JobDetails'; -import ErrorMessage from './ErrorMessage'; -import LoadingSpinner from './LoadingSpinner'; - -export default function JobSubmission({ onSuccess }) { - const [step, setStep] = useState(1); // 1 = upload & extract metadata, 2 = missing addons (if any), 3 = configure & submit - const [formData, setFormData] = useState({ - name: '', - frame_start: 1, - frame_end: 10, - output_format: 'PNG', - render_settings: null, // Will contain engine settings - unhide_objects: false, // Unhide objects/collections tweak - enable_execution: false, // Enable auto-execution in Blender - blender_version: '', // Blender version override (empty = auto-detect) - preserve_hdr: false, // Preserve HDR range for EXR encoding - preserve_alpha: false, // Preserve alpha channel for EXR encoding - }); - const [showAdvancedSettings, setShowAdvancedSettings] = useState(false); - const [file, setFile] = useState(null); - const [submitting, setSubmitting] = useState(false); - const [error, setError] = useState(''); - const [metadataStatus, setMetadataStatus] = useState(null); // 'extracting', 'completed', 'error' - const [metadata, setMetadata] = useState(null); - const [currentJobId, setCurrentJobId] = useState(null); - const [uploadSessionId, setUploadSessionId] = useState(null); // Session ID from file upload - const [createdJob, setCreatedJob] = useState(null); - const [uploadProgress, setUploadProgress] = useState(0); - const [isUploading, setIsUploading] = useState(false); - const [blendFiles, setBlendFiles] = 
useState([]); // For ZIP files with multiple blend files - const [selectedMainBlend, setSelectedMainBlend] = useState(''); - const [confirmedMissingFiles, setConfirmedMissingFiles] = useState(false); // Confirmation for missing files - const [uploadTimeRemaining, setUploadTimeRemaining] = useState(null); // Estimated time remaining in seconds - const [blenderVersions, setBlenderVersions] = useState([]); // Available Blender versions from server - const [loadingBlenderVersions, setLoadingBlenderVersions] = useState(false); - - // Use refs to track cancellation state across re-renders - const isCancelledRef = useRef(false); - const isCompletedRef = useRef(false); - const currentJobIdRef = useRef(null); - const cleanupRef = useRef(null); - const formatManuallyChangedRef = useRef(false); // Track if user manually changed output format - const stepRef = useRef(step); // Track current step to avoid stale closures - const uploadStartTimeRef = useRef(null); // Track when upload started - const listenerIdRef = useRef(null); // Listener ID for shared WebSocket - const subscribedChannelsRef = useRef(new Set()); // Track confirmed subscribed channels - const pendingSubscriptionsRef = useRef(new Set()); // Track pending subscriptions (waiting for confirmation) - - // Keep stepRef in sync with step state - useEffect(() => { - stepRef.current = step; - }, [step]); - - // Helper function to format time remaining - const formatTimeRemaining = (seconds) => { - if (!seconds || seconds < 0 || !isFinite(seconds)) return null; - - if (seconds < 60) { - return `${Math.round(seconds)}s`; - } else if (seconds < 3600) { - const mins = Math.floor(seconds / 60); - const secs = Math.round(seconds % 60); - return `${mins}m ${secs}s`; - } else if (seconds < 86400) { - const hours = Math.floor(seconds / 3600); - const mins = Math.floor((seconds % 3600) / 60); - return `${hours}h ${mins}m`; - } else { - const days = Math.floor(seconds / 86400); - const hours = Math.floor((seconds % 86400) / 3600); 
- const mins = Math.floor((seconds % 3600) / 60); - return `${days}d ${hours}h ${mins}m`; - } - }; - - // Fetch available Blender versions on mount - useEffect(() => { - const fetchBlenderVersions = async () => { - setLoadingBlenderVersions(true); - try { - const response = await fetch('/api/blender/versions'); - if (response.ok) { - const data = await response.json(); - setBlenderVersions(data.versions || []); - } - } catch (err) { - console.error('Failed to fetch Blender versions:', err); - } finally { - setLoadingBlenderVersions(false); - } - }; - fetchBlenderVersions(); - }, []); - - // Connect to shared WebSocket on mount - useEffect(() => { - listenerIdRef.current = wsManager.subscribe('jobsubmission', { - open: () => { - console.log('JobSubmission: Shared WebSocket connected'); - }, - message: (data) => { - // Handle subscription responses - update both local refs and wsManager - if (data.type === 'subscribed' && data.channel) { - pendingSubscriptionsRef.current.delete(data.channel); - subscribedChannelsRef.current.add(data.channel); - wsManager.confirmSubscription(data.channel); - console.log('Successfully subscribed to channel:', data.channel); - } else if (data.type === 'subscription_error' && data.channel) { - pendingSubscriptionsRef.current.delete(data.channel); - subscribedChannelsRef.current.delete(data.channel); - wsManager.failSubscription(data.channel); - console.error('Subscription failed for channel:', data.channel, data.error); - // If it's the upload channel we're trying to subscribe to, show error - if (data.channel.startsWith('upload:')) { - setError(`Failed to subscribe to upload progress: ${data.error}`); - } - } - - // Upload progress is now handled via HTTP response - no WebSocket messages needed - }, - error: (error) => { - console.error('JobSubmission: Shared WebSocket error:', error); - }, - close: (event) => { - console.log('JobSubmission: Shared WebSocket closed:', event); - subscribedChannelsRef.current.clear(); - 
pendingSubscriptionsRef.current.clear(); - } - }); - - // Ensure connection is established - wsManager.connect(); - - return () => { - // Unsubscribe from all channels before unmounting - unsubscribeFromAllChannels(); - if (listenerIdRef.current) { - wsManager.unsubscribe(listenerIdRef.current); - listenerIdRef.current = null; - } - }; - }, []); - - // Helper function to unsubscribe from upload channel - const unsubscribeFromUploadChannel = (channel) => { - if (!subscribedChannelsRef.current.has(channel)) { - return; // Not subscribed - } - wsManager.unsubscribeFromChannel(channel); - subscribedChannelsRef.current.delete(channel); - pendingSubscriptionsRef.current.delete(channel); - console.log('Unsubscribed from upload channel:', channel); - }; - - // Helper function to unsubscribe from all channels - const unsubscribeFromAllChannels = () => { - subscribedChannelsRef.current.forEach(channel => { - wsManager.unsubscribeFromChannel(channel); - }); - subscribedChannelsRef.current.clear(); - pendingSubscriptionsRef.current.clear(); - }; - - // No polling needed - metadata is extracted synchronously during upload - - const handleFileChange = async (e) => { - const selectedFile = e.target.files[0]; - if (!selectedFile) { - setFile(null); - return; - } - - setFile(selectedFile); - setMetadataStatus(null); - setMetadata(null); - setCurrentJobId(null); - setUploadSessionId(null); - setUploadProgress(0); - setUploadTimeRemaining(null); - uploadStartTimeRef.current = null; - setBlendFiles([]); - setSelectedMainBlend(''); - formatManuallyChangedRef.current = false; // Reset when new file is selected - - const isBlend = selectedFile.name.toLowerCase().endsWith('.blend'); - const isZip = selectedFile.name.toLowerCase().endsWith('.zip'); - - // If it's a blend file or ZIP, upload and extract metadata - if (isBlend || isZip) { - try { - setIsUploading(true); - setUploadProgress(0); - setUploadTimeRemaining(null); - uploadStartTimeRef.current = Date.now(); - 
setMetadataStatus('extracting'); - - // Upload file and get metadata in HTTP response - const result = await jobs.uploadFileForJobCreation(selectedFile, (progress) => { - // Show upload progress during upload - setUploadProgress(progress); - // Calculate time remaining for upload progress - if (progress > 0 && progress < 100 && uploadStartTimeRef.current) { - const elapsed = (Date.now() - uploadStartTimeRef.current) / 1000; // seconds - const remaining = (elapsed / progress) * (100 - progress); - setUploadTimeRemaining(remaining); - } else if (progress >= 100) { - // Upload complete - switch to processing status - setUploadProgress(100); - setMetadataStatus('processing'); - setUploadTimeRemaining(null); - } - }, selectedMainBlend || undefined); - - // Store session ID for later use when creating the job - if (result.session_id) { - setUploadSessionId(result.session_id); - } - - // Upload and processing complete - metadata is in the response - setIsUploading(false); - setUploadProgress(100); - setUploadTimeRemaining(null); - uploadStartTimeRef.current = null; - - // Handle ZIP extraction results - multiple blend files found - if (result.status === 'select_blend' || (result.zip_extracted && result.blend_files && result.blend_files.length > 1)) { - setBlendFiles(result.blend_files || []); - setMetadataStatus('select_blend'); - return; - } - - // If metadata was extracted, use it - if (result.metadata_extracted && result.metadata) { - setMetadata(result.metadata); - setMetadataStatus('completed'); - isCompletedRef.current = true; - - // Auto-populate form fields - let normalizedFormat = result.metadata.render_settings?.output_format; - if (normalizedFormat) { - const formatMap = { - 'OPEN_EXR': 'EXR', - 'EXR': 'EXR', - 'PNG': 'PNG', - 'JPEG': 'JPEG', - 'JPG': 'JPEG', - }; - normalizedFormat = formatMap[normalizedFormat.toUpperCase()] || normalizedFormat; - } - - setFormData(prev => ({ - ...prev, - frame_start: result.metadata.frame_start || prev.frame_start, - 
frame_end: result.metadata.frame_end || prev.frame_end, - output_format: normalizedFormat || prev.output_format, - render_settings: result.metadata.render_settings ? { - ...result.metadata.render_settings, - engine_settings: result.metadata.render_settings.engine_settings || {}, - } : null, - blender_version: result.metadata.blender_version || prev.blender_version, - })); - } else { - setMetadataStatus('error'); - } - - // If main blend file was auto-detected or specified, continue - if (result.main_blend_file) { - setSelectedMainBlend(result.main_blend_file); - } - } catch (err) { - console.error('Failed to upload file and extract metadata:', err); - setMetadataStatus('error'); - setIsUploading(false); - setUploadProgress(0); - setUploadSessionId(null); - setUploadTimeRemaining(null); - uploadStartTimeRef.current = null; - setError(err.message || 'Failed to upload file and extract metadata'); - } - } - }; - - const handleBlendFileSelect = async () => { - if (!selectedMainBlend || !file) { - setError('Please select a main blend file'); - return; - } - - try { - setIsUploading(true); - setUploadProgress(0); - setUploadTimeRemaining(null); - uploadStartTimeRef.current = Date.now(); - setMetadataStatus('extracting'); - - // Re-upload with selected main blend file - const result = await jobs.uploadFileForJobCreation(file, (progress) => { - // Show upload progress during upload - setUploadProgress(progress); - // Calculate time remaining for upload progress - if (progress > 0 && progress < 100 && uploadStartTimeRef.current) { - const elapsed = (Date.now() - uploadStartTimeRef.current) / 1000; // seconds - const remaining = (elapsed / progress) * (100 - progress); - setUploadTimeRemaining(remaining); - } else if (progress >= 100) { - // Upload complete - switch to processing status - setUploadProgress(100); - setMetadataStatus('processing'); - setUploadTimeRemaining(null); - } - }, selectedMainBlend); - - setBlendFiles([]); - - // Store session ID - if 
(result.session_id) { - setUploadSessionId(result.session_id); - } - - // Upload and processing complete - metadata is in the response - setIsUploading(false); - - // If metadata was extracted, use it - if (result.metadata_extracted && result.metadata) { - setMetadata(result.metadata); - setMetadataStatus('completed'); - isCompletedRef.current = true; - - // Auto-populate form fields - let normalizedFormat = result.metadata.render_settings?.output_format; - if (normalizedFormat) { - const formatMap = { - 'OPEN_EXR': 'EXR', - 'EXR': 'EXR', - 'PNG': 'PNG', - 'JPEG': 'JPEG', - 'JPG': 'JPEG', - }; - normalizedFormat = formatMap[normalizedFormat.toUpperCase()] || normalizedFormat; - } - - setFormData(prev => ({ - ...prev, - frame_start: result.metadata.frame_start || prev.frame_start, - frame_end: result.metadata.frame_end || prev.frame_end, - output_format: normalizedFormat || prev.output_format, - render_settings: result.metadata.render_settings ? { - ...result.metadata.render_settings, - engine_settings: result.metadata.render_settings.engine_settings || {}, - } : null, - blender_version: result.metadata.blender_version || prev.blender_version, - })); - } else { - setMetadataStatus('error'); - } - } catch (err) { - console.error('Failed to upload with selected blend file:', err); - setError(err.message || 'Failed to upload with selected blend file'); - setIsUploading(false); - setMetadataStatus('error'); - setUploadProgress(0); - setUploadSessionId(null); - setUploadTimeRemaining(null); - uploadStartTimeRef.current = null; - } - }; - - const handleContinueToStep2 = () => { - if (metadataStatus === 'completed' || metadataStatus === 'error') { - // Check if there are missing addons - if so, go to addon step, otherwise skip to config - const hasMissingAddons = metadata?.missing_files_info?.missing_addons && - metadata.missing_files_info.missing_addons.length > 0; - if (hasMissingAddons) { - setStep(2); // Step 2 = missing addons - } else { - setStep(3); // Step 3 = 
configure & submit - } - } - }; - - const handleContinueToStep3 = () => { - setStep(3); // Continue from addons step to config step - }; - - const handleBackToStep1 = () => { - setStep(1); - }; - - const handleBackToStep2 = () => { - setStep(2); - }; - - const handleSubmit = async (e) => { - e.preventDefault(); - setError(''); - - // Check if there are missing files/addons and require confirmation - const hasMissingFiles = metadata?.missing_files_info?.has_missing && - metadata.missing_files_info.missing_addons && - metadata.missing_files_info.missing_addons.length > 0; - - if (hasMissingFiles && !confirmedMissingFiles) { - setError('Please confirm that you want to proceed with missing addons'); - return; - } - - setSubmitting(true); - - try { - if (!file) { - throw new Error('Please select a Blender file'); - } - - if (!uploadSessionId) { - throw new Error('File upload session not found. Please upload the file again.'); - } - - const frameStart = parseInt(formData.frame_start); - const frameEnd = parseInt(formData.frame_end); - - if (frameStart < 0) { - throw new Error('Frame start must be 0 or greater. Negative starting frames are not supported.'); - } - if (frameEnd < 0) { - throw new Error('Frame end must be 0 or greater. Negative frame numbers are not supported.'); - } - if (frameEnd < frameStart) { - throw new Error('Invalid frame range'); - } - - // Create render job with upload session ID if we have one - const renderSettings = formData.render_settings && formData.render_settings.engine_settings ? 
{ - engine: formData.render_settings.engine || 'cycles', - resolution_x: formData.render_settings.resolution_x || 1920, - resolution_y: formData.render_settings.resolution_y || 1080, - frame_rate: formData.render_settings.frame_rate || (metadata?.render_settings?.frame_rate || 24.0), - engine_settings: formData.render_settings.engine_settings, - } : null; - - console.log('Submitting job with output_format:', formData.output_format, 'formatManuallyChanged:', formatManuallyChangedRef.current); - const job = await jobs.create({ - job_type: 'render', - name: formData.name, - frame_start: parseInt(formData.frame_start), - frame_end: parseInt(formData.frame_end), - output_format: formData.output_format, - render_settings: renderSettings, - upload_session_id: uploadSessionId || undefined, // Pass session ID to move context archive - unhide_objects: formData.unhide_objects || undefined, // Pass unhide toggle - enable_execution: formData.enable_execution || undefined, // Pass enable execution toggle - preserve_hdr: formData.preserve_hdr || undefined, // Pass preserve HDR toggle - preserve_alpha: formData.preserve_alpha || undefined, // Pass preserve alpha toggle - blender_version: formData.blender_version || undefined, // Pass Blender version override - }); - - // Fetch the full job details - const jobDetails = await jobs.get(job.id); - - // Set created job to show details - setCreatedJob(jobDetails); - } catch (err) { - const errorMessage = err.message || 'Failed to submit job'; - - // Check if this is a session expiry error - if (errorMessage.includes('upload session') || - errorMessage.includes('Context archive not found') || - errorMessage.includes('Please upload the file again')) { - // Reset the entire form - upload session has expired - setError('Your upload session has expired. 
Please upload your file again.'); - setFile(null); - setMetadata(null); - setMetadataStatus(null); - setUploadSessionId(null); - setStep(1); - setFormData({ - name: '', - frame_start: 1, - frame_end: 10, - output_format: 'PNG', - render_settings: null, - unhide_objects: false, - enable_execution: false, - blender_version: '', - preserve_hdr: false, - preserve_alpha: false, - }); - setShowAdvancedSettings(false); - formatManuallyChangedRef.current = false; - } else { - setError(errorMessage); - } - setSubmitting(false); - } - }; - - const handleCloseJobDetails = () => { - setCreatedJob(null); - // Reset form - setFormData({ - name: '', - frame_start: 1, - frame_end: 10, - output_format: 'PNG', - allow_parallel_runners: true, - render_settings: null, - unhide_objects: false, - enable_execution: false, - blender_version: '', - }); - setShowAdvancedSettings(false); - setFile(null); - setMetadata(null); - setMetadataStatus(null); - setCurrentJobId(null); - formatManuallyChangedRef.current = false; - setStep(1); - if (onSuccess) { - onSuccess(); - } - }; - - // If job was created, show job details - if (createdJob) { - return ( - jobs.get(createdJob.id).then(setCreatedJob)} - /> - ); - } - - return ( -
-
-

Submit New Job

-
- {/* Step 1: Upload */} -
= 1 ? 'text-orange-500 font-medium' : 'text-gray-500'}`}> -
= 1 ? 'bg-orange-600 text-white' : 'bg-gray-700'}`}> - {step > 1 ? '✓' : '1'} -
- Upload -
-
- {/* Step 2: Missing Addons (always shown, skipped if no addons) */} - {(() => { - const hasMissingAddons = metadata?.missing_files_info?.missing_addons && metadata.missing_files_info.missing_addons.length > 0; - const step2Completed = step > 2 || (step === 3 && !hasMissingAddons); - const step2Active = step === 2 || (step > 1 && hasMissingAddons && step < 3); - const step2Skipped = step >= 3 && !hasMissingAddons; - return ( -
-
- {step2Completed ? '✓' : step2Skipped ? '—' : '2'} -
- Addons -
- ); - })()} -
- {/* Step 3: Configure & Submit */} -
= 3 ? 'text-orange-500 font-medium' : 'text-gray-500'}`}> -
= 3 ? 'bg-orange-600 text-white' : 'bg-gray-700'}`}> - {step > 3 ? '✓' : '3'} -
- Configure -
-
-
- - - - {step === 1 ? ( - // Step 1: Upload file and extract metadata -
-
-
- - - -

- Notice: All files uploaded and generated will be deleted along with your job after 30 days unless you or an admin delete it earlier. -

-
-
-
- - - {blendFiles.length > 1 && ( -
-

- Multiple blend files found. Please select the main blend file: -

- - -
- )} - {(isUploading || metadataStatus === 'extracting' || metadataStatus === 'processing') && ( -
- {isUploading && uploadProgress < 100 ? ( -
-
- Uploading file... -
- {uploadTimeRemaining && ( - - ~{formatTimeRemaining(uploadTimeRemaining)} remaining - - )} - {Math.round(uploadProgress)}% -
-
-
-
-
-
- ) : metadataStatus === 'processing' ? ( -
-
- Processing file and extracting metadata... -
- ) : ( -
-
- Extracting metadata from blend file... -
- )} -
- )} - {metadataStatus === 'completed' && metadata && ( -
- {metadata.has_negative_frames && ( -
-
⚠️ Negative Frame Numbers Detected
-
- Your Blender file contains negative frame numbers (frame_start: {metadata.frame_start}, frame_end: {metadata.frame_end}). - Negative starting frames are not supported and may not work exactly as you expect. - Please adjust your Blender file's frame range settings to start at 0 or higher before submitting. -
-
- )} -
-
Metadata extracted successfully!
-
- {metadata.blender_version?.file_saved_with && ( -
- Blender Version: {metadata.blender_version.file_saved_with} - (saved with — render version may differ) -
- )} -
Frames: {metadata.frame_start} - {metadata.frame_end}
-
Resolution: {metadata.render_settings?.resolution_x} x {metadata.render_settings?.resolution_y}
-
Frame Rate: {metadata.render_settings?.frame_rate || 24} fps
-
Engine: {metadata.render_settings?.engine}
- {metadata.render_settings?.engine_settings?.samples && ( -
Cycles Samples: {metadata.render_settings.engine_settings.samples}
- )} - {metadata.render_settings?.engine_settings?.taa_render_samples && ( -
EEVEE Samples: {metadata.render_settings.engine_settings.taa_render_samples}
- )} -
- -
-
- )} - {metadataStatus === 'error' && ( -
-
Could not extract metadata. You can still continue and fill in the form manually.
- -
- )} -
-
- ) : step === 2 ? ( - // Step 2: Missing Addons (only shown if there are missing addons) -
- -
-
Missing Addons Detected
-
-

The following addons are required by this blend file but are not available on the render servers:

-
    - {metadata?.missing_files_info?.missing_addons?.map((addon, idx) => ( -
  • {addon}
  • - ))} -
-

- Note: The render may fail or produce unexpected results if these addons are required for rendering. - You can still proceed, but be aware that the output may not match your expectations. -

-
- -
-
- ) : ( - // Step 3: Configure and submit -
- -
- - setFormData({ ...formData, name: e.target.value })} - required - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent placeholder-gray-500" - placeholder="My Render Job" - /> -
- -
-
- EXPERIMENTAL -

- Frame range auto-detection may vary by Blender version or hardware. Verify these values match your blend file. -

-
-
-
- - setFormData({ ...formData, frame_start: e.target.value })} - required - min="0" - className="w-full px-4 py-2 bg-gray-900 border border-yellow-400/50 rounded-lg text-gray-100 focus:ring-2 focus:ring-yellow-500 focus:border-transparent" - /> - {formData.frame_start < 0 && ( -

Frame start must be 0 or greater. Negative frames are not supported.

- )} -
-
- - setFormData({ ...formData, frame_end: e.target.value })} - required - min={Math.max(0, formData.frame_start)} - className="w-full px-4 py-2 bg-gray-900 border border-yellow-400/50 rounded-lg text-gray-100 focus:ring-2 focus:ring-yellow-500 focus:border-transparent" - /> - {formData.frame_end < 0 && ( -

Frame end must be 0 or greater. Negative frames are not supported.

- )} -
-
-
- -
- - -
- -
- - -

- Override the Blender version used for rendering. Leave as auto-detect to use the version the file was saved with. -

-
- -
-
- setFormData({ ...formData, unhide_objects: e.target.checked })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-900 rounded" - /> - -
-
- -
-
- setFormData({ ...formData, enable_execution: e.target.checked })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-900 rounded" - /> - -
-
- - {(formData.output_format === 'EXR_264_MP4' || formData.output_format === 'EXR_AV1_MP4' || formData.output_format === 'EXR_VP9_WEBM') && ( - <> -
-

- Note: The preserve options below allow you to explicitly control HDR and alpha preservation. If autodetection finds HDR content or alpha channels in your EXR files, they will be automatically preserved even if these options are unchecked. Important: Alpha detection only checks the first frame, so if your render uses transparency later in the sequence, you should explicitly enable the preserve alpha option. HDR detection is not perfect and may miss some HDR content, so if you're certain your render contains HDR content, you should explicitly enable the preserve HDR option. -

-
-
-
- setFormData({ ...formData, preserve_hdr: e.target.checked })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-900 rounded" - /> - -
-
- - )} - - {(formData.output_format === 'EXR_AV1_MP4' || formData.output_format === 'EXR_VP9_WEBM') && ( -
-
- setFormData({ ...formData, preserve_alpha: e.target.checked })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-900 rounded" - /> - -
-
- )} - - {metadata && metadataStatus === 'completed' && ( - <> -
-
Metadata from blend file:
-
- {metadata.blender_version?.file_saved_with && ( -
- Blender Version: {metadata.blender_version.file_saved_with} - (saved with — render version may differ) -
- )} -
Frames: {metadata.frame_start} - {metadata.frame_end}
-
Resolution: {metadata.render_settings?.resolution_x} x {metadata.render_settings?.resolution_y}
-
Frame Rate: {metadata.render_settings?.frame_rate || 24} fps
-
Engine: {metadata.render_settings?.engine}
- {metadata.render_settings?.engine_settings?.samples && ( -
Samples: {metadata.render_settings.engine_settings.samples}
- )} - {metadata.render_settings?.engine_settings?.taa_render_samples && ( -
EEVEE Samples: {metadata.render_settings.engine_settings.taa_render_samples}
- )} -
-
- - {/* Missing Files/Addons Warning */} - {metadata.missing_files_info?.has_missing && - metadata.missing_files_info.missing_addons && - metadata.missing_files_info.missing_addons.length > 0 && ( -
-
⚠️ Missing Addons Detected
-
-

The following addons are required by this blend file but are not available:

-
    - {metadata.missing_files_info.missing_addons.map((addon, idx) => ( -
  • {addon}
  • - ))} -
-

Rendering may fail or produce incorrect results without these addons.

-
-
- setConfirmedMissingFiles(e.target.checked)} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-900 rounded" - /> - -
-
- )} - - )} - - {/* Advanced Render Settings */} - {formData.render_settings && ( -
- - - {showAdvancedSettings && ( -
- {/* Engine Selection */} -
- - -
- - {/* Resolution */} -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - resolution_x: parseInt(e.target.value) || 1920, - } - })} - min="1" - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - resolution_y: parseInt(e.target.value) || 1080, - } - })} - min="1" - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - {/* Frame Rate */} -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - frame_rate: parseFloat(e.target.value) || 24.0, - } - })} - min="0.01" - max="120" - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -

- Warning: Adjusting FPS will cause slow-motion/fast-motion effects and may make the video look laggy. - This only changes the playback speed - the number of rendered frames stays the same. -

-
- - {/* Cycles Settings */} - {formData.render_settings.engine === 'cycles' && formData.render_settings.engine_settings && ( -
-
Cycles Settings
- - {/* Sampling Section */} -
-
Sampling
- - {/* Noise Threshold */} -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_adaptive_sampling: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - adaptive_threshold: parseFloat(e.target.value) || 0.01, - } - } - })} - disabled={formData.render_settings.engine_settings.use_adaptive_sampling === false} - min="0" - max="1" - className="flex-1 px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent disabled:opacity-50" - /> -
- - {/* Max Samples */} -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - samples: parseInt(e.target.value) || 4096, - } - } - })} - min="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
- - {/* Min Samples */} -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - adaptive_min_samples: parseInt(e.target.value) || 0, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
- - {/* Time Limit */} -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - time_limit: parseFloat(e.target.value) || 0, - } - } - })} - min="0" - step="any" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_denoising: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- - {formData.render_settings.engine_settings.use_denoising && ( -
- {/* Denoiser Info */} -
- Using OpenImageDenoise (GPU agnostic) -
- - {/* Passes */} -
- - -
- - {/* Prefilter */} -
- - -
- - {/* Quality (OpenImageDenoise only) */} - {(formData.render_settings.engine_settings.denoiser || 'OPENIMAGEDENOISE') === 'OPENIMAGEDENOISE' && ( -
- - -
- )} - -
- )} -
- - {/* Sampling > Path Guiding */} -
-
Sampling › Path Guiding
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_guiding: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- - {formData.render_settings.engine_settings.use_guiding && ( -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - guiding_training_samples: parseInt(e.target.value) || 128, - } - } - })} - min="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_surface_guiding: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_volume_guiding: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- )} -
- - {/* Sampling > Lights */} -
-
Sampling › Lights
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_light_tree: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- -
- - { - const val = parseFloat(e.target.value); - const clampedVal = isNaN(val) ? 0.01 : Math.max(0, Math.min(1, val)); - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - light_sampling_threshold: clampedVal, - } - } - }); - }} - min="0" - max="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - {/* Light Paths Section - matches Blender's panel */} -
-
Light Paths › Max Bounces
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - max_bounces: parseInt(e.target.value) || 12, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - diffuse_bounces: parseInt(e.target.value) || 4, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - glossy_bounces: parseInt(e.target.value) || 4, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - transmission_bounces: parseInt(e.target.value) || 12, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - volume_bounces: parseInt(e.target.value) || 0, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - transparent_max_bounces: parseInt(e.target.value) || 8, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - {/* Light Paths > Clamping */} -
-
Light Paths › Clamping
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - sample_clamp_direct: parseFloat(e.target.value) || 0, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - sample_clamp_indirect: parseFloat(e.target.value) || 0, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
-
- - {/* Light Paths > Caustics */} -
-
Light Paths › Caustics
- -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - caustics_reflective: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - caustics_refractive: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - blur_glossy: parseFloat(e.target.value) || 0, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - {/* Light Paths > Fast GI */} -
-
Light Paths › Fast GI Approximation
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_fast_gi: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- - {formData.render_settings.engine_settings.use_fast_gi && ( -
-
- - -
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - ao_bounces: parseInt(e.target.value) || 1, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - ao_bounces_render: parseInt(e.target.value) || 1, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
-
- )} -
- - {/* Curves (Hair) Section - matches Blender's panel */} -
-
Curves
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_hair: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- - {formData.render_settings.engine_settings.use_hair !== false && ( -
-
- - -
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - hair_subdivisions: parseInt(e.target.value) || 2, - } - } - })} - min="0" - max="24" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- )} -
- - {/* Volumes Section */} -
-
Volumes
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - volume_step_rate: parseFloat(e.target.value) || 1.0, - } - } - })} - min="0.01" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - volume_max_steps: parseInt(e.target.value) || 1024, - } - } - })} - min="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
-
- - {/* Film Section */} -
-
Film
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - film_exposure: parseFloat(e.target.value) || 1.0, - } - } - })} - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
- -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - film_transparent: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - film_transparent_glass: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- -
- - -
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - filter_width: parseFloat(e.target.value) || 1.5, - } - } - })} - min="0.01" - max="10" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - {/* Performance Section */} -
-
Performance
- -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_auto_tile: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_persistent_data: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - tile_size: parseInt(e.target.value) || 2048, - } - } - })} - min="8" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - {/* Simplify Section */} -
-
Simplify
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_simplify: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- - {formData.render_settings.engine_settings.use_simplify && ( -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - simplify_subdivision_render: parseInt(e.target.value) || 6, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - simplify_child_particles_render: parseFloat(e.target.value) || 1.0, - } - } - })} - min="0" - max="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- )} -
-
- )} - - {/* EEVEE Settings */} - {formData.render_settings.engine === 'eevee' && formData.render_settings.engine_settings && ( -
-
EEVEE Settings
- - {/* Sampling Section */} -
-
Sampling
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - taa_render_samples: parseInt(e.target.value) || 64, - } - } - })} - min="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - taa_samples: parseInt(e.target.value) || 16, - } - } - })} - min="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_taa_reprojection: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- - {/* Sampling > Clamping */} -
-
Sampling › Clamping
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - clamp_surface_direct: parseFloat(e.target.value) || 0, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - clamp_surface_indirect: parseFloat(e.target.value) || 0, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
-
- - {/* Shadows Section */} -
-
Shadows
- -
-
- - -
-
- - -
-
- -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_soft_shadows: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_shadow_high_bitdepth: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- -
- - { - const val = parseFloat(e.target.value); - const clampedVal = isNaN(val) ? 0.01 : Math.max(0, Math.min(1, val)); - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - light_threshold: clampedVal, - } - } - }); - }} - min="0" - max="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - {/* Raytracing Section */} -
-
Raytracing
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_raytracing: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- - {formData.render_settings.engine_settings.use_raytracing && ( -
- - -
- )} -
- - {/* Screen Space Reflections Section */} -
-
Screen Space Reflections
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_ssr: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- - {formData.render_settings.engine_settings.use_ssr && ( -
-
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_ssr_refraction: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_ssr_halfres: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - ssr_quality: parseFloat(e.target.value) || 0.25, - } - } - })} - min="0" - max="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - ssr_max_roughness: parseFloat(e.target.value) || 0.5, - } - } - })} - min="0" - max="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - ssr_thickness: parseFloat(e.target.value) || 0.2, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - ssr_border_fade: parseFloat(e.target.value) || 0.075, - } - } - })} - min="0" - max="0.5" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
-
- )} -
- - {/* Ambient Occlusion Section */} -
-
Ambient Occlusion
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_gtao: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- - {formData.render_settings.engine_settings.use_gtao && ( -
-
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - gtao_distance: parseFloat(e.target.value) || 0.2, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - gtao_factor: parseFloat(e.target.value) || 1.0, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_gtao_bent_normals: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_gtao_bounce: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
-
- )} -
- - {/* Bloom Section */} -
-
Bloom
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_bloom: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- - {formData.render_settings.engine_settings.use_bloom && ( -
-
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - bloom_threshold: parseFloat(e.target.value) || 0.8, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - bloom_knee: parseFloat(e.target.value) || 0.5, - } - } - })} - min="0" - max="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - bloom_radius: parseFloat(e.target.value) || 6.5, - } - } - })} - min="0" - max="2048" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - bloom_intensity: parseFloat(e.target.value) || 0.05, - } - } - })} - min="0" - max="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - bloom_clamp: parseFloat(e.target.value) || 0, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- )} -
- - {/* Depth of Field Section */} -
-
Depth of Field
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - bokeh_max_size: parseFloat(e.target.value) || 100, - } - } - })} - min="0" - max="2048" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - bokeh_threshold: parseFloat(e.target.value) || 1.0, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - bokeh_neighbor_max: parseFloat(e.target.value) || 10, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - bokeh_denoise_fac: parseFloat(e.target.value) || 0.75, - } - } - })} - min="0" - max="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - bokeh_overblur: parseFloat(e.target.value) || 5, - } - } - })} - min="0" - max="100" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_bokeh_high_quality_slight_defocus: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_bokeh_jittered: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
-
- - {/* Subsurface Scattering Section */} -
-
Subsurface Scattering
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - sss_samples: parseInt(e.target.value) || 7, - } - } - })} - min="1" - max="32" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - sss_jitter_threshold: parseFloat(e.target.value) || 0.3, - } - } - })} - min="0" - max="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - {/* Volumetrics Section */} -
-
Volumetrics
- -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_volumetric_lights: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_volumetric_shadows: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - volumetric_start: parseFloat(e.target.value) || 0.1, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - volumetric_end: parseFloat(e.target.value) || 100, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- -
-
- - -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - volumetric_samples: parseInt(e.target.value) || 64, - } - } - })} - min="1" - max="256" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - volumetric_sample_distribution: parseFloat(e.target.value) || 0.8, - } - } - })} - min="0" - max="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - {/* Motion Blur Section */} -
-
Motion Blur
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_motion_blur: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- - {formData.render_settings.engine_settings.use_motion_blur && ( -
-
- - -
- -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - motion_blur_shutter: parseFloat(e.target.value) || 0.5, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - motion_blur_steps: parseInt(e.target.value) || 1, - } - } - })} - min="1" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
-
-
- )} -
- - {/* Indirect Lighting Section */} -
-
Indirect Lighting
- -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - gi_diffuse_bounces: parseInt(e.target.value) || 3, - } - } - })} - min="0" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
- -
-
- - -
-
- - -
-
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - gi_auto_bake: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
-
- - {/* Film Section */} -
-
Film
- -
- setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - use_overscan: e.target.checked, - } - } - })} - className="h-4 w-4 text-orange-600 focus:ring-orange-500 border-gray-600 bg-gray-800 rounded" - /> - -
- - {formData.render_settings.engine_settings.use_overscan && ( -
- - setFormData({ - ...formData, - render_settings: { - ...formData.render_settings, - engine_settings: { - ...formData.render_settings.engine_settings, - overscan_size: parseFloat(e.target.value) || 3, - } - } - })} - min="0" - max="50" - className="w-full px-3 py-1.5 bg-gray-800 border border-gray-600 rounded text-gray-100 text-sm focus:ring-2 focus:ring-orange-500 focus:border-transparent" - /> -
- )} -
-
- )} -
- )} -
- )} - -
-
- Selected file: {file?.name} -
- {(isUploading || submitting) && ( -
-
- {isUploading ? 'Uploading file...' : 'Creating job...'} - {isUploading && ( -
- {uploadTimeRemaining && ( - - ~{formatTimeRemaining(uploadTimeRemaining)} remaining - - )} - {Math.round(uploadProgress)}% -
- )} -
- {isUploading && ( -
-
-
- )} -
- )} - - {metadata?.missing_files_info?.has_missing && !confirmedMissingFiles && ( -

- Please confirm that you want to proceed with missing addons -

- )} -
-
- )} -
- ); -} - diff --git a/web/src/components/Layout.jsx b/web/src/components/Layout.jsx deleted file mode 100644 index 8562b62..0000000 --- a/web/src/components/Layout.jsx +++ /dev/null @@ -1,76 +0,0 @@ -import { useAuth } from '../hooks/useAuth'; - -export default function Layout({ children, activeTab, onTabChange }) { - const { user, logout } = useAuth(); - const isAdmin = user?.is_admin || false; - - // Note: If user becomes null, App.jsx will handle showing Login component - // We don't need to redirect here as App.jsx already checks for !user - - return ( -
-
-
-
-

- JiggaBlend -

-
- {user?.name || user?.email} - -
-
-
-
- - - -
- {children} -
-
- ); -} - diff --git a/web/src/components/LoadingSpinner.jsx b/web/src/components/LoadingSpinner.jsx deleted file mode 100644 index 3a3d155..0000000 --- a/web/src/components/LoadingSpinner.jsx +++ /dev/null @@ -1,19 +0,0 @@ -import React from 'react'; - -/** - * Shared LoadingSpinner component with size variants - */ -export default function LoadingSpinner({ size = 'md', className = '', borderColor = 'border-orange-500' }) { - const sizeClasses = { - sm: 'h-8 w-8', - md: 'h-12 w-12', - lg: 'h-16 w-16', - }; - - return ( -
-
-
- ); -} - diff --git a/web/src/components/Login.jsx b/web/src/components/Login.jsx deleted file mode 100644 index 0ae2d35..0000000 --- a/web/src/components/Login.jsx +++ /dev/null @@ -1,277 +0,0 @@ -import { useState, useEffect } from 'react'; -import { auth } from '../utils/api'; -import ErrorMessage from './ErrorMessage'; - -export default function Login() { - const [providers, setProviders] = useState({ - google: false, - discord: false, - local: false, - }); - const [showRegister, setShowRegister] = useState(false); - const [email, setEmail] = useState(''); - const [name, setName] = useState(''); - const [username, setUsername] = useState(''); - const [password, setPassword] = useState(''); - const [confirmPassword, setConfirmPassword] = useState(''); - const [error, setError] = useState(''); - const [loading, setLoading] = useState(false); - - useEffect(() => { - checkAuthProviders(); - // Check for registration disabled error in URL - const urlParams = new URLSearchParams(window.location.search); - if (urlParams.get('error') === 'registration_disabled') { - setError('Registration is currently disabled. 
Please contact an administrator.'); - } - }, []); - - const checkAuthProviders = async () => { - try { - const result = await auth.getProviders(); - setProviders({ - google: result.google || false, - discord: result.discord || false, - local: result.local || false, - }); - } catch (error) { - // If endpoint fails, assume no providers are available - console.error('Failed to check auth providers:', error); - setProviders({ google: false, discord: false, local: false }); - } - }; - - const handleLocalLogin = async (e) => { - e.preventDefault(); - setError(''); - setLoading(true); - - try { - await auth.localLogin(username, password); - // Reload page to trigger auth check in App component - window.location.reload(); - } catch (err) { - setError(err.message || 'Login failed'); - setLoading(false); - } - }; - - const handleLocalRegister = async (e) => { - e.preventDefault(); - setError(''); - - if (password !== confirmPassword) { - setError('Passwords do not match'); - return; - } - - if (password.length < 8) { - setError('Password must be at least 8 characters long'); - return; - } - - setLoading(true); - - try { - await auth.localRegister(email, name, password); - // Reload page to trigger auth check in App component - window.location.reload(); - } catch (err) { - setError(err.message || 'Registration failed'); - setLoading(false); - } - }; - - return ( -
-
-
-

- JiggaBlend -

-

Blender Render Farm

-
- -
- - {providers.local && ( -
-
- - -
- - {!showRegister ? ( -
-
- - setUsername(e.target.value)} - required - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent placeholder-gray-500" - placeholder="Enter your email" - /> -
-
- - setPassword(e.target.value)} - required - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent placeholder-gray-500" - placeholder="Enter password" - /> -
- -
- ) : ( -
-
- - setEmail(e.target.value)} - required - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent placeholder-gray-500" - placeholder="Enter your email" - /> -
-
- - setName(e.target.value)} - required - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent placeholder-gray-500" - placeholder="Enter your name" - /> -
-
- - setPassword(e.target.value)} - required - minLength={8} - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent placeholder-gray-500" - placeholder="At least 8 characters" - /> -
-
- - setConfirmPassword(e.target.value)} - required - minLength={8} - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent placeholder-gray-500" - placeholder="Confirm password" - /> -
- -
- )} -
- )} - - {providers.google && ( - - - - - - - - Continue with Google - - )} - - {providers.discord && ( - - - - - Continue with Discord - - )} - - {!providers.google && !providers.discord && !providers.local && ( -
- No authentication methods are configured. Please contact an administrator. -
- )} -
-
-
- ); -} - diff --git a/web/src/components/PasswordChange.jsx b/web/src/components/PasswordChange.jsx deleted file mode 100644 index 1b76c93..0000000 --- a/web/src/components/PasswordChange.jsx +++ /dev/null @@ -1,137 +0,0 @@ -import { useState } from 'react'; -import { auth } from '../utils/api'; -import ErrorMessage from './ErrorMessage'; -import { useAuth } from '../hooks/useAuth'; - -export default function PasswordChange({ targetUserId = null, targetUserName = null, onSuccess }) { - const { user } = useAuth(); - const [oldPassword, setOldPassword] = useState(''); - const [newPassword, setNewPassword] = useState(''); - const [confirmPassword, setConfirmPassword] = useState(''); - const [error, setError] = useState(''); - const [success, setSuccess] = useState(''); - const [loading, setLoading] = useState(false); - - const isAdmin = user?.is_admin || false; - const isChangingOtherUser = targetUserId !== null && isAdmin; - - const handleSubmit = async (e) => { - e.preventDefault(); - setError(''); - setSuccess(''); - - if (newPassword !== confirmPassword) { - setError('New passwords do not match'); - return; - } - - if (newPassword.length < 8) { - setError('Password must be at least 8 characters long'); - return; - } - - if (!isChangingOtherUser && !oldPassword) { - setError('Old password is required'); - return; - } - - setLoading(true); - - try { - await auth.changePassword( - isChangingOtherUser ? null : oldPassword, - newPassword, - isChangingOtherUser ? targetUserId : null - ); - setSuccess('Password changed successfully'); - setOldPassword(''); - setNewPassword(''); - setConfirmPassword(''); - if (onSuccess) { - setTimeout(() => { - onSuccess(); - }, 1500); - } - } catch (err) { - setError(err.message || 'Failed to change password'); - } finally { - setLoading(false); - } - }; - - return ( -
-

- {isChangingOtherUser ? `Change Password for ${targetUserName || 'User'}` : 'Change Password'} -

- - - - {success && ( -
- {success} -
- )} - -
- {!isChangingOtherUser && ( -
- - setOldPassword(e.target.value)} - required - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent" - placeholder="Enter current password" - /> -
- )} - -
- - setNewPassword(e.target.value)} - required - minLength={8} - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent" - placeholder="At least 8 characters" - /> -
- -
- - setConfirmPassword(e.target.value)} - required - minLength={8} - className="w-full px-4 py-2 bg-gray-900 border border-gray-600 rounded-lg text-gray-100 focus:ring-2 focus:ring-orange-500 focus:border-transparent" - placeholder="Confirm new password" - /> -
- - -
-
- ); -} - diff --git a/web/src/components/UserJobs.jsx b/web/src/components/UserJobs.jsx deleted file mode 100644 index 8054e14..0000000 --- a/web/src/components/UserJobs.jsx +++ /dev/null @@ -1,179 +0,0 @@ -import { useState, useEffect, useRef } from 'react'; -import { admin, normalizeArrayResponse } from '../utils/api'; -import { wsManager } from '../utils/websocket'; -import JobDetails from './JobDetails'; -import LoadingSpinner from './LoadingSpinner'; - -export default function UserJobs({ userId, userName, onBack }) { - const [jobList, setJobList] = useState([]); - const [loading, setLoading] = useState(true); - const [selectedJob, setSelectedJob] = useState(null); - const listenerIdRef = useRef(null); - - useEffect(() => { - loadJobs(); - // Use shared WebSocket manager for real-time updates instead of polling - listenerIdRef.current = wsManager.subscribe(`userjobs_${userId}`, { - open: () => { - console.log('UserJobs: Shared WebSocket connected'); - loadJobs(); - }, - message: (data) => { - // Handle jobs channel messages (always broadcasted) - if (data.channel === 'jobs') { - if (data.type === 'job_update' && data.data) { - // Update job in list if it belongs to this user - setJobList(prev => { - const prevArray = Array.isArray(prev) ? 
prev : []; - const index = prevArray.findIndex(j => j.id === data.job_id); - if (index >= 0) { - const updated = [...prevArray]; - updated[index] = { ...updated[index], ...data.data }; - return updated; - } - // If job not in current list, reload to get updated list - if (data.data.status === 'completed' || data.data.status === 'failed') { - loadJobs(); - } - return prevArray; - }); - } else if (data.type === 'job_created' && data.data) { - // New job created - reload to check if it belongs to this user - loadJobs(); - } - } - }, - error: (error) => { - console.error('UserJobs: Shared WebSocket error:', error); - }, - close: (event) => { - console.log('UserJobs: Shared WebSocket closed:', event); - } - }); - - // Ensure connection is established - wsManager.connect(); - - return () => { - if (listenerIdRef.current) { - wsManager.unsubscribe(listenerIdRef.current); - listenerIdRef.current = null; - } - }; - }, [userId]); - - const loadJobs = async () => { - try { - const data = await admin.getUserJobs(userId); - setJobList(normalizeArrayResponse(data)); - } catch (error) { - console.error('Failed to load jobs:', error); - setJobList([]); - } finally { - setLoading(false); - } - }; - - const getStatusColor = (status) => { - const colors = { - pending: 'bg-yellow-400/20 text-yellow-400', - running: 'bg-orange-400/20 text-orange-400', - completed: 'bg-green-400/20 text-green-400', - failed: 'bg-red-400/20 text-red-400', - cancelled: 'bg-gray-500/20 text-gray-400', - }; - return colors[status] || colors.pending; - }; - - if (selectedJob) { - return ( - setSelectedJob(null)} - onUpdate={loadJobs} - /> - ); - } - - if (loading) { - return ; - } - - return ( -
-
-
- -

Jobs for {userName}

-
-
- - {jobList.length === 0 ? ( -
-

No jobs found for this user.

-
- ) : ( -
- {jobList.map((job) => ( -
-
-
-

{job.name}

-

- {job.job_type === 'render' && job.frame_start !== null && job.frame_end !== null - ? `Frames ${job.frame_start}-${job.frame_end}` - : 'Metadata extraction'} -

-
- - {job.status} - -
- - {job.status === 'running' && ( -
-
- Progress - {Math.round(job.progress)}% -
-
-
-
-
- )} - -
-
- Created: {new Date(job.created_at).toLocaleString()} -
- -
-
- ))} -
- )} -
- ); -} - diff --git a/web/src/components/VideoPlayer.jsx b/web/src/components/VideoPlayer.jsx deleted file mode 100644 index 3c23f0c..0000000 --- a/web/src/components/VideoPlayer.jsx +++ /dev/null @@ -1,90 +0,0 @@ -import { useState, useRef, useEffect } from 'react'; -import ErrorMessage from './ErrorMessage'; -import LoadingSpinner from './LoadingSpinner'; - -export default function VideoPlayer({ videoUrl, onClose }) { - const videoRef = useRef(null); - const [loading, setLoading] = useState(true); - const [error, setError] = useState(null); - - useEffect(() => { - const video = videoRef.current; - if (!video || !videoUrl) return; - - const handleCanPlay = () => { - setLoading(false); - }; - - const handleError = (e) => { - console.error('Video playback error:', e, video.error); - // Get more detailed error information - let errorMsg = 'Failed to load video'; - if (video.error) { - switch (video.error.code) { - case video.error.MEDIA_ERR_ABORTED: - errorMsg = 'Video loading aborted'; - break; - case video.error.MEDIA_ERR_NETWORK: - errorMsg = 'Network error while loading video'; - break; - case video.error.MEDIA_ERR_DECODE: - errorMsg = 'Video decoding error'; - break; - case video.error.MEDIA_ERR_SRC_NOT_SUPPORTED: - errorMsg = 'Video format not supported'; - break; - } - } - setError(errorMsg); - setLoading(false); - }; - - const handleLoadStart = () => { - setLoading(true); - setError(null); - }; - - video.addEventListener('canplay', handleCanPlay); - video.addEventListener('error', handleError); - video.addEventListener('loadstart', handleLoadStart); - - return () => { - video.removeEventListener('canplay', handleCanPlay); - video.removeEventListener('error', handleError); - video.removeEventListener('loadstart', handleLoadStart); - }; - }, [videoUrl]); - - if (error) { - return ( - - ); - } - - return ( -
- {loading && ( -
- -
- )} - -
- ); -} - diff --git a/web/src/hooks/useAuth.js b/web/src/hooks/useAuth.js deleted file mode 100644 index 78b5c82..0000000 --- a/web/src/hooks/useAuth.js +++ /dev/null @@ -1,88 +0,0 @@ -import { useState, useEffect, useRef } from 'react'; -import { auth, setAuthErrorHandler } from '../utils/api'; - -export function useAuth() { - const [user, setUser] = useState(null); - const [loading, setLoading] = useState(true); - const userRef = useRef(user); - - // Keep userRef in sync with user state - useEffect(() => { - userRef.current = user; - }, [user]); - - const checkAuth = async () => { - try { - const userData = await auth.getMe(); - setUser(userData); - setLoading(false); - return userData; // Return user data for verification - } catch (error) { - // If 401/403, user is not authenticated - // Check if it's an auth error - if (error.message && (error.message.includes('Unauthorized') || error.message.includes('401') || error.message.includes('403'))) { - setUser(null); - setLoading(false); - throw error; // Re-throw to allow caller to handle - } else { - // Other errors (network, etc.) 
- don't log out, just re-throw - // This prevents network issues from logging users out - setLoading(false); - throw error; // Re-throw to allow caller to handle - } - } - }; - - const logout = async () => { - try { - await auth.logout(); - } catch (error) { - console.error('Logout error:', error); - } finally { - // Refresh the page to show login - window.location.reload(); - } - }; - - useEffect(() => { - // Set up global auth error handler - setAuthErrorHandler(() => { - setUser(null); - setLoading(false); - }); - - // Listen for auth errors from API calls - const handleAuthErrorEvent = () => { - setUser(null); - setLoading(false); - }; - window.addEventListener('auth-error', handleAuthErrorEvent); - - // Initial auth check - checkAuth(); - - // Periodic auth check every 10 seconds - const authInterval = setInterval(() => { - // Use ref to check current user state without dependency - if (userRef.current) { - // Only check if we have a user (don't spam when logged out) - checkAuth().catch((error) => { - // Only log out if it's actually an auth error, not a network error - // Network errors shouldn't log the user out - if (error.message && (error.message.includes('Unauthorized') || error.message.includes('401') || error.message.includes('403'))) { - // This is a real auth error - user will be set to null by checkAuth - } - // For other errors (network, etc.), don't log out - just silently fail - }); - } - }, 10000); // 10 seconds - - return () => { - window.removeEventListener('auth-error', handleAuthErrorEvent); - clearInterval(authInterval); - }; - }, []); // Empty deps - only run on mount/unmount - - return { user, loading, logout, refresh: checkAuth }; -} - diff --git a/web/src/main.jsx b/web/src/main.jsx deleted file mode 100644 index 4813ff5..0000000 --- a/web/src/main.jsx +++ /dev/null @@ -1,10 +0,0 @@ -import React from 'react' -import ReactDOM from 'react-dom/client' -import App from './App.jsx' - 
-ReactDOM.createRoot(document.getElementById('root')).render( - - - , -) - diff --git a/web/src/styles/index.css b/web/src/styles/index.css deleted file mode 100644 index c3f8e8f..0000000 --- a/web/src/styles/index.css +++ /dev/null @@ -1,14 +0,0 @@ -@tailwind base; -@tailwind components; -@tailwind utilities; - -body { - margin: 0; - font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', - 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue', - sans-serif; - -webkit-font-smoothing: antialiased; - -moz-osx-font-smoothing: grayscale; - @apply bg-gray-900 text-gray-100; -} - diff --git a/web/src/utils/api.js b/web/src/utils/api.js deleted file mode 100644 index c22665c..0000000 --- a/web/src/utils/api.js +++ /dev/null @@ -1,552 +0,0 @@ -const API_BASE = '/api'; - -// Global auth error handler - will be set by useAuth hook -let onAuthError = null; - -// Request debouncing and deduplication -const pendingRequests = new Map(); // key: endpoint+params, value: Promise -const requestQueue = new Map(); // key: endpoint+params, value: { resolve, reject, timestamp } -const DEBOUNCE_DELAY = 100; // 100ms debounce delay -const DEDUPE_WINDOW = 5000; // 5 seconds - same request within this window uses cached promise - -// Generate cache key from endpoint and params -function getCacheKey(endpoint, options = {}) { - const params = new URLSearchParams(); - Object.keys(options).sort().forEach(key => { - if (options[key] !== undefined && options[key] !== null) { - params.append(key, String(options[key])); - } - }); - const query = params.toString(); - return `${endpoint}${query ? '?' + query : ''}`; -} - -// Utility function to normalize array responses (handles both old and new formats) -export function normalizeArrayResponse(response) { - const data = response?.data || response; - return Array.isArray(data) ? 
data : []; -} - -// Sentinel value to indicate a request was superseded (instead of rejecting) -// Export it so components can check for it -export const REQUEST_SUPERSEDED = Symbol('REQUEST_SUPERSEDED'); - -// Debounced request wrapper -function debounceRequest(key, requestFn, delay = DEBOUNCE_DELAY) { - return new Promise((resolve, reject) => { - // Check if there's a pending request for this key - if (pendingRequests.has(key)) { - const pending = pendingRequests.get(key); - // If request is very recent (within dedupe window), reuse it - const now = Date.now(); - if (pending.timestamp && (now - pending.timestamp) < DEDUPE_WINDOW) { - pending.promise.then(resolve).catch(reject); - return; - } else { - // Request is older than dedupe window - remove it and create new one - pendingRequests.delete(key); - } - } - - // Clear any existing timeout for this key - if (requestQueue.has(key)) { - const queued = requestQueue.get(key); - clearTimeout(queued.timeout); - // Resolve with sentinel value instead of rejecting - this prevents errors from propagating - // The new request will handle the actual response - queued.resolve(REQUEST_SUPERSEDED); - } - - // Queue new request - const timeout = setTimeout(() => { - requestQueue.delete(key); - const promise = requestFn(); - const timestamp = Date.now(); - pendingRequests.set(key, { promise, timestamp }); - - promise - .then(result => { - pendingRequests.delete(key); - resolve(result); - }) - .catch(error => { - pendingRequests.delete(key); - reject(error); - }); - }, delay); - - requestQueue.set(key, { resolve, reject, timeout }); - }); -} - -export const setAuthErrorHandler = (handler) => { - onAuthError = handler; -}; - -// Whitelist of endpoints that should NOT trigger auth error handling -// These are endpoints that can legitimately return 401/403 without meaning the user is logged out -const AUTH_CHECK_ENDPOINTS = ['/auth/me', '/auth/logout']; - -const handleAuthError = (response, endpoint) => { - if (response.status === 
401 || response.status === 403) { - // Don't trigger auth error handler for endpoints that check auth status - if (AUTH_CHECK_ENDPOINTS.includes(endpoint)) { - return; - } - // Trigger auth error handler if set (this will clear user state) - if (onAuthError) { - onAuthError(); - } - // Force a re-check of auth status to ensure login is shown - // This ensures the App component re-renders with user=null - if (typeof window !== 'undefined') { - // Dispatch a custom event that useAuth can listen to - window.dispatchEvent(new CustomEvent('auth-error')); - } - } -}; - -// Extract error message from response - centralized to avoid duplication -async function extractErrorMessage(response) { - try { - const errorData = await response.json(); - return errorData?.error || response.statusText; - } catch { - return response.statusText; - } -} - -export const api = { - async get(endpoint, options = {}) { - const abortController = options.signal || new AbortController(); - const response = await fetch(`${API_BASE}${endpoint}`, { - credentials: 'include', // Include cookies for session - signal: abortController.signal, - }); - if (!response.ok) { - // Handle auth errors before parsing response - handleAuthError(response, endpoint); - const errorMessage = await extractErrorMessage(response); - throw new Error(errorMessage); - } - return response.json(); - }, - - async post(endpoint, data, options = {}) { - const abortController = options.signal || new AbortController(); - const response = await fetch(`${API_BASE}${endpoint}`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(data), - credentials: 'include', // Include cookies for session - signal: abortController.signal, - }); - if (!response.ok) { - // Handle auth errors before parsing response - handleAuthError(response, endpoint); - const errorMessage = await extractErrorMessage(response); - throw new Error(errorMessage); - } - return response.json(); - }, - - async patch(endpoint, 
data, options = {}) { - const abortController = options.signal || new AbortController(); - const response = await fetch(`${API_BASE}${endpoint}`, { - method: 'PATCH', - headers: { 'Content-Type': 'application/json' }, - body: data ? JSON.stringify(data) : undefined, - credentials: 'include', // Include cookies for session - signal: abortController.signal, - }); - if (!response.ok) { - // Handle auth errors before parsing response - handleAuthError(response, endpoint); - const errorMessage = await extractErrorMessage(response); - throw new Error(errorMessage); - } - return response.json(); - }, - - async delete(endpoint, options = {}) { - const abortController = options.signal || new AbortController(); - const response = await fetch(`${API_BASE}${endpoint}`, { - method: 'DELETE', - credentials: 'include', // Include cookies for session - signal: abortController.signal, - }); - if (!response.ok) { - // Handle auth errors before parsing response - handleAuthError(response, endpoint); - const errorMessage = await extractErrorMessage(response); - throw new Error(errorMessage); - } - return response.json(); - }, - - async uploadFile(endpoint, file, onProgress, mainBlendFile) { - return new Promise((resolve, reject) => { - const formData = new FormData(); - formData.append('file', file); - if (mainBlendFile) { - formData.append('main_blend_file', mainBlendFile); - } - - const xhr = new XMLHttpRequest(); - - // Track upload progress - if (onProgress) { - xhr.upload.addEventListener('progress', (e) => { - if (e.lengthComputable) { - const percentComplete = (e.loaded / e.total) * 100; - onProgress(percentComplete); - } - }); - } - - xhr.addEventListener('load', () => { - if (xhr.status >= 200 && xhr.status < 300) { - try { - const response = JSON.parse(xhr.responseText); - resolve(response); - } catch (err) { - resolve(xhr.responseText); - } - } else { - // Handle auth errors - if (xhr.status === 401 || xhr.status === 403) { - handleAuthError({ status: xhr.status }, 
endpoint); - } - try { - const errorData = JSON.parse(xhr.responseText); - reject(new Error(errorData.error || xhr.statusText)); - } catch { - reject(new Error(xhr.statusText)); - } - } - }); - - xhr.addEventListener('error', () => { - reject(new Error('Upload failed')); - }); - - xhr.addEventListener('abort', () => { - reject(new Error('Upload aborted')); - }); - - xhr.open('POST', `${API_BASE}${endpoint}`); - xhr.withCredentials = true; // Include cookies for session - xhr.send(formData); - }); - }, -}; - -export const auth = { - async getMe() { - return api.get('/auth/me'); - }, - - async logout() { - return api.post('/auth/logout'); - }, - - async getProviders() { - return api.get('/auth/providers'); - }, - - async isLocalLoginAvailable() { - return api.get('/auth/local/available'); - }, - - async localRegister(email, name, password) { - return api.post('/auth/local/register', { email, name, password }); - }, - - async localLogin(username, password) { - return api.post('/auth/local/login', { username, password }); - }, - - async changePassword(oldPassword, newPassword, targetUserId = null) { - const body = { old_password: oldPassword, new_password: newPassword }; - if (targetUserId !== null) { - body.target_user_id = targetUserId; - } - return api.post('/auth/change-password', body); - }, -}; - -export const jobs = { - async list(options = {}) { - const key = getCacheKey('/jobs', options); - return debounceRequest(key, () => { - const params = new URLSearchParams(); - if (options.limit) params.append('limit', options.limit.toString()); - if (options.offset) params.append('offset', options.offset.toString()); - if (options.status) params.append('status', options.status); - if (options.sort) params.append('sort', options.sort); - const query = params.toString(); - return api.get(`/jobs${query ? '?' 
+ query : ''}`); - }); - }, - - async listSummary(options = {}) { - const key = getCacheKey('/jobs/summary', options); - return debounceRequest(key, () => { - const params = new URLSearchParams(); - if (options.limit) params.append('limit', options.limit.toString()); - if (options.offset) params.append('offset', options.offset.toString()); - if (options.status) params.append('status', options.status); - if (options.sort) params.append('sort', options.sort); - const query = params.toString(); - return api.get(`/jobs/summary${query ? '?' + query : ''}`, options); - }); - }, - - async get(id, options = {}) { - const key = getCacheKey(`/jobs/${id}`, options); - return debounceRequest(key, async () => { - if (options.etag) { - // Include ETag in request headers for conditional requests - const headers = { 'If-None-Match': options.etag }; - const response = await fetch(`${API_BASE}/jobs/${id}`, { - credentials: 'include', - headers, - }); - if (response.status === 304) { - return null; // Not modified - } - if (!response.ok) { - const errorData = await response.json().catch(() => null); - throw new Error(errorData?.error || response.statusText); - } - return response.json(); - } - return api.get(`/jobs/${id}`, options); - }); - }, - - async create(jobData) { - return api.post('/jobs', jobData); - }, - - async cancel(id) { - return api.delete(`/jobs/${id}`); - }, - - async delete(id) { - return api.post(`/jobs/${id}/delete`); - }, - - async uploadFile(jobId, file, onProgress, mainBlendFile) { - return api.uploadFile(`/jobs/${jobId}/upload`, file, onProgress, mainBlendFile); - }, - - async uploadFileForJobCreation(file, onProgress, mainBlendFile) { - return api.uploadFile(`/jobs/upload`, file, onProgress, mainBlendFile); - }, - - async getFiles(jobId, options = {}) { - const key = getCacheKey(`/jobs/${jobId}/files`, options); - return debounceRequest(key, () => { - const params = new URLSearchParams(); - if (options.limit) params.append('limit', options.limit.toString()); 
- if (options.offset) params.append('offset', options.offset.toString()); - if (options.file_type) params.append('file_type', options.file_type); - if (options.extension) params.append('extension', options.extension); - const query = params.toString(); - return api.get(`/jobs/${jobId}/files${query ? '?' + query : ''}`, options); - }); - }, - - async getFilesCount(jobId, options = {}) { - const key = getCacheKey(`/jobs/${jobId}/files/count`, options); - return debounceRequest(key, () => { - const params = new URLSearchParams(); - if (options.file_type) params.append('file_type', options.file_type); - const query = params.toString(); - return api.get(`/jobs/${jobId}/files/count${query ? '?' + query : ''}`); - }); - }, - - async getContextArchive(jobId, options = {}) { - return api.get(`/jobs/${jobId}/context`, options); - }, - - downloadFile(jobId, fileId) { - return `${API_BASE}/jobs/${jobId}/files/${fileId}/download`; - }, - - previewEXR(jobId, fileId) { - return `${API_BASE}/jobs/${jobId}/files/${fileId}/preview-exr`; - }, - - getVideoUrl(jobId) { - return `${API_BASE}/jobs/${jobId}/video`; - }, - - async getTaskLogs(jobId, taskId, options = {}) { - const key = getCacheKey(`/jobs/${jobId}/tasks/${taskId}/logs`, options); - return debounceRequest(key, async () => { - const params = new URLSearchParams(); - if (options.stepName) params.append('step_name', options.stepName); - if (options.logLevel) params.append('log_level', options.logLevel); - if (options.limit) params.append('limit', options.limit.toString()); - if (options.sinceId) params.append('since_id', options.sinceId.toString()); - const query = params.toString(); - const result = await api.get(`/jobs/${jobId}/tasks/${taskId}/logs${query ? '?' + query : ''}`, options); - // Handle both old format (array) and new format (object with logs, last_id, limit) - if (Array.isArray(result)) { - return { logs: result, last_id: result.length > 0 ? 
result[result.length - 1].id : 0, limit: options.limit || 100 }; - } - return result; - }); - }, - - async getTaskSteps(jobId, taskId, options = {}) { - return api.get(`/jobs/${jobId}/tasks/${taskId}/steps`, options); - }, - - // New unified client WebSocket - DEPRECATED: Use wsManager from websocket.js instead - // This is kept for backwards compatibility but should not be used - streamClientWebSocket() { - console.warn('streamClientWebSocket() is deprecated - use wsManager from websocket.js instead'); - const wsProtocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:'; - const wsHost = window.location.host; - const url = `${wsProtocol}//${wsHost}${API_BASE}/ws`; - return new WebSocket(url); - }, - - // Old WebSocket methods (to be removed after migration) - streamTaskLogsWebSocket(jobId, taskId, lastId = 0) { - // Convert HTTP to WebSocket URL - const wsProtocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:'; - const wsHost = window.location.host; - const url = `${wsProtocol}//${wsHost}${API_BASE}/jobs/${jobId}/tasks/${taskId}/logs/ws?last_id=${lastId}`; - return new WebSocket(url); - }, - - streamJobsWebSocket() { - const wsProtocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:'; - const wsHost = window.location.host; - const url = `${wsProtocol}//${wsHost}${API_BASE}/jobs/ws-old`; - return new WebSocket(url); - }, - - streamJobWebSocket(jobId) { - const wsProtocol = window.location.protocol === 'https:' ? 
'wss:' : 'ws:'; - const wsHost = window.location.host; - const url = `${wsProtocol}//${wsHost}${API_BASE}/jobs/${jobId}/ws`; - return new WebSocket(url); - }, - - async retryTask(jobId, taskId) { - return api.post(`/jobs/${jobId}/tasks/${taskId}/retry`); - }, - - async getMetadata(jobId) { - return api.get(`/jobs/${jobId}/metadata`); - }, - - async getTasks(jobId, options = {}) { - const key = getCacheKey(`/jobs/${jobId}/tasks`, options); - return debounceRequest(key, () => { - const params = new URLSearchParams(); - if (options.limit) params.append('limit', options.limit.toString()); - if (options.offset) params.append('offset', options.offset.toString()); - if (options.status) params.append('status', options.status); - if (options.frameStart) params.append('frame_start', options.frameStart.toString()); - if (options.frameEnd) params.append('frame_end', options.frameEnd.toString()); - if (options.sort) params.append('sort', options.sort); - const query = params.toString(); - return api.get(`/jobs/${jobId}/tasks${query ? '?' + query : ''}`, options); - }); - }, - - async getTasksSummary(jobId, options = {}) { - const key = getCacheKey(`/jobs/${jobId}/tasks/summary`, options); - return debounceRequest(key, () => { - const params = new URLSearchParams(); - if (options.limit) params.append('limit', options.limit.toString()); - if (options.offset) params.append('offset', options.offset.toString()); - if (options.status) params.append('status', options.status); - if (options.sort) params.append('sort', options.sort); - const query = params.toString(); - return api.get(`/jobs/${jobId}/tasks/summary${query ? '?' 
+ query : ''}`, options); - }); - }, - - async batchGetJobs(jobIds) { - // Sort jobIds for consistent cache key - const sortedIds = [...jobIds].sort((a, b) => a - b); - const key = getCacheKey('/jobs/batch', { job_ids: sortedIds.join(',') }); - return debounceRequest(key, () => { - return api.post('/jobs/batch', { job_ids: jobIds }); - }); - }, - - async batchGetTasks(jobId, taskIds) { - // Sort taskIds for consistent cache key - const sortedIds = [...taskIds].sort((a, b) => a - b); - const key = getCacheKey(`/jobs/${jobId}/tasks/batch`, { task_ids: sortedIds.join(',') }); - return debounceRequest(key, () => { - return api.post(`/jobs/${jobId}/tasks/batch`, { task_ids: taskIds }); - }); - }, -}; - -export const runners = { - // Non-admin runner list removed - use admin.listRunners() instead -}; - -export const admin = { - async generateAPIKey(name, description, scope) { - const data = { name, scope }; - if (description) data.description = description; - return api.post('/admin/runners/api-keys', data); - }, - - async listAPIKeys() { - return api.get('/admin/runners/api-keys'); - }, - - async revokeAPIKey(keyId) { - return api.patch(`/admin/runners/api-keys/${keyId}/revoke`); - }, - - async deleteAPIKey(keyId) { - return api.delete(`/admin/runners/api-keys/${keyId}`); - }, - - async listRunners() { - return api.get('/admin/runners'); - }, - - async verifyRunner(runnerId) { - return api.post(`/admin/runners/${runnerId}/verify`); - }, - - async deleteRunner(runnerId) { - return api.delete(`/admin/runners/${runnerId}`); - }, - - async listUsers() { - return api.get('/admin/users'); - }, - - async getUserJobs(userId) { - return api.get(`/admin/users/${userId}/jobs`); - }, - - async setUserAdminStatus(userId, isAdmin) { - return api.post(`/admin/users/${userId}/admin`, { is_admin: isAdmin }); - }, - - async getRegistrationEnabled() { - return api.get('/admin/settings/registration'); - }, - - async setRegistrationEnabled(enabled) { - return 
api.post('/admin/settings/registration', { enabled }); - }, -}; - diff --git a/web/src/utils/websocket.js b/web/src/utils/websocket.js deleted file mode 100644 index 223b802..0000000 --- a/web/src/utils/websocket.js +++ /dev/null @@ -1,271 +0,0 @@ -// Shared WebSocket connection manager -// All components should use this instead of creating their own connections - -class WebSocketManager { - constructor() { - this.ws = null; - this.listeners = new Map(); // Map of listener IDs to callback functions - this.reconnectTimeout = null; - this.reconnectDelay = 2000; - this.isConnecting = false; - this.listenerIdCounter = 0; - this.verboseLogging = false; // Set to true to enable verbose WebSocket logging - - // Track server-side channel subscriptions for re-subscription on reconnect - this.serverSubscriptions = new Set(); // Channels we want to be subscribed to - this.confirmedSubscriptions = new Set(); // Channels confirmed by server - this.pendingSubscriptions = new Set(); // Channels waiting for confirmation - } - - connect() { - // If already connected or connecting, don't create a new connection - if (this.ws && (this.ws.readyState === WebSocket.CONNECTING || this.ws.readyState === WebSocket.OPEN)) { - return; - } - - if (this.isConnecting) { - return; - } - - this.isConnecting = true; - - try { - const wsProtocol = window.location.protocol === 'https:' ? 
'wss:' : 'ws:'; - const wsHost = window.location.host; - const API_BASE = '/api'; - const url = `${wsProtocol}//${wsHost}${API_BASE}/jobs/ws`; - - this.ws = new WebSocket(url); - - this.ws.onopen = () => { - if (this.verboseLogging) { - console.log('Shared WebSocket connected'); - } - this.isConnecting = false; - - // Re-subscribe to all channels that were previously subscribed - this.resubscribeToChannels(); - - this.notifyListeners('open', {}); - }; - - this.ws.onmessage = (event) => { - try { - const data = JSON.parse(event.data); - if (this.verboseLogging) { - console.log('WebSocketManager: Message received:', data.type, data.channel || 'no channel', data); - } - this.notifyListeners('message', data); - } catch (error) { - console.error('WebSocketManager: Failed to parse message:', error, 'Raw data:', event.data); - } - }; - - this.ws.onerror = (error) => { - console.error('Shared WebSocket error:', error); - this.isConnecting = false; - this.notifyListeners('error', error); - }; - - this.ws.onclose = (event) => { - if (this.verboseLogging) { - console.log('Shared WebSocket closed:', { - code: event.code, - reason: event.reason, - wasClean: event.wasClean - }); - } - this.ws = null; - this.isConnecting = false; - - // Clear confirmed/pending but keep serverSubscriptions for re-subscription - this.confirmedSubscriptions.clear(); - this.pendingSubscriptions.clear(); - - this.notifyListeners('close', event); - - // Always retry connection if we have listeners - if (this.listeners.size > 0) { - if (this.reconnectTimeout) { - clearTimeout(this.reconnectTimeout); - } - this.reconnectTimeout = setTimeout(() => { - if (!this.ws || this.ws.readyState === WebSocket.CLOSED) { - this.connect(); - } - }, this.reconnectDelay); - } - }; - } catch (error) { - console.error('Failed to create WebSocket:', error); - this.isConnecting = false; - // Retry after delay - this.reconnectTimeout = setTimeout(() => { - this.connect(); - }, this.reconnectDelay); - } - } - - 
subscribe(listenerId, callbacks) { - // Generate ID if not provided - if (!listenerId) { - listenerId = `listener_${this.listenerIdCounter++}`; - } - - if (this.verboseLogging) { - console.log('WebSocketManager: Subscribing listener:', listenerId, 'WebSocket state:', this.ws ? this.ws.readyState : 'no connection'); - } - this.listeners.set(listenerId, callbacks); - - // Connect if not already connected - if (!this.ws || this.ws.readyState === WebSocket.CLOSED) { - if (this.verboseLogging) { - console.log('WebSocketManager: WebSocket not connected, connecting...'); - } - this.connect(); - } - - // If already open, notify immediately - if (this.ws && this.ws.readyState === WebSocket.OPEN && callbacks.open) { - if (this.verboseLogging) { - console.log('WebSocketManager: WebSocket already open, calling open callback for listener:', listenerId); - } - // Use setTimeout to ensure this happens after the listener is registered - setTimeout(() => { - if (callbacks.open) { - callbacks.open(); - } - }, 0); - } - - return listenerId; - } - - unsubscribe(listenerId) { - this.listeners.delete(listenerId); - - // If no more listeners, we could close the connection, but let's keep it open - // in case other components need it - } - - send(data) { - if (this.ws && this.ws.readyState === WebSocket.OPEN) { - if (this.verboseLogging) { - console.log('WebSocketManager: Sending message:', data); - } - this.ws.send(JSON.stringify(data)); - } else { - console.warn('WebSocketManager: Cannot send message - connection not open. State:', this.ws ? this.ws.readyState : 'no connection', 'Message:', data); - } - } - - notifyListeners(eventType, data) { - this.listeners.forEach((callbacks) => { - if (callbacks[eventType]) { - try { - callbacks[eventType](data); - } catch (error) { - console.error('Error in WebSocket listener:', error); - } - } - }); - } - - getReadyState() { - return this.ws ? 
this.ws.readyState : WebSocket.CLOSED; - } - - // Subscribe to a server-side channel (will be re-subscribed on reconnect) - subscribeToChannel(channel) { - if (this.serverSubscriptions.has(channel)) { - // Already subscribed or pending - return; - } - - this.serverSubscriptions.add(channel); - - if (this.ws && this.ws.readyState === WebSocket.OPEN) { - if (!this.confirmedSubscriptions.has(channel) && !this.pendingSubscriptions.has(channel)) { - this.pendingSubscriptions.add(channel); - this.send({ type: 'subscribe', channel }); - if (this.verboseLogging) { - console.log('WebSocketManager: Subscribing to channel:', channel); - } - } - } - } - - // Unsubscribe from a server-side channel (won't be re-subscribed on reconnect) - unsubscribeFromChannel(channel) { - this.serverSubscriptions.delete(channel); - this.confirmedSubscriptions.delete(channel); - this.pendingSubscriptions.delete(channel); - - if (this.ws && this.ws.readyState === WebSocket.OPEN) { - this.send({ type: 'unsubscribe', channel }); - if (this.verboseLogging) { - console.log('WebSocketManager: Unsubscribing from channel:', channel); - } - } - } - - // Mark a channel subscription as confirmed (call this when server confirms) - confirmSubscription(channel) { - this.pendingSubscriptions.delete(channel); - this.confirmedSubscriptions.add(channel); - if (this.verboseLogging) { - console.log('WebSocketManager: Subscription confirmed for channel:', channel); - } - } - - // Mark a channel subscription as failed (call this when server rejects) - failSubscription(channel) { - this.pendingSubscriptions.delete(channel); - this.serverSubscriptions.delete(channel); - if (this.verboseLogging) { - console.log('WebSocketManager: Subscription failed for channel:', channel); - } - } - - // Check if subscribed to a channel - isSubscribedToChannel(channel) { - return this.confirmedSubscriptions.has(channel); - } - - // Re-subscribe to all channels after reconnect - resubscribeToChannels() { - if 
// Admin page controller: registration toggle, API-key creation, and
// delegated row actions (runner delete, user admin toggle, API-key
// revoke/delete). Talks to the /api/admin/* endpoints and refreshes the
// HTMX admin fragments after each successful mutation.
(function () {
  const msgEl = document.getElementById("admin-message");
  const errEl = document.getElementById("admin-error");
  const saveRegBtn = document.getElementById("save-registration");
  const regCheckbox = document.getElementById("registration-enabled");
  const createKeyBtn = document.getElementById("create-api-key");

  // Show (or clear, when msg is falsy) the success banner.
  function showMessage(msg) {
    if (!msgEl) return; // guard: banner element may be absent on some pages
    msgEl.textContent = msg || "";
    msgEl.classList.toggle("hidden", !msg);
  }

  // Show (or clear, when msg is falsy) the error banner.
  function showError(msg) {
    if (!errEl) return; // guard: banner element may be absent on some pages
    errEl.textContent = msg || "";
    errEl.classList.toggle("hidden", !msg);
  }

  // JSON request helper for the admin API.
  // Throws Error(data.error || "Request failed") on any non-2xx response;
  // tolerates empty/non-JSON bodies.
  async function request(url, method, payload) {
    const res = await fetch(url, {
      method,
      credentials: "include",
      headers: { "Content-Type": "application/json" },
      body: payload ? JSON.stringify(payload) : undefined,
    });
    const data = await res.json().catch(() => ({}));
    if (!res.ok) throw new Error(data.error || "Request failed");
    return data;
  }

  // Re-fetch every admin fragment; fall back to a full page reload when
  // HTMX is unavailable.
  function refreshAll() {
    if (!window.htmx) return window.location.reload();
    htmx.ajax("GET", "/ui/fragments/admin/runners", "#admin-runners");
    htmx.ajax("GET", "/ui/fragments/admin/users", "#admin-users");
    htmx.ajax("GET", "/ui/fragments/admin/apikeys", "#admin-apikeys");
  }

  if (saveRegBtn && regCheckbox) {
    saveRegBtn.addEventListener("click", async () => {
      showError("");
      try {
        await request("/api/admin/settings/registration", "POST", { enabled: regCheckbox.checked });
        showMessage("Registration setting saved.");
      } catch (err) {
        showError(err.message);
      }
    });
  }

  if (createKeyBtn) {
    createKeyBtn.addEventListener("click", async () => {
      const name = prompt("API key name:");
      if (!name) return;
      showError("");
      try {
        const data = await request("/api/admin/runners/api-keys", "POST", { name, scope: "manager" });
        // The raw key is only returned once by the server; surface it now.
        showMessage(`New API key created: ${data.key}`);
        refreshAll();
      } catch (err) {
        showError(err.message);
      }
    });
  }

  // Delegated click handler for per-row action buttons rendered inside the
  // HTMX fragments (they are re-created on every refresh, so direct
  // listeners would be lost).
  document.body.addEventListener("click", async (e) => {
    const deleteRunner = e.target.closest("[data-delete-runner]");
    const setAdmin = e.target.closest("[data-set-admin]");
    const revokeKey = e.target.closest("[data-revoke-apikey]");
    const deleteKey = e.target.closest("[data-delete-apikey]");
    if (!deleteRunner && !setAdmin && !revokeKey && !deleteKey) return;

    showError("");
    try {
      if (deleteRunner) {
        const id = deleteRunner.getAttribute("data-delete-runner");
        if (!confirm("Delete this runner?")) return;
        await request(`/api/admin/runners/${id}`, "DELETE");
      }
      if (setAdmin) {
        const id = setAdmin.getAttribute("data-set-admin");
        const value = setAdmin.getAttribute("data-admin-value") === "true";
        await request(`/api/admin/users/${id}/admin`, "POST", { is_admin: value });
      }
      if (revokeKey) {
        const id = revokeKey.getAttribute("data-revoke-apikey");
        await request(`/api/admin/runners/api-keys/${id}/revoke`, "PATCH");
      }
      if (deleteKey) {
        const id = deleteKey.getAttribute("data-delete-apikey");
        await request(`/api/admin/runners/api-keys/${id}`, "DELETE");
      }
      refreshAll();
    } catch (err) {
      showError(err.message);
    }
  });
})();
errorEl.textContent = msg || ""; + errorEl.classList.toggle("hidden", !msg); + } + + function showStatus(msg) { + statusEl.classList.remove("hidden"); + statusEl.innerHTML = `

${msg}

`; + } + + function setUploadBusy(busy) { + uploadInProgress = busy; + if (!uploadSubmitBtn) return; + uploadSubmitBtn.disabled = busy; + } + + function setStep(step) { + const uploadActive = step === 1; + stepUpload.classList.toggle("active", uploadActive); + stepUpload.classList.toggle("complete", !uploadActive); + stepConfig.classList.toggle("active", !uploadActive); + uploadSection.classList.toggle("hidden", !uploadActive); + configSection.classList.toggle("hidden", uploadActive); + } + + function fileNameToJobName(fileName) { + const stem = (fileName || "Render Job").replace(/\.[^/.]+$/, ""); + return stem.trim() || "Render Job"; + } + + function prefillFromMetadata(status, fileName) { + const metadata = status.metadata || {}; + const render = metadata.render_settings || {}; + + nameInput.value = fileNameToJobName(fileName || status.file_name); + frameStartInput.value = Number.isFinite(metadata.frame_start) ? metadata.frame_start : 1; + frameEndInput.value = Number.isFinite(metadata.frame_end) ? 
metadata.frame_end : 250; + + if (render.output_format && outputFormatInput.querySelector(`option[value="${render.output_format}"]`)) { + outputFormatInput.value = render.output_format; + } else { + outputFormatInput.value = "EXR"; + } + + if (metadata.blender_version && blendVersionEl.querySelector(`option[value="${metadata.blender_version}"]`)) { + blendVersionEl.value = metadata.blender_version; + } else { + blendVersionEl.value = ""; + } + + unhideObjectsInput.checked = Boolean(metadata.unhide_objects); + enableExecutionInput.checked = Boolean(metadata.enable_execution); + + const scenes = metadata.scene_info || {}; + metadataPreview.innerHTML = ` + + `; + } + + async function loadBlenderVersions() { + try { + const res = await fetch("/api/blender/versions", { credentials: "include" }); + if (!res.ok) return; + const data = await res.json(); + const versions = data.versions || []; + versions.slice(0, 30).forEach((v) => { + const option = document.createElement("option"); + option.value = v.full; + option.textContent = v.full; + blendVersionEl.appendChild(option); + }); + } catch (_) {} + } + + function uploadFile(mainBlendFile) { + return new Promise((resolve, reject) => { + const file = fileInput.files[0]; + if (!file) { + reject(new Error("Select a file first")); + return; + } + const lowerName = file.name.toLowerCase(); + const isAccepted = lowerName.endsWith(".blend") || lowerName.endsWith(".zip"); + if (!isAccepted) { + reject(new Error("Only .blend or .zip files are supported.")); + return; + } + + const fd = new FormData(); + fd.append("file", file); + if (mainBlendFile) { + fd.append("main_blend_file", mainBlendFile); + } + + const xhr = new XMLHttpRequest(); + xhr.open("POST", "/api/jobs/upload", true); + xhr.withCredentials = true; + xhr.upload.addEventListener("progress", (e) => { + if (!e.lengthComputable) return; + const pct = Math.round((e.loaded / e.total) * 100); + showStatus(`Uploading: ${pct}%`); + }); + xhr.onload = () => { + try { + const 
data = JSON.parse(xhr.responseText || "{}"); + if (xhr.status >= 400) { + reject(new Error(data.error || "Upload failed")); + return; + } + resolve(data); + } catch (err) { + reject(err); + } + }; + xhr.onerror = () => reject(new Error("Upload failed")); + xhr.send(fd); + }); + } + + async function pollUploadStatus() { + if (!sessionID) return null; + const res = await fetch(`/api/jobs/upload/status?session_id=${encodeURIComponent(sessionID)}`, { credentials: "include" }); + const data = await res.json().catch(() => ({})); + if (!res.ok) { + throw new Error(data.error || "Upload status check failed"); + } + return data; + } + + async function createJob(payload) { + const res = await fetch("/api/jobs", { + method: "POST", + credentials: "include", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(payload), + }); + const data = await res.json().catch(() => ({})); + if (!res.ok) { + throw new Error(data.error || "Job creation failed"); + } + return data; + } + + async function runSubmission(mainBlendFile) { + showError(""); + setStep(1); + configSection.classList.add("hidden"); + metadataPreview.innerHTML = ""; + + const upload = await uploadFile(mainBlendFile); + sessionID = upload.session_id; + showStatus("Upload complete. 
Processing..."); + + clearInterval(pollTimer); + await new Promise((resolve, reject) => { + pollTimer = setInterval(async () => { + try { + const status = await pollUploadStatus(); + if (!status) return; + showStatus(`${status.message || status.status} (${Math.round((status.progress || 0) * 100)}%)`); + + if (status.status === "select_blend") { + clearInterval(pollTimer); + mainBlendSelect.innerHTML = ""; + (status.blend_files || []).forEach((path) => { + const option = document.createElement("option"); + option.value = path; + option.textContent = path; + mainBlendSelect.appendChild(option); + }); + mainBlendWrapper.classList.remove("hidden"); + reject(new Error("Select a main blend file and submit again.")); + return; + } + + if (status.status === "error") { + clearInterval(pollTimer); + reject(new Error(status.error || "Upload processing failed")); + return; + } + + if (status.status === "completed") { + clearInterval(pollTimer); + prefillFromMetadata(status, fileInput.files[0]?.name || ""); + setStep(2); + resolve(); + } + } catch (err) { + clearInterval(pollTimer); + reject(err); + } + }, 1500); + }); + + } + + async function submitJobConfig() { + if (!sessionID) { + throw new Error("Upload and analyze a file first."); + } + + const fd = new FormData(configForm); + const jobName = String(fd.get("name") || "").trim(); + if (!jobName) { + throw new Error("Job name is required."); + } + nameInput.value = jobName; + const payload = { + job_type: "render", + name: jobName, + frame_start: Number(fd.get("frame_start")), + frame_end: Number(fd.get("frame_end")), + output_format: fd.get("output_format"), + upload_session_id: sessionID, + unhide_objects: Boolean(fd.get("unhide_objects")), + enable_execution: Boolean(fd.get("enable_execution")), + }; + const blenderVersion = fd.get("blender_version"); + if (blenderVersion) payload.blender_version = blenderVersion; + + const job = await createJob(payload); + showStatus(`Job created (#${job.id}). 
Redirecting...`); + window.location.href = `/jobs/${job.id}`; + } + + uploadForm.addEventListener("submit", async (e) => { + e.preventDefault(); + if (uploadInProgress) { + return; + } + try { + setUploadBusy(true); + const selected = mainBlendWrapper.classList.contains("hidden") ? "" : mainBlendSelect.value; + await runSubmission(selected); + } catch (err) { + showError(err.message || "Failed to create job"); + setUploadBusy(false); + } + }); + + configForm.addEventListener("submit", async (e) => { + e.preventDefault(); + try { + showError(""); + await submitJobConfig(); + } catch (err) { + showError(err.message || "Failed to create job"); + } + }); + + setStep(1); + loadBlenderVersions(); +})(); diff --git a/web/static/job_show.js b/web/static/job_show.js new file mode 100644 index 0000000..2726a23 --- /dev/null +++ b/web/static/job_show.js @@ -0,0 +1,428 @@ +(function () { + const jobID = window.location.pathname.split("/").pop(); + const progressFill = document.querySelector(".progress-fill[data-progress]"); + const progressText = document.getElementById("job-progress-text"); + const statusBadge = document.getElementById("job-status-badge"); + const tasksRefreshBtn = document.getElementById("tasks-refresh"); + const tasksFragment = document.getElementById("tasks-fragment"); + const filesRefreshBtn = document.getElementById("files-refresh"); + const filesFragment = document.getElementById("files-fragment"); + const cancelJobBtn = document.getElementById("cancel-job-btn"); + const deleteJobBtn = document.getElementById("delete-job-btn"); + const previewModal = document.getElementById("exr-preview-modal"); + const previewImage = document.getElementById("exr-preview-image"); + const previewLoading = document.getElementById("exr-preview-loading"); + const previewError = document.getElementById("exr-preview-error"); + const previewName = document.getElementById("exr-preview-name"); + let lastJobSnapshot = null; + let lastSmartRefreshAt = 0; + if (progressFill) { + 
const value = Number(progressFill.getAttribute("data-progress") || "0"); + const bounded = Math.max(0, Math.min(100, value)); + progressFill.style.width = `${bounded}%`; + } + + function statusClass(status) { + const normalized = String(status || "").toLowerCase(); + if (normalized === "completed") return "status-completed"; + if (normalized === "running") return "status-running"; + if (normalized === "failed") return "status-failed"; + if (normalized === "cancelled") return "status-cancelled"; + return "status-pending"; + } + + function applyJobState(job) { + if (!job) return; + const normalizedStatus = String(job.status || "pending").toLowerCase(); + const canCancel = normalizedStatus === "pending" || normalizedStatus === "running"; + const canDelete = normalizedStatus === "completed" || normalizedStatus === "failed" || normalizedStatus === "cancelled"; + const progressValue = Math.max(0, Math.min(100, Number(job.progress || 0))); + if (progressFill) { + progressFill.style.width = `${progressValue}%`; + progressFill.setAttribute("data-progress", String(Math.round(progressValue))); + } + if (progressText) { + progressText.textContent = `${Math.round(progressValue)}%`; + } + if (statusBadge) { + statusBadge.textContent = normalizedStatus; + statusBadge.classList.remove("status-pending", "status-running", "status-completed", "status-failed", "status-cancelled"); + statusBadge.classList.add(statusClass(job.status)); + } + if (cancelJobBtn) { + cancelJobBtn.classList.toggle("hidden", !canCancel); + } + if (deleteJobBtn) { + deleteJobBtn.classList.toggle("hidden", !canDelete); + } + } + + function refreshTasksAndFiles() { + if (!window.htmx) return; + if (tasksFragment) { + htmx.ajax("GET", `/ui/fragments/jobs/${jobID}/tasks`, "#tasks-fragment"); + } + if (filesFragment) { + htmx.ajax("GET", `/ui/fragments/jobs/${jobID}/files`, "#files-fragment"); + } + lastSmartRefreshAt = Date.now(); + } + + async function pollJobState() { + try { + const res = await 
fetch(`/api/jobs/${jobID}`, { credentials: "include" }); + if (!res.ok) return; + const job = await res.json(); + applyJobState(job); + + const snapshot = { + status: String(job.status || ""), + progress: Math.round(Number(job.progress || 0)), + startedAt: job.started_at || "", + completedAt: job.completed_at || "", + }; + const changed = + !lastJobSnapshot || + snapshot.status !== lastJobSnapshot.status || + snapshot.progress !== lastJobSnapshot.progress || + snapshot.startedAt !== lastJobSnapshot.startedAt || + snapshot.completedAt !== lastJobSnapshot.completedAt; + lastJobSnapshot = snapshot; + + // Smart refresh fragments only when job state changes. + if (changed) { + refreshTasksAndFiles(); + return; + } + + // Fallback while running: refresh infrequently even without visible progress deltas. + if (snapshot.status === "running" && Date.now() - lastSmartRefreshAt > 12000) { + refreshTasksAndFiles(); + } + } catch (_) { + // Keep UI usable even if polling briefly fails. + } + } + + if (tasksRefreshBtn && tasksFragment && window.htmx) { + tasksRefreshBtn.addEventListener("click", () => { + htmx.ajax("GET", `/ui/fragments/jobs/${jobID}/tasks`, "#tasks-fragment"); + }); + } + if (filesRefreshBtn && filesFragment && window.htmx) { + filesRefreshBtn.addEventListener("click", () => { + htmx.ajax("GET", `/ui/fragments/jobs/${jobID}/files`, "#files-fragment"); + }); + } + pollJobState(); + setInterval(pollJobState, 2500); + + async function apiRequest(url, method) { + const res = await fetch(url, { + method, + credentials: "include", + headers: { "Content-Type": "application/json" }, + }); + const data = await res.json().catch(() => ({})); + if (!res.ok) { + throw new Error(data.error || "Request failed"); + } + return data; + } + + function closePreviewModal() { + if (!previewModal) return; + previewModal.classList.add("hidden"); + if (previewImage) { + previewImage.classList.add("hidden"); + previewImage.removeAttribute("src"); + } + if (previewLoading) 
previewLoading.classList.remove("hidden"); + if (previewError) { + previewError.classList.add("hidden"); + previewError.textContent = ""; + } + } + + function openPreviewModal(url, name) { + if (!previewModal || !previewImage) return; + previewModal.classList.remove("hidden"); + if (previewName) previewName.textContent = name ? `File: ${name}` : ""; + if (previewLoading) previewLoading.classList.remove("hidden"); + if (previewError) { + previewError.classList.add("hidden"); + previewError.textContent = ""; + } + previewImage.classList.add("hidden"); + previewImage.onload = () => { + if (previewLoading) previewLoading.classList.add("hidden"); + previewImage.classList.remove("hidden"); + }; + previewImage.onerror = () => { + if (previewLoading) previewLoading.classList.add("hidden"); + if (previewError) { + previewError.textContent = "Failed to load preview image."; + previewError.classList.remove("hidden"); + } + }; + previewImage.src = url; + } + + document.body.addEventListener("click", async (e) => { + const previewBtn = e.target.closest("[data-exr-preview-url]"); + if (previewBtn) { + const url = previewBtn.getAttribute("data-exr-preview-url"); + const name = previewBtn.getAttribute("data-exr-preview-name"); + if (url) { + openPreviewModal(url, name || ""); + } + return; + } + + const modalClose = e.target.closest("[data-modal-close]"); + if (modalClose) { + closePreviewModal(); + return; + } + + const cancelBtn = e.target.closest("[data-cancel-job]"); + const deleteBtn = e.target.closest("[data-delete-job]"); + if (!cancelBtn && !deleteBtn) return; + const id = (cancelBtn || deleteBtn).getAttribute(cancelBtn ? 
"data-cancel-job" : "data-delete-job"); + try { + if (cancelBtn) { + if (!confirm("Cancel this job?")) return; + await apiRequest(`/api/jobs/${id}`, "DELETE"); + } else { + if (!confirm("Delete this job permanently?")) return; + await apiRequest(`/api/jobs/${id}/delete`, "POST"); + window.location.href = "/jobs"; + return; + } + window.location.reload(); + } catch (err) { + alert(err.message); + } + }); + + document.addEventListener("keydown", (e) => { + if (e.key === "Escape") { + closePreviewModal(); + } + }); + + const taskSelect = document.getElementById("task-log-task-id"); + const levelFilter = document.getElementById("task-log-level-filter"); + const autoRefreshToggle = document.getElementById("task-log-auto-refresh"); + const followToggle = document.getElementById("task-log-follow"); + const refreshBtn = document.getElementById("task-log-refresh"); + const copyBtn = document.getElementById("task-log-copy"); + const output = document.getElementById("task-log-output"); + const statusEl = document.getElementById("task-log-status"); + const state = { + timer: null, + activeTaskID: "", + lastLogID: 0, + logs: [], + seenIDs: new Set(), + }; + + function setStatus(text) { + if (statusEl) statusEl.textContent = text; + } + + function levelClass(level) { + const normalized = String(level || "INFO").toUpperCase(); + if (normalized === "ERROR") return "log-error"; + if (normalized === "WARN") return "log-warn"; + if (normalized === "DEBUG") return "log-debug"; + return "log-info"; + } + + function formatTime(ts) { + if (!ts) return "--:--:--"; + const d = new Date(ts); + if (Number.isNaN(d.getTime())) return "--:--:--"; + return d.toLocaleTimeString(); + } + + function renderLogs() { + if (!output) return; + const selectedLevel = (levelFilter?.value || "").toUpperCase(); + const filtered = state.logs.filter((entry) => { + if (!selectedLevel) return true; + return String(entry.log_level || "").toUpperCase() === selectedLevel; + }); + + if (filtered.length === 0) { + 
output.innerHTML = '
No logs yet.
'; + return; + } + + output.innerHTML = filtered.map((entry) => { + const level = String(entry.log_level || "INFO").toUpperCase(); + const step = entry.step_name ? ` (${entry.step_name})` : ""; + const message = String(entry.message || "").replaceAll("&", "&amp;").replaceAll("<", "&lt;").replaceAll(">", "&gt;"); + return `
+ ${formatTime(entry.created_at)} + ${level}${step} + ${message} +
`; + }).join(""); + + if (followToggle?.checked) { + output.scrollTop = output.scrollHeight; + } + } + + function getVisibleLogs() { + const selectedLevel = (levelFilter?.value || "").toUpperCase(); + return state.logs.filter((entry) => { + if (!selectedLevel) return true; + return String(entry.log_level || "").toUpperCase() === selectedLevel; + }); + } + + function logsToText(entries) { + return entries.map((entry) => { + const level = String(entry.log_level || "INFO").toUpperCase(); + const step = entry.step_name ? ` (${entry.step_name})` : ""; + return `[${formatTime(entry.created_at)}] [${level}]${step} ${entry.message || ""}`; + }).join("\n"); + } + + function collectTaskOptions() { + if (!taskSelect) return; + const buttons = document.querySelectorAll("[data-view-logs-task-id]"); + const current = taskSelect.value; + + taskSelect.innerHTML = ''; + buttons.forEach((btn) => { + const id = btn.getAttribute("data-view-logs-task-id"); + if (!id) return; + const row = btn.closest("tr"); + const status = row?.querySelector(".status")?.textContent?.trim() || ""; + const type = row?.children?.[1]?.textContent?.trim() || ""; + const option = document.createElement("option"); + option.value = id; + option.textContent = `#${id} ${type ? `(${type})` : ""} ${status ? 
`- ${status}` : ""}`.trim(); + taskSelect.appendChild(option); + }); + + if (current && taskSelect.querySelector(`option[value="${current}"]`)) { + taskSelect.value = current; + } + } + + async function fetchLogs({ reset = false, full = false } = {}) { + const taskID = taskSelect?.value?.trim(); + if (!taskID) { + setStatus("Select a task to view logs."); + return; + } + + if (reset || taskID !== state.activeTaskID) { + state.activeTaskID = taskID; + state.lastLogID = 0; + state.logs = []; + state.seenIDs.clear(); + renderLogs(); + } + + const params = new URLSearchParams(); + params.set("limit", "0"); // backend: 0 = no limit + if (!full && state.lastLogID > 0) { + params.set("since_id", String(state.lastLogID)); + } + + try { + const res = await fetch(`/api/jobs/${jobID}/tasks/${taskID}/logs?${params.toString()}`, { + credentials: "include", + }); + if (!res.ok) { + setStatus(`Failed to fetch logs (HTTP ${res.status}).`); + return; + } + + const payload = await res.json(); + const rows = Array.isArray(payload) ? 
payload : (payload.logs || []); + if (rows.length > 0) { + for (const row of rows) { + const id = Number(row.id || 0); + if (id > 0 && !state.seenIDs.has(id)) { + state.seenIDs.add(id); + state.logs.push(row); + if (id > state.lastLogID) state.lastLogID = id; + } + } + if (!Array.isArray(payload) && Number(payload.last_id || 0) > state.lastLogID) { + state.lastLogID = Number(payload.last_id); + } + } + + setStatus(`Task #${taskID}: ${state.logs.length} log line(s).`); + renderLogs(); + } catch (err) { + setStatus(`Failed to fetch logs: ${err.message}`); + } + } + + function restartPolling() { + if (state.timer) { + clearInterval(state.timer); + state.timer = null; + } + if (!autoRefreshToggle?.checked) return; + state.timer = setInterval(() => { + if (taskSelect?.value) { + fetchLogs(); + } + }, 2000); + } + + if (tasksFragment) { + tasksFragment.addEventListener("htmx:afterSwap", () => { + collectTaskOptions(); + }); + } + collectTaskOptions(); + + document.body.addEventListener("click", (e) => { + const viewBtn = e.target.closest("[data-view-logs-task-id]"); + if (!viewBtn || !taskSelect) return; + const taskID = viewBtn.getAttribute("data-view-logs-task-id"); + if (!taskID) return; + taskSelect.value = taskID; + fetchLogs({ reset: true, full: true }); + }); + + if (taskSelect) { + taskSelect.addEventListener("change", () => fetchLogs({ reset: true, full: true })); + } + if (levelFilter) { + levelFilter.addEventListener("change", renderLogs); + } + if (refreshBtn) { + refreshBtn.addEventListener("click", () => fetchLogs({ reset: true, full: true })); + } + if (copyBtn) { + copyBtn.addEventListener("click", async () => { + const visible = getVisibleLogs(); + if (visible.length === 0) { + setStatus("No logs to copy."); + return; + } + try { + await navigator.clipboard.writeText(logsToText(visible)); + setStatus(`Copied ${visible.length} log line(s).`); + } catch (_) { + setStatus("Clipboard copy failed."); + } + }); + } + if (autoRefreshToggle) { + 
autoRefreshToggle.addEventListener("change", restartPolling); + } + + restartPolling(); +})(); diff --git a/web/static/jobs.js b/web/static/jobs.js new file mode 100644 index 0000000..6538bd0 --- /dev/null +++ b/web/static/jobs.js @@ -0,0 +1,41 @@ +(function () { + async function apiRequest(url, method) { + const res = await fetch(url, { + method, + credentials: "include", + headers: { "Content-Type": "application/json" }, + }); + const data = await res.json().catch(() => ({})); + if (!res.ok) { + throw new Error(data.error || "Request failed"); + } + return data; + } + + document.body.addEventListener("click", async (e) => { + const cancelBtn = e.target.closest("[data-cancel-job]"); + const deleteBtn = e.target.closest("[data-delete-job]"); + if (!cancelBtn && !deleteBtn) return; + + try { + if (cancelBtn) { + const id = cancelBtn.getAttribute("data-cancel-job"); + if (!confirm("Cancel this job?")) return; + await apiRequest(`/api/jobs/${id}`, "DELETE"); + } + if (deleteBtn) { + const id = deleteBtn.getAttribute("data-delete-job"); + if (!confirm("Delete this job permanently?")) return; + await apiRequest(`/api/jobs/${id}/delete`, "POST"); + } + if (window.htmx) { + htmx.trigger("#jobs-fragment", "refresh"); + htmx.ajax("GET", "/ui/fragments/jobs", "#jobs-fragment"); + } else { + window.location.reload(); + } + } catch (err) { + alert(err.message); + } + }); +})(); diff --git a/web/static/login.js b/web/static/login.js new file mode 100644 index 0000000..92260f3 --- /dev/null +++ b/web/static/login.js @@ -0,0 +1,65 @@ +(function () { + const loginForm = document.getElementById("login-form"); + const registerForm = document.getElementById("register-form"); + const errorEl = document.getElementById("auth-error"); + + function setError(msg) { + if (!errorEl) return; + if (!msg) { + errorEl.classList.add("hidden"); + errorEl.textContent = ""; + return; + } + errorEl.textContent = msg; + errorEl.classList.remove("hidden"); + } + + async function postJSON(url, payload) 
{ + const res = await fetch(url, { + method: "POST", + credentials: "include", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(payload), + }); + const body = await res.json().catch(() => ({})); + if (!res.ok) { + throw new Error(body.error || "Request failed"); + } + return body; + } + + if (loginForm) { + loginForm.addEventListener("submit", async (e) => { + e.preventDefault(); + setError(""); + const fd = new FormData(loginForm); + try { + await postJSON("/api/auth/local/login", { + username: fd.get("username"), + password: fd.get("password"), + }); + window.location.href = "/jobs"; + } catch (err) { + setError(err.message); + } + }); + } + + if (registerForm) { + registerForm.addEventListener("submit", async (e) => { + e.preventDefault(); + setError(""); + const fd = new FormData(registerForm); + try { + await postJSON("/api/auth/local/register", { + name: fd.get("name"), + email: fd.get("email"), + password: fd.get("password"), + }); + window.location.href = "/jobs"; + } catch (err) { + setError(err.message); + } + }); + } +})(); diff --git a/web/static/style.css b/web/static/style.css new file mode 100644 index 0000000..41762c0 --- /dev/null +++ b/web/static/style.css @@ -0,0 +1,241 @@ +* { box-sizing: border-box; } +body { + margin: 0; + font-family: system-ui, -apple-system, Segoe UI, Roboto, sans-serif; + background: #0f172a; + color: #e2e8f0; +} +.container { max-width: 1200px; margin: 24px auto; padding: 0 16px; } +.topbar { + display: flex; + align-items: center; + justify-content: space-between; + gap: 16px; + padding: 12px 16px; + border-bottom: 1px solid #334155; + background: #111827; +} +.brand { font-weight: 700; } +.nav { display: flex; gap: 12px; } +.nav a { color: #cbd5e1; text-decoration: none; padding: 8px 10px; border-radius: 6px; } +.nav a.active, .nav a:hover { background: #1f2937; color: #fff; } +.account { display: flex; gap: 12px; align-items: center; } + +.card { + background: #111827; + border: 1px solid 
#334155; + border-radius: 10px; + padding: 16px; + margin-bottom: 16px; +} +.card.narrow { max-width: 900px; margin-inline: auto; } +.section-head { display: flex; justify-content: space-between; align-items: center; gap: 12px; } + +.btn { + border: 1px solid #475569; + color: #e2e8f0; + background: #1f2937; + border-radius: 7px; + padding: 8px 12px; + cursor: pointer; + text-decoration: none; +} +.btn:hover { background: #334155; } +.btn.primary { background: #2563eb; border-color: #2563eb; color: white; } +.btn:disabled, +.btn[disabled] { + cursor: not-allowed; + opacity: 1; +} +.btn.primary:disabled, +.btn.primary[disabled] { + background: #1e293b; + border-color: #475569; + color: #94a3b8; +} +.btn.danger { background: #b91c1c; border-color: #b91c1c; color: white; } +.btn.subtle { background: transparent; } +.btn.tiny { padding: 4px 8px; font-size: 12px; } + +.table { width: 100%; border-collapse: collapse; } +.table th, .table td { border-bottom: 1px solid #334155; padding: 8px; text-align: left; vertical-align: top; } +.table th { font-size: 12px; text-transform: uppercase; color: #94a3b8; } +.job-link, +.job-link:visited, +.job-link:hover, +.job-link:active { + color: #93c5fd; + text-decoration: underline; + text-underline-offset: 2px; + text-decoration-thickness: 1px; + cursor: pointer; +} +.job-link:hover, +.job-link:focus-visible { + color: #bfdbfe; + text-decoration-thickness: 2px; +} + +.status { border-radius: 999px; padding: 2px 8px; font-size: 12px; } +.status-pending { background: #7c2d12; color: #fdba74; } +.status-running { background: #164e63; color: #67e8f9; } +.status-completed { background: #14532d; color: #86efac; } +.status-failed { background: #7f1d1d; color: #fca5a5; } +.status-cancelled { background: #334155; color: #cbd5e1; } +.status-online { background: #14532d; color: #86efac; } +.status-offline { background: #334155; color: #cbd5e1; } +.status-busy { background: #164e63; color: #67e8f9; } + +.progress { + width: 100%; + height: 10px; 
+ background: #1e293b; + border-radius: 999px; + overflow: hidden; +} +.progress-fill { height: 100%; background: #2563eb; } + +.alert { + border-radius: 8px; + padding: 10px 12px; + margin: 10px 0; +} +.alert.error { background: #7f1d1d; color: #fee2e2; border: 1px solid #ef4444; } +.alert.notice { background: #1e3a8a; color: #dbeafe; border: 1px solid #3b82f6; } + +label { display: block; } +input, select { + width: 100%; + margin-top: 6px; + margin-bottom: 12px; + background: #0f172a; + border: 1px solid #334155; + border-radius: 6px; + color: #e2e8f0; + padding: 8px; +} +.stack { display: grid; gap: 8px; } +.grid-2 { display: grid; grid-template-columns: 1fr 1fr; gap: 12px; } +.split { display: grid; grid-template-columns: 1fr 1fr; gap: 16px; margin-top: 16px; } +.auth-grid { display: flex; gap: 10px; margin-bottom: 12px; } +.check-row { display: flex; gap: 12px; align-items: center; flex-wrap: wrap; } +.row { display: flex; gap: 8px; align-items: center; flex-wrap: wrap; } +.stepper { display: flex; gap: 10px; margin-bottom: 12px; } +.step { + border: 1px solid #334155; + border-radius: 999px; + padding: 6px 10px; + font-size: 12px; + color: #94a3b8; +} +.step.active { + border-color: #2563eb; + color: #bfdbfe; + background: #1e3a8a; +} +.step.complete { + border-color: #14532d; + color: #86efac; + background: #052e16; +} +.muted { color: #94a3b8; margin-top: 0; } +.metadata-grid { + display: grid; + grid-template-columns: repeat(2, minmax(0, 1fr)); + gap: 8px; + margin: 8px 0 12px; +} +.logs { + max-height: 320px; + overflow: auto; + background: #020617; + border: 1px solid #334155; + border-radius: 8px; + padding: 10px; + white-space: pre-wrap; +} +.log-controls { + display: grid; + grid-template-columns: 2fr 1fr auto auto auto auto; + gap: 10px; + align-items: end; + margin-bottom: 10px; +} +.log-toggle { + display: flex; + gap: 6px; + align-items: center; + margin-bottom: 12px; + white-space: nowrap; +} +.log-toggle input { + width: auto; + margin: 0; +} 
+.log-lines { + font-family: ui-monospace, SFMono-Regular, Menlo, monospace; + white-space: normal; +} +.log-line { + display: grid; + grid-template-columns: auto auto auto 1fr; + gap: 8px; + align-items: start; + padding: 4px 0; + border-bottom: 1px solid #1e293b; +} +.log-line.empty { + display: block; + color: #94a3b8; + border-bottom: none; +} +.log-time { color: #64748b; } +.log-level { + border-radius: 999px; + padding: 0 6px; + font-size: 11px; + line-height: 18px; +} +.log-info { background: #164e63; color: #67e8f9; } +.log-warn { background: #7c2d12; color: #fdba74; } +.log-error { background: #7f1d1d; color: #fca5a5; } +.log-debug { background: #334155; color: #cbd5e1; } +.log-step { color: #93c5fd; } +.log-message { + color: #e2e8f0; + overflow-wrap: anywhere; +} +.modal { + position: fixed; + inset: 0; + z-index: 1000; + display: grid; + place-items: center; +} +.modal-backdrop { + position: absolute; + inset: 0; + background: rgba(2, 6, 23, 0.8); +} +.modal-content { + position: relative; + width: min(1100px, 94vw); + max-height: 90vh; + overflow: auto; + background: #0b1220; + border: 1px solid #334155; + border-radius: 10px; + padding: 12px; +} +.modal-body { + min-height: 220px; +} +.preview-image { + display: block; + max-width: 100%; + max-height: 70vh; + margin: 0 auto; + border: 1px solid #334155; + border-radius: 8px; +} +.hidden { display: none; } diff --git a/web/style.css b/web/style.css deleted file mode 100644 index 806eaaa..0000000 --- a/web/style.css +++ /dev/null @@ -1,325 +0,0 @@ -* { - margin: 0; - padding: 0; - box-sizing: border-box; -} - -body { - font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, sans-serif; - background: #f5f5f5; - color: #333; -} - -.hidden { - display: none !important; -} - -/* Login Page */ -#login-page { - display: flex; - justify-content: center; - align-items: center; - min-height: 100vh; - background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); -} - 
-.login-container { - background: white; - padding: 3rem; - border-radius: 10px; - box-shadow: 0 10px 40px rgba(0,0,0,0.2); - text-align: center; - max-width: 400px; - width: 100%; -} - -.login-container h1 { - margin-bottom: 0.5rem; - color: #667eea; -} - -.login-container p { - color: #666; - margin-bottom: 2rem; -} - -.login-buttons { - display: flex; - flex-direction: column; - gap: 1rem; -} - -/* Main Page */ -#main-page { - min-height: 100vh; -} - -header { - background: white; - padding: 1rem 2rem; - box-shadow: 0 2px 4px rgba(0,0,0,0.1); - display: flex; - justify-content: space-between; - align-items: center; -} - -header h1 { - color: #667eea; -} - -.user-info { - display: flex; - align-items: center; - gap: 1rem; -} - -nav { - background: white; - padding: 0 2rem; - border-bottom: 1px solid #e0e0e0; - display: flex; - gap: 1rem; -} - -.nav-btn { - padding: 1rem 1.5rem; - background: none; - border: none; - border-bottom: 2px solid transparent; - cursor: pointer; - font-size: 1rem; - color: #666; - transition: all 0.2s; -} - -.nav-btn:hover { - color: #667eea; -} - -.nav-btn.active { - color: #667eea; - border-bottom-color: #667eea; -} - -main { - max-width: 1200px; - margin: 2rem auto; - padding: 0 2rem; -} - -.content-page { - background: white; - padding: 2rem; - border-radius: 8px; - box-shadow: 0 2px 4px rgba(0,0,0,0.1); -} - -.content-page h2 { - margin-bottom: 1.5rem; - color: #333; -} - -/* Buttons */ -.btn { - padding: 0.75rem 1.5rem; - border: none; - border-radius: 5px; - cursor: pointer; - font-size: 1rem; - text-decoration: none; - display: inline-block; - transition: all 0.2s; -} - -.btn-primary { - background: #667eea; - color: white; -} - -.btn-primary:hover { - background: #5568d3; -} - -.btn-secondary { - background: #6c757d; - color: white; -} - -.btn-secondary:hover { - background: #5a6268; -} - -.btn-google { - background: #db4437; - color: white; -} - -.btn-google:hover { - background: #c23321; -} - -.btn-discord { - background: 
#5865F2; - color: white; -} - -.btn-discord:hover { - background: #4752C4; -} - -/* Forms */ -.form-group { - margin-bottom: 1.5rem; -} - -.form-group label { - display: block; - margin-bottom: 0.5rem; - font-weight: 500; - color: #333; -} - -.form-group input, -.form-group select { - width: 100%; - padding: 0.75rem; - border: 1px solid #ddd; - border-radius: 5px; - font-size: 1rem; -} - -.form-group input:focus, -.form-group select:focus { - outline: none; - border-color: #667eea; -} - -/* Jobs List */ -#jobs-list { - display: grid; - gap: 1rem; -} - -.job-card { - background: #f8f9fa; - padding: 1.5rem; - border-radius: 8px; - border-left: 4px solid #667eea; -} - -.job-card h3 { - margin-bottom: 0.5rem; - color: #333; -} - -.job-meta { - display: flex; - gap: 2rem; - margin: 1rem 0; - color: #666; - font-size: 0.9rem; -} - -.job-status { - display: inline-block; - padding: 0.25rem 0.75rem; - border-radius: 20px; - font-size: 0.85rem; - font-weight: 500; -} - -.job-status.pending { - background: #ffc107; - color: #000; -} - -.job-status.running { - background: #17a2b8; - color: white; -} - -.job-status.completed { - background: #28a745; - color: white; -} - -.job-status.failed { - background: #dc3545; - color: white; -} - -.job-status.cancelled { - background: #6c757d; - color: white; -} - -.progress-bar { - width: 100%; - height: 8px; - background: #e0e0e0; - border-radius: 4px; - overflow: hidden; - margin: 1rem 0; -} - -.progress-fill { - height: 100%; - background: #667eea; - transition: width 0.3s; -} - -.job-actions { - margin-top: 1rem; - display: flex; - gap: 1rem; -} - -/* Runners List */ -#runners-list { - display: grid; - gap: 1rem; -} - -.runner-card { - background: #f8f9fa; - padding: 1.5rem; - border-radius: 8px; - border-left: 4px solid #28a745; -} - -.runner-card h3 { - margin-bottom: 0.5rem; - color: #333; -} - -.runner-info { - display: flex; - gap: 2rem; - margin-top: 1rem; - color: #666; - font-size: 0.9rem; -} - -.runner-status { - display: 
inline-block; - padding: 0.25rem 0.75rem; - border-radius: 20px; - font-size: 0.85rem; - font-weight: 500; -} - -.runner-status.online { - background: #28a745; - color: white; -} - -.runner-status.offline { - background: #6c757d; - color: white; -} - -.runner-status.busy { - background: #ffc107; - color: #000; -} - diff --git a/web/tailwind.config.js b/web/tailwind.config.js deleted file mode 100644 index 9403c31..0000000 --- a/web/tailwind.config.js +++ /dev/null @@ -1,20 +0,0 @@ -/** @type {import('tailwindcss').Config} */ -export default { - content: [ - "./index.html", - "./src/**/*.{js,ts,jsx,tsx}", - ], - darkMode: 'class', - theme: { - extend: { - colors: { - primary: { - 500: '#f97316', // orange-500 - 600: '#ea580c', // orange-600 - }, - }, - }, - }, - plugins: [], -} - diff --git a/web/templates/admin.html b/web/templates/admin.html new file mode 100644 index 0000000..db681c1 --- /dev/null +++ b/web/templates/admin.html @@ -0,0 +1,49 @@ +{{ define "page_admin" }} +{{ $view := .Data }} +
+

Admin Panel

+
+ + +
+
+ +
+

Runners

+
+

Loading runners...

+
+
+ +
+

Users

+
+

Loading users...

+
+
+ +
+
+

Runner API Keys

+ +
+
+

Loading API keys...

+
+
+ + + +{{ end }} diff --git a/web/templates/base.html b/web/templates/base.html new file mode 100644 index 0000000..85337f9 --- /dev/null +++ b/web/templates/base.html @@ -0,0 +1,48 @@ +{{ define "base" }} + + + + + + {{ .Title }} - JiggaBlend + + + + + {{ if .User }} +
+
JiggaBlend
+ + +
+ {{ end }} + +
+ {{ if .Error }}
{{ .Error }}
{{ end }} + {{ if .Notice }}
{{ .Notice }}
{{ end }} + {{ if eq .ContentTemplate "page_login" }} + {{ template "page_login" . }} + {{ else if eq .ContentTemplate "page_jobs" }} + {{ template "page_jobs" . }} + {{ else if eq .ContentTemplate "page_jobs_new" }} + {{ template "page_jobs_new" . }} + {{ else if eq .ContentTemplate "page_job_show" }} + {{ template "page_job_show" . }} + {{ else if eq .ContentTemplate "page_admin" }} + {{ template "page_admin" . }} + {{ end }} +
+ + {{ if .PageScript }}{{ end }} + + +{{ end }} diff --git a/web/templates/job_new.html b/web/templates/job_new.html new file mode 100644 index 0000000..ba18a20 --- /dev/null +++ b/web/templates/job_new.html @@ -0,0 +1,60 @@ +{{ define "page_jobs_new" }} +
+

Create Render Job

+
+
1. Upload & Analyze
+
2. Review & Submit
+
+ +
+ + + +
+ + + +
+ + +{{ end }} diff --git a/web/templates/job_show.html b/web/templates/job_show.html new file mode 100644 index 0000000..76fef64 --- /dev/null +++ b/web/templates/job_show.html @@ -0,0 +1,97 @@ +{{ define "page_job_show" }} +{{ $view := .Data }} +{{ $job := index $view "job" }} +
+
+

Job #{{ $job.ID }} - {{ $job.Name }}

+ Back +
+

Status: {{ $job.Status }}

+

Progress: {{ progressInt $job.Progress }}%

+
+
+
+
+ {{ if $job.FrameStart }}Frames: {{ derefInt $job.FrameStart }}{{ if $job.FrameEnd }}-{{ derefInt $job.FrameEnd }}{{ end }}{{ end }} + {{ if $job.OutputFormat }}Format: {{ derefString $job.OutputFormat }}{{ end }} + Created: {{ formatTime $job.CreatedAt }} +
+
+ + +
+
+ +
+
+

Tasks

+ +
+
+

Loading tasks...

+
+
+ +
+
+

Files

+
+ Download all EXR (.zip) + +
+
+
+

Loading files...

+
+
+ + + +
+
+

Task Logs

+ Select a task to view logs. +
+
+ + + + + + +
+
+
+{{ end }} diff --git a/web/templates/jobs.html b/web/templates/jobs.html new file mode 100644 index 0000000..ee69293 --- /dev/null +++ b/web/templates/jobs.html @@ -0,0 +1,16 @@ +{{ define "page_jobs" }} +
+
+

Your Jobs

+ New Job +
+
+

Loading jobs...

+
+
+{{ end }} diff --git a/web/templates/login.html b/web/templates/login.html new file mode 100644 index 0000000..6acb999 --- /dev/null +++ b/web/templates/login.html @@ -0,0 +1,41 @@ +{{ define "page_login" }} +
+

Sign in to JiggaBlend

+ {{ $view := .Data }} + {{ if index $view "error" }} +
Login error: {{ index $view "error" }}
+ {{ end }} + +
+ {{ if index $view "google_enabled" }} + Continue with Google + {{ end }} + {{ if index $view "discord_enabled" }} + Continue with Discord + {{ end }} +
+ + {{ if index $view "local_enabled" }} +
+
+

Local Login

+ + + +
+ +
+

Register

+ + + + +
+
+ {{ else }} +

Local authentication is disabled.

+ {{ end }} + + +
+{{ end }} diff --git a/web/templates/partials/admin_apikeys.html b/web/templates/partials/admin_apikeys.html new file mode 100644 index 0000000..2a186f6 --- /dev/null +++ b/web/templates/partials/admin_apikeys.html @@ -0,0 +1,36 @@ +{{ define "partial_admin_apikeys" }} +{{ $keys := index . "keys" }} +{{ if not $keys }} +

No API keys generated yet.

+{{ else }} + + + + + + + + + + + + + + {{ range $key := $keys }} + + + + + + + + + + {{ end }} + +
IDNameScopePrefixActiveCreatedActions
{{ $key.ID }}{{ $key.Name }}{{ $key.Scope }}{{ $key.Key }}{{ if $key.IsActive }}yes{{ else }}no{{ end }}{{ formatTime $key.CreatedAt }} + + +
+{{ end }} +{{ end }} diff --git a/web/templates/partials/admin_runners.html b/web/templates/partials/admin_runners.html new file mode 100644 index 0000000..47ce702 --- /dev/null +++ b/web/templates/partials/admin_runners.html @@ -0,0 +1,35 @@ +{{ define "partial_admin_runners" }} +{{ $runners := index . "runners" }} +{{ if not $runners }} +

No runners registered.

+{{ else }} + + + + + + + + + + + + + + {{ range $runner := $runners }} + + + + + + + + + + {{ end }} + +
IDNameHostStatusPriorityHeartbeatActions
{{ $runner.ID }}{{ $runner.Name }}{{ $runner.Hostname }}{{ $runner.Status }}{{ $runner.Priority }}{{ formatTime $runner.LastHeartbeat }} + +
+{{ end }} +{{ end }} diff --git a/web/templates/partials/admin_users.html b/web/templates/partials/admin_users.html new file mode 100644 index 0000000..c6dc049 --- /dev/null +++ b/web/templates/partials/admin_users.html @@ -0,0 +1,44 @@ +{{ define "partial_admin_users" }} +{{ $users := index . "users" }} +{{ $currentUserID := index . "current_user_id" }} +{{ if not $users }} +

No users found.

+{{ else }} + + + + + + + + + + + + + + {{ range $user := $users }} + + + + + + + + + + {{ end }} + +
IDNameEmailProviderAdminCreatedActions
{{ $user.ID }}{{ $user.Name }}{{ $user.Email }}{{ if $user.OAuthProvider }}{{ $user.OAuthProvider }}{{ else }}local{{ end }}{{ if $user.IsAdmin }}yes{{ else }}no{{ end }}{{ formatTime $user.CreatedAt }} + {{ if and $user.IsAdmin (eq $user.ID $currentUserID) }} + + {{ else }} + + {{ end }} +
+{{ end }} +{{ end }} diff --git a/web/templates/partials/job_files.html b/web/templates/partials/job_files.html new file mode 100644 index 0000000..6e95dd1 --- /dev/null +++ b/web/templates/partials/job_files.html @@ -0,0 +1,82 @@ +{{ define "partial_job_files" }} +{{ $jobID := index . "job_id" }} +{{ $files := index . "files" }} +{{ $isAdmin := index . "is_admin" }} +{{ $adminInputFiles := index . "admin_input_files" }} +{{ if not $files }} +

No output files found yet.

+{{ else }} + + + + + + {{ if $isAdmin }}{{ end }} + + + + + + + {{ range $file := $files }} + + + + {{ if $isAdmin }}{{ end }} + + + + + {{ end }} + +
IDNameTypeSizeCreatedActions
{{ $file.ID }}{{ $file.FileName }}{{ $file.FileType }}{{ $file.FileSize }}{{ formatTime $file.CreatedAt }} + Download + {{ if hasSuffixFold $file.FileName ".exr" }} + + {{ end }} +
+{{ end }} + +{{ if $isAdmin }} +
+ Admin: context/input files + {{ if not $adminInputFiles }} +

No context/input files found.

+ {{ else }} + + + + + + + + + + + + + {{ range $file := $adminInputFiles }} + + + + + + + + + {{ end }} + +
IDNameTypeSizeCreatedDownload
{{ $file.ID }}{{ $file.FileName }}{{ $file.FileType }}{{ $file.FileSize }}{{ formatTime $file.CreatedAt }} + Download +
+ {{ end }} +
+{{ end }} +{{ end }} diff --git a/web/templates/partials/job_tasks.html b/web/templates/partials/job_tasks.html new file mode 100644 index 0000000..f574e9d --- /dev/null +++ b/web/templates/partials/job_tasks.html @@ -0,0 +1,37 @@ +{{ define "partial_job_tasks" }} +{{ $tasks := index . "tasks" }} +{{ if not $tasks }} +

No tasks yet.

+{{ else }} + + + + + + + + + + + + + + + {{ range $task := $tasks }} + + + + + + + + + + + {{ end }} + +
IDTypeStatusFrame(s)StepRetriesErrorLogs
{{ $task.ID }}{{ $task.TaskType }}{{ $task.Status }}{{ $task.Frame }}{{ if $task.FrameEnd }}-{{ derefInt $task.FrameEnd }}{{ end }}{{ if $task.CurrentStep }}{{ $task.CurrentStep }}{{ else }}-{{ end }}{{ $task.RetryCount }}{{ if $task.Error }}{{ $task.Error }}{{ else }}-{{ end }} + +
+{{ end }} +{{ end }} diff --git a/web/templates/partials/jobs_table.html b/web/templates/partials/jobs_table.html new file mode 100644 index 0000000..e45cc68 --- /dev/null +++ b/web/templates/partials/jobs_table.html @@ -0,0 +1,40 @@ +{{ define "partial_jobs_table" }} +{{ $jobs := index . "jobs" }} +{{ if not $jobs }} +

No jobs yet. Submit one to get started.

+{{ else }} + + + + + + + + + + + + + + {{ range $job := $jobs }} + + + + + + + + + + {{ end }} + +
NameStatusProgressFramesFormatCreatedActions
{{ $job.Name }}{{ $job.Status }}{{ progressInt $job.Progress }}%{{ if $job.FrameStart }}{{ derefInt $job.FrameStart }}{{ end }}{{ if $job.FrameEnd }}-{{ derefInt $job.FrameEnd }}{{ end }}{{ if $job.OutputFormat }}{{ derefString $job.OutputFormat }}{{ else }}-{{ end }}{{ formatTime $job.CreatedAt }} + {{ if or (eq $job.Status "pending") (eq $job.Status "running") }} + + {{ end }} + {{ if or (eq $job.Status "completed") (eq $job.Status "failed") (eq $job.Status "cancelled") }} + + {{ end }} +
+{{ end }} +{{ end }} diff --git a/web/vite.config.js b/web/vite.config.js deleted file mode 100644 index 9136091..0000000 --- a/web/vite.config.js +++ /dev/null @@ -1,19 +0,0 @@ -import { defineConfig } from 'vite' -import react from '@vitejs/plugin-react' - -export default defineConfig({ - plugins: [react()], - build: { - outDir: 'dist', - emptyOutDir: true, - }, - server: { - proxy: { - '/api': { - target: 'http://localhost:8080', - changeOrigin: true, - }, - }, - }, -}) -