diff --git a/.gitignore b/.gitignore index 1f895bd9..5fab1184 100644 --- a/.gitignore +++ b/.gitignore @@ -33,4 +33,11 @@ test-results playwright-report/ # Vitest browser mode screenshots -__screenshots__/ \ No newline at end of file +__screenshots__/ + +# Native downloads +vendor/ + +# AI workspaces +.qwen +.gemini \ No newline at end of file diff --git a/electron-builder.json5 b/electron-builder.json5 index 18498df6..277793a9 100644 --- a/electron-builder.json5 +++ b/electron-builder.json5 @@ -24,6 +24,11 @@ { "from": "public/wallpapers", "to": "assets/wallpapers" + }, + { + "from": "vendor/ffmpeg", + "to": "ffmpeg", + "filter": ["ffmpeg.exe", "ffprobe.exe", "ffmpeg", "ffprobe"] } ], diff --git a/electron/electron-env.d.ts b/electron/electron-env.d.ts index b2a37205..ec9b0ad1 100644 --- a/electron/electron-env.d.ts +++ b/electron/electron-env.d.ts @@ -139,6 +139,50 @@ interface Window { setHasUnsavedChanges: (hasChanges: boolean) => void; onRequestSaveBeforeClose: (callback: () => Promise | boolean) => () => void; setLocale: (locale: string) => Promise; + + // ---- FFmpeg Native Export ---- + ffmpegGetCapabilities: () => Promise<{ + available: boolean; + encoders: string[]; + bestEncoder: string | null; + path: string | null; + }>; + ffmpegExportStart: (config: { + width: number; + height: number; + frameRate: number; + encoder: string; + bitrate: number; + audioSourcePath?: string; + hasAudio?: boolean; + }) => Promise<{ + success: boolean; + sessionId?: string; + error?: string; + }>; + ffmpegExportFrame: ( + sessionId: string, + frameData: ArrayBuffer, + ) => Promise<{ + success: boolean; + backpressure?: boolean; + frameCount?: number; + error?: string; + }>; + ffmpegExportFinish: ( + sessionId: string, + fileName: string, + ) => Promise<{ + success: boolean; + path?: string; + message?: string; + canceled?: boolean; + error?: string; + }>; + ffmpegExportCancel: (sessionId: string) => Promise<{ + success: boolean; + error?: string; + }>; }; } diff --git 
a/electron/ffmpeg/ffmpegManager.ts b/electron/ffmpeg/ffmpegManager.ts new file mode 100644 index 00000000..96be131f --- /dev/null +++ b/electron/ffmpeg/ffmpegManager.ts @@ -0,0 +1,253 @@ +import { execFile } from "node:child_process"; +import fs from "node:fs"; +import path from "node:path"; +import { app } from "electron"; + +let cachedFFmpegPath: string | null = null; +let cachedEncoders: string[] | null = null; + +/** + * Resolves the FFmpeg binary path. + * - In packaged builds: looks in extraResources/ffmpeg/ + * - In development: looks for system FFmpeg on PATH, or a local vendor copy + */ +export function getFFmpegPath(): string | null { + if (cachedFFmpegPath !== null) { + return cachedFFmpegPath; + } + + const isWin = process.platform === "win32"; + const binaryName = isWin ? "ffmpeg.exe" : "ffmpeg"; + + // 1. Packaged build — extraResources + if (app.isPackaged) { + const resourcePath = path.join(process.resourcesPath, "ffmpeg", binaryName); + if (fs.existsSync(resourcePath)) { + cachedFFmpegPath = resourcePath; + return cachedFFmpegPath; + } + } + + // 2. Development — local vendor directory + const vendorPath = path.join(app.getAppPath(), "vendor", "ffmpeg", binaryName); + if (fs.existsSync(vendorPath)) { + cachedFFmpegPath = vendorPath; + return cachedFFmpegPath; + } + + // 3. System PATH fallback + const systemPath = findOnPath(binaryName); + if (systemPath) { + cachedFFmpegPath = systemPath; + return cachedFFmpegPath; + } + + cachedFFmpegPath = null; + return null; +} + +/** + * Checks if FFmpeg is available. + */ +export function isFFmpegAvailable(): boolean { + return getFFmpegPath() !== null; +} + +/** + * Probes available hardware encoders by running `ffmpeg -encoders`. + * Caches the result after the first call. 
+ */ +export async function probeHardwareEncoders(): Promise<string[]> { + if (cachedEncoders !== null) { + return cachedEncoders; + } + + const ffmpegPath = getFFmpegPath(); + if (!ffmpegPath) { + cachedEncoders = []; + return cachedEncoders; + } + + try { + const output = await execFileAsync(ffmpegPath, ["-hide_banner", "-encoders"]); + const encoders: string[] = []; + + // Check for hardware H.264 encoders + const hwEncoders = [ + "h264_nvenc", // NVIDIA + "h264_qsv", // Intel Quick Sync + "h264_amf", // AMD + ]; + + for (const encoder of hwEncoders) { + if (output.includes(encoder)) { + // Verify the encoder actually works by trying to initialize it + const works = await testEncoder(ffmpegPath, encoder); + if (works) { + encoders.push(encoder); + } + } + } + + // Software fallback is always available if FFmpeg exists + if (output.includes("libx264")) { + encoders.push("libx264"); + } + + cachedEncoders = encoders; + console.log("[FFmpegManager] Available encoders:", encoders); + return cachedEncoders; + } catch (error) { + console.warn("[FFmpegManager] Failed to probe encoders:", error); + cachedEncoders = []; + return cachedEncoders; + } +} + +/** + * Selects the best available encoder. + * Priority: NVENC > QSV > AMF > libx264 + */ +export async function selectBestEncoder(): Promise<string | null> { + const encoders = await probeHardwareEncoders(); + const priority = ["h264_nvenc", "h264_qsv", "h264_amf", "libx264"]; + for (const encoder of priority) { + if (encoders.includes(encoder)) { + return encoder; + } + } + return null; +} + +/** + * Gets the full FFmpeg capabilities object for the renderer. 
+ */ +export async function getFFmpegCapabilities(): Promise<{ + available: boolean; + encoders: string[]; + bestEncoder: string | null; + path: string | null; +}> { + const ffmpegPath = getFFmpegPath(); + if (!ffmpegPath) { + return { available: false, encoders: [], bestEncoder: null, path: null }; + } + + const encoders = await probeHardwareEncoders(); + const bestEncoder = await selectBestEncoder(); + + return { + available: true, + encoders, + bestEncoder, + path: ffmpegPath, + }; +} + +/** + * Builds FFmpeg arguments for encoding raw RGBA frames piped to stdin. + */ +export function buildFFmpegArgs(config: { + width: number; + height: number; + frameRate: number; + encoder: string; + bitrate: number; + outputPath: string; + audioSourcePath?: string; + hasAudio?: boolean; +}): string[] { + const args: string[] = [ + "-hide_banner", + "-loglevel", + "warning", + "-y", // overwrite output + + // Input 0: Raw H.264 video stream from stdin (encoded by Chrome's hardware encoder) + "-f", + "h264", + "-r", + String(config.frameRate), + "-i", + "pipe:0", + ]; + + // Input 1: audio from source file (if available) + if (config.audioSourcePath && config.hasAudio) { + args.push("-i", config.audioSourcePath); + } + + // Video encoding settings - we just copy the stream since it's already hardware-encoded H.264! + args.push("-map", "0:v", "-c:v", "copy"); + + // Audio settings + if (config.audioSourcePath && config.hasAudio) { + args.push("-map", "1:a", "-c:a", "aac", "-b:a", "192k", "-ac", "2"); + } + + // MP4 settings + args.push( + "-movflags", + "+faststart", + "-shortest", // end when shortest stream ends + config.outputPath, + ); + + return args; +} + +// ---- Helpers ---- + +function findOnPath(binaryName: string): string | null { + const pathEnv = process.env.PATH || ""; + const separator = process.platform === "win32" ? 
";" : ":"; + const dirs = pathEnv.split(separator); + + for (const dir of dirs) { + const fullPath = path.join(dir, binaryName); + if (fs.existsSync(fullPath)) { + return fullPath; + } + } + + return null; +} + +function execFileAsync(cmd: string, args: string[]): Promise { + return new Promise((resolve, reject) => { + execFile(cmd, args, { maxBuffer: 1024 * 1024 }, (error, stdout, stderr) => { + if (error) { + reject(error); + return; + } + resolve(stdout + stderr); + }); + }); +} + +async function testEncoder(ffmpegPath: string, encoder: string): Promise { + try { + // Try encoding 1 black frame with the encoder to see if it actually initializes + // Using 256x256 because some hardware encoders (NVENC/QSV) fail on very small dimensions like 64x64 + await execFileAsync(ffmpegPath, [ + "-hide_banner", + "-loglevel", + "error", + "-f", + "lavfi", + "-i", + "color=c=black:s=256x256:d=0.1", + "-c:v", + encoder, + "-frames:v", + "1", + "-f", + "null", + "-", + ]); + return true; + } catch { + console.warn(`[FFmpegManager] Encoder ${encoder} failed validation test`); + return false; + } +} diff --git a/electron/ipc/handlers.ts b/electron/ipc/handlers.ts index 4cb48756..97cb2f1d 100644 --- a/electron/ipc/handlers.ts +++ b/electron/ipc/handlers.ts @@ -1,3 +1,4 @@ +import { type ChildProcess, spawn } from "node:child_process"; import fs from "node:fs/promises"; import path from "node:path"; import { fileURLToPath, pathToFileURL } from "node:url"; @@ -18,6 +19,7 @@ import { type RecordingSession, type StoreRecordedSessionInput, } from "../../src/lib/recordingSession"; +import { buildFFmpegArgs, getFFmpegCapabilities, getFFmpegPath } from "../ffmpeg/ffmpegManager"; import { mainT } from "../i18n"; import { RECORDINGS_DIR } from "../main"; @@ -949,4 +951,282 @@ export function registerIpcHandlers( return { success: false, error: String(error) }; } }); + + // ---- Security Check for Native APIs ---- + function isTrustedSender(event: Electron.IpcMainInvokeEvent): boolean { + 
try { + // In Electron 30+, senderFrame contains the actual frame URL + const urlStr = event.senderFrame?.url || event.sender.getURL(); + if (!urlStr) return false; + const url = new URL(urlStr); + return ( + url.protocol === "file:" || url.hostname === "localhost" || url.hostname === "127.0.0.1" + ); + } catch { + return false; + } + } + + // ---- FFmpeg Native Export Handlers ---- + + const ffmpegSessions = new Map< + string, + { + process: ChildProcess; + outputPath: string; + frameCount: number; + startedAt: number; + finished: boolean; + exitPromise: Promise<{ code: number | null; signal: string | null }>; + } + >(); + + ipcMain.handle("ffmpeg-get-capabilities", async (event) => { + if (!isTrustedSender(event)) { + console.error("[Security] Blocked unauthorized FFmpeg capabilities request"); + return { available: false, encoders: [], bestEncoder: null, path: null }; + } + try { + return await getFFmpegCapabilities(); + } catch (error) { + console.error("[FFmpeg] Failed to get capabilities:", error); + return { available: false, encoders: [], bestEncoder: null, path: null }; + } + }); + + ipcMain.handle( + "ffmpeg-export-start", + async ( + event, + config: { + width: number; + height: number; + frameRate: number; + encoder: string; + bitrate: number; + audioSourcePath?: string; + hasAudio?: boolean; + }, + ) => { + // Ensure only trusted local code can spawn arbitrary shell binaries + if (!isTrustedSender(event)) { + return { success: false, error: "Unauthorized caller" }; + } + + try { + const ffmpegPath = getFFmpegPath(); + if (!ffmpegPath) { + return { success: false, error: "FFmpeg not found" }; + } + + // Create temp output file + const sessionId = `ffmpeg-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`; + const tempDir = path.join(app.getPath("temp"), "openscreen-export"); + await fs.mkdir(tempDir, { recursive: true }); + const outputPath = path.join(tempDir, `${sessionId}.mp4`); + + // Resolve audio source path if it's a file:// URL + let 
resolvedAudioPath = config.audioSourcePath; + if (resolvedAudioPath && /^file:\/\//i.test(resolvedAudioPath)) { + try { + resolvedAudioPath = fileURLToPath(resolvedAudioPath); + } catch { + // Keep original path + } + } + + // Verify audio source exists + let hasAudio = config.hasAudio ?? false; + if (resolvedAudioPath && hasAudio) { + try { + await fs.access(resolvedAudioPath); + } catch { + console.warn( + "[FFmpeg] Audio source not accessible, exporting video only:", + resolvedAudioPath, + ); + hasAudio = false; + resolvedAudioPath = undefined; + } + } + + const args = buildFFmpegArgs({ + width: config.width, + height: config.height, + frameRate: config.frameRate, + encoder: config.encoder, + bitrate: config.bitrate, + outputPath, + audioSourcePath: resolvedAudioPath, + hasAudio, + }); + + console.log(`[FFmpeg] Starting export: ${ffmpegPath} ${args.join(" ")}`); + + const ffmpegProcess = spawn(ffmpegPath, args, { + stdio: ["pipe", "pipe", "pipe"], + }); + + const exitPromise = new Promise<{ code: number | null; signal: string | null }>( + (resolve) => { + ffmpegProcess.on("close", (code, signal) => { + resolve({ code, signal }); + }); + ffmpegProcess.on("error", (err) => { + console.error("[FFmpeg] Process error:", err); + resolve({ code: -1, signal: null }); + }); + }, + ); + + let stderrOutput = ""; + ffmpegProcess.stderr?.on("data", (data: Buffer) => { + const text = data.toString(); + stderrOutput += text; + // Only log warnings/errors, not progress + if (text.includes("Error") || text.includes("error") || text.includes("Warning")) { + console.warn("[FFmpeg stderr]", text.trim()); + } + }); + + ffmpegSessions.set(sessionId, { + process: ffmpegProcess, + outputPath, + frameCount: 0, + startedAt: Date.now(), + finished: false, + exitPromise, + }); + + return { success: true, sessionId }; + } catch (error) { + console.error("[FFmpeg] Failed to start export:", error); + return { success: false, error: String(error) }; + } + }, + ); + + ipcMain.handle( + 
"ffmpeg-export-frame", + async (event, sessionId: string, frameData: ArrayBuffer) => { + if (!isTrustedSender(event)) return { success: false, error: "Unauthorized" }; + + try { + const session = ffmpegSessions.get(sessionId); + if (!session || session.finished) { + return { success: false, error: "Invalid or finished session" }; + } + + const stdin = session.process.stdin; + if (!stdin || stdin.destroyed) { + return { success: false, error: "FFmpeg stdin not available" }; + } + + const buffer = Buffer.from(frameData); + const canWrite = stdin.write(buffer); + session.frameCount++; + + // Return backpressure signal so renderer can throttle + return { success: true, backpressure: !canWrite, frameCount: session.frameCount }; + } catch (error) { + return { success: false, error: String(error) }; + } + }, + ); + + ipcMain.handle("ffmpeg-export-finish", async (event, sessionId: string, fileName: string) => { + if (!isTrustedSender(event)) return { success: false, error: "Unauthorized" }; + + try { + const session = ffmpegSessions.get(sessionId); + if (!session) { + return { success: false, error: "Invalid session" }; + } + + session.finished = true; + + // Close stdin to signal end of input + if (session.process.stdin && !session.process.stdin.destroyed) { + session.process.stdin.end(); + } + + // Wait for FFmpeg to finish + const result = await session.exitPromise; + const elapsed = ((Date.now() - session.startedAt) / 1000).toFixed(1); + console.log( + `[FFmpeg] Export finished: ${session.frameCount} frames in ${elapsed}s (exit code: ${result.code})`, + ); + + if (result.code !== 0) { + ffmpegSessions.delete(sessionId); + return { + success: false, + error: `FFmpeg exited with code ${result.code}`, + }; + } + + // Verify output file exists + try { + await fs.access(session.outputPath); + } catch { + ffmpegSessions.delete(sessionId); + return { success: false, error: "FFmpeg output file not found" }; + } + + // Show save dialog + const saveResult = await 
dialog.showSaveDialog({ + title: mainT("dialogs", "fileDialogs.saveVideo"), + defaultPath: path.join(app.getPath("downloads"), fileName), + filters: [{ name: mainT("dialogs", "fileDialogs.mp4Video"), extensions: ["mp4"] }], + properties: ["createDirectory", "showOverwriteConfirmation"], + }); + + if (saveResult.canceled || !saveResult.filePath) { + // Clean up temp file + await fs.unlink(session.outputPath).catch(() => {}); + ffmpegSessions.delete(sessionId); + return { success: false, canceled: true, message: "Export canceled" }; + } + + // Move temp file to user-chosen location + await fs.copyFile(session.outputPath, saveResult.filePath); + // Clean up temp file + await fs.unlink(session.outputPath).catch(() => {}); + + ffmpegSessions.delete(sessionId); + + return { + success: true, + path: saveResult.filePath, + message: "Video exported successfully", + }; + } catch (error) { + console.error("[FFmpeg] Failed to finish export:", error); + ffmpegSessions.delete(sessionId); + return { success: false, error: String(error) }; + } + }); + + ipcMain.handle("ffmpeg-export-cancel", async (event, sessionId: string) => { + if (!isTrustedSender(event)) return { success: false, error: "Unauthorized" }; + + try { + const session = ffmpegSessions.get(sessionId); + if (!session) { + return { success: false, error: "Invalid session" }; + } + + session.finished = true; + session.process.kill("SIGKILL"); + ffmpegSessions.delete(sessionId); + + // Clean up temp file + fs.unlink(session.outputPath).catch(() => {}); + + console.log(`[FFmpeg] Export canceled: session ${sessionId}`); + return { success: true }; + } catch (error) { + return { success: false, error: String(error) }; + } + }); } diff --git a/electron/preload.ts b/electron/preload.ts index eeca25cd..30f454ac 100644 --- a/electron/preload.ts +++ b/electron/preload.ts @@ -142,4 +142,29 @@ contextBridge.exposeInMainWorld("electronAPI", { ipcRenderer.on("request-save-before-close", listener); return () => 
ipcRenderer.removeListener("request-save-before-close", listener); }, + + // ---- FFmpeg Native Export ---- + ffmpegGetCapabilities: () => { + return ipcRenderer.invoke("ffmpeg-get-capabilities"); + }, + ffmpegExportStart: (config: { + width: number; + height: number; + frameRate: number; + encoder: string; + bitrate: number; + audioSourcePath?: string; + hasAudio?: boolean; + }) => { + return ipcRenderer.invoke("ffmpeg-export-start", config); + }, + ffmpegExportFrame: (sessionId: string, frameData: ArrayBuffer) => { + return ipcRenderer.invoke("ffmpeg-export-frame", sessionId, frameData); + }, + ffmpegExportFinish: (sessionId: string, fileName: string) => { + return ipcRenderer.invoke("ffmpeg-export-finish", sessionId, fileName); + }, + ffmpegExportCancel: (sessionId: string) => { + return ipcRenderer.invoke("ffmpeg-export-cancel", sessionId); + }, }); diff --git a/package-lock.json b/package-lock.json index ba40beb4..f424c6ef 100644 --- a/package-lock.json +++ b/package-lock.json @@ -196,6 +196,7 @@ "integrity": "sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.3", @@ -424,6 +425,7 @@ "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz", "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==", "license": "MIT", + "peer": true, "engines": { "node": ">=6.9.0" } @@ -740,6 +742,7 @@ } ], "license": "MIT", + "peer": true, "engines": { "node": ">=20.19.0" }, @@ -788,6 +791,7 @@ } ], "license": "MIT", + "peer": true, "engines": { "node": ">=20.19.0" } @@ -1551,7 +1555,6 @@ "dev": true, "license": "BSD-2-Clause", "optional": true, - "peer": true, "dependencies": { "cross-dirname": "^0.1.0", "debug": "^4.3.4", @@ -1573,7 +1576,6 @@ "dev": true, "license": "MIT", "optional": true, - "peer": true, "dependencies": { 
"graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", @@ -1590,7 +1592,6 @@ "dev": true, "license": "MIT", "optional": true, - "peer": true, "dependencies": { "universalify": "^2.0.0" }, @@ -1605,7 +1606,6 @@ "dev": true, "license": "MIT", "optional": true, - "peer": true, "engines": { "node": ">= 10.0.0" } @@ -2304,6 +2304,7 @@ "integrity": "sha512-LTATglVUPGkPf15zX1wTMlZ0+AU7cGEGF6ekVF1crA8eHUWsGjrYTB+Ht4E3HTrCok8weQG+K01rJndCp/l4XA==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@babel/runtime": "^7.7.2", "@jimp/core": "^0.16.13" @@ -2346,6 +2347,7 @@ "integrity": "sha512-8Z1k96ZFxlhK2bgrY1JNWNwvaBeI/bciLM0yDOni2+aZwfIIiC7Y6PeWHTAvjHNjphz+XCt01WQmOYWCn0ML6g==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@babel/runtime": "^7.7.2", "@jimp/utils": "^0.16.13" @@ -2360,6 +2362,7 @@ "integrity": "sha512-PvLrfa8vkej3qinlebyhLpksJgCF5aiysDMSVhOZqwH5nQLLtDE9WYbnsofGw4r0VVpyw3H/ANCIzYTyCtP9Cg==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@babel/runtime": "^7.7.2", "@jimp/utils": "^0.16.13" @@ -2388,6 +2391,7 @@ "integrity": "sha512-xW+9BtEvoIkkH/Wde9ql4nAFbYLkVINhpgAE7VcBUsuuB34WUbcBl/taOuUYQrPEFQJ4jfXiAJZ2H/rvKjCVnQ==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@babel/runtime": "^7.7.2", "@jimp/utils": "^0.16.13", @@ -2437,6 +2441,7 @@ "integrity": "sha512-WEl2tPVYwzYL8OKme6Go2xqiWgKsgxlMwyHabdAU4tXaRwOCnOI7v4021gCcBb9zn/oWwguHuKHmK30Fw2Z/PA==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@babel/runtime": "^7.7.2", "@jimp/utils": "^0.16.13" @@ -2580,6 +2585,7 @@ "integrity": "sha512-qoqtN8LDknm3fJm9nuPygJv30O3vGhSBD2TxrsCnhtOsxKAqVPJtFVdGd/qVuZ8nqQANQmTlfqTiK9mVWQ7MiQ==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@babel/runtime": "^7.7.2", "@jimp/utils": "^0.16.13" @@ -2594,6 +2600,7 @@ "integrity": "sha512-Ev+Jjmj1nHYw897z9C3R9dYsPv7S2/nxdgfFb/h8hOwK0Ovd1k/+yYS46A0uj/JCKK0pQk8wOslYBkPwdnLorw==", "dev": true, "license": "MIT", + "peer": 
true, "dependencies": { "@babel/runtime": "^7.7.2", "@jimp/utils": "^0.16.13" @@ -2611,6 +2618,7 @@ "integrity": "sha512-05POQaEJVucjTiSGMoH68ZiELc7QqpIpuQlZ2JBbhCV+WCbPFUBcGSmE7w4Jd0E2GvCho/NoMODLwgcVGQA97A==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@babel/runtime": "^7.7.2", "@jimp/utils": "^0.16.13" @@ -3037,7 +3045,6 @@ "resolved": "https://registry.npmjs.org/@pixi/color/-/color-7.4.3.tgz", "integrity": "sha512-a6R+bXKeXMDcRmjYQoBIK+v2EYqxSX49wcjAY579EYM/WrFKS98nSees6lqVUcLKrcQh2DT9srJHX7XMny3voQ==", "license": "MIT", - "peer": true, "dependencies": { "@pixi/colord": "^2.9.6" } @@ -3052,8 +3059,7 @@ "version": "7.4.3", "resolved": "https://registry.npmjs.org/@pixi/constants/-/constants-7.4.3.tgz", "integrity": "sha512-QGmwJUNQy/vVEHzL6VGQvnwawLZ1wceZMI8HwJAT4/I2uAzbBeFDdmCS8WsTpSWLZjF/DszDc1D8BFp4pVJ5UQ==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/@pixi/core": { "version": "7.4.3", @@ -3080,8 +3086,7 @@ "version": "7.4.3", "resolved": "https://registry.npmjs.org/@pixi/extensions/-/extensions-7.4.3.tgz", "integrity": "sha512-FhoiYkHQEDYHUE7wXhqfsTRz6KxLXjuMbSiAwnLb9uG1vAgp6q6qd6HEsf4X30YaZbLFY8a4KY6hFZWjF+4Fdw==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/@pixi/filter-drop-shadow": { "version": "5.2.0", @@ -3108,22 +3113,19 @@ "version": "7.4.3", "resolved": "https://registry.npmjs.org/@pixi/math/-/math-7.4.3.tgz", "integrity": "sha512-/uJOVhR2DOZ+zgdI6Bs/CwcXT4bNRKsS+TqX3ekRIxPCwaLra+Qdm7aDxT5cTToDzdxbKL5+rwiLu3Y1egILDw==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/@pixi/runner": { "version": "7.4.3", "resolved": "https://registry.npmjs.org/@pixi/runner/-/runner-7.4.3.tgz", "integrity": "sha512-TJyfp7y23u5vvRAyYhVSa7ytq0PdKSvPLXu4G3meoFh1oxTLHH6g/RIzLuxUAThPG2z7ftthuW3qWq6dRV+dhw==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/@pixi/settings": { "version": "7.4.3", "resolved": 
"https://registry.npmjs.org/@pixi/settings/-/settings-7.4.3.tgz", "integrity": "sha512-SmGK8smc0PxRB9nr0UJioEtE9hl4gvj9OedCvZx3bxBwA3omA5BmP3CyhQfN8XJ29+o2OUL01r3zAPVol4l4lA==", "license": "MIT", - "peer": true, "dependencies": { "@pixi/constants": "7.4.3", "@types/css-font-loading-module": "^0.0.12", @@ -3135,7 +3137,6 @@ "resolved": "https://registry.npmjs.org/@pixi/ticker/-/ticker-7.4.3.tgz", "integrity": "sha512-tHsAD0iOUb6QSGGw+c8cyRBvxsq/NlfzIFBZLEHhWZ+Bx4a0MmXup6I/yJDGmyPCYE+ctCcAfY13wKAzdiVFgQ==", "license": "MIT", - "peer": true, "dependencies": { "@pixi/extensions": "7.4.3", "@pixi/settings": "7.4.3", @@ -3147,7 +3148,6 @@ "resolved": "https://registry.npmjs.org/@pixi/utils/-/utils-7.4.3.tgz", "integrity": "sha512-NO3Y9HAn2UKS1YdxffqsPp+kDpVm8XWvkZcS/E+rBzY9VTLnNOI7cawSRm+dacdET3a8Jad3aDKEDZ0HmAqAFA==", "license": "MIT", - "peer": true, "dependencies": { "@pixi/color": "7.4.3", "@pixi/constants": "7.4.3", @@ -3162,22 +3162,19 @@ "version": "2.1.4", "resolved": "https://registry.npmjs.org/@types/earcut/-/earcut-2.1.4.tgz", "integrity": "sha512-qp3m9PPz4gULB9MhjGID7wpo3gJ4bTGXm7ltNDsmOvsPduTeHp8wSW9YckBj3mljeOh4F0m2z/0JKAALRKbmLQ==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/@pixi/utils/node_modules/earcut": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/earcut/-/earcut-2.2.4.tgz", "integrity": "sha512-/pjZsA1b4RPHbeWZQn66SWS8nZZWLQQ23oE3Eam7aroEFGEvwKAsJfZ9ytiEMycfzXWpca4FA9QIOehf7PocBQ==", - "license": "ISC", - "peer": true + "license": "ISC" }, "node_modules/@pixi/utils/node_modules/eventemitter3": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/@pkgjs/parseargs": { "version": "0.11.0", @@ -4603,8 +4600,7 @@ "resolved": 
"https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.4.tgz", "integrity": "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==", "dev": true, - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/@types/babel__core": { "version": "7.20.5", @@ -4814,6 +4810,7 @@ "integrity": "sha512-RFA/bURkcKzx/X9oumPG9Vp3D3JUgus/d0b67KB0t5S/raciymilkOa66olh78MUI92QLbEJevO7rvqU/kjwKA==", "devOptional": true, "license": "MIT", + "peer": true, "dependencies": { "@types/prop-types": "*", "csstype": "^3.0.2" @@ -4825,6 +4822,7 @@ "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==", "devOptional": true, "license": "MIT", + "peer": true, "peerDependencies": { "@types/react": "^18.0.0" } @@ -5006,7 +5004,6 @@ "os": [ "aix" ], - "peer": true, "engines": { "node": ">=18" } @@ -5024,7 +5021,6 @@ "os": [ "android" ], - "peer": true, "engines": { "node": ">=18" } @@ -5042,7 +5038,6 @@ "os": [ "android" ], - "peer": true, "engines": { "node": ">=18" } @@ -5060,7 +5055,6 @@ "os": [ "android" ], - "peer": true, "engines": { "node": ">=18" } @@ -5078,7 +5072,6 @@ "os": [ "darwin" ], - "peer": true, "engines": { "node": ">=18" } @@ -5096,7 +5089,6 @@ "os": [ "darwin" ], - "peer": true, "engines": { "node": ">=18" } @@ -5114,7 +5106,6 @@ "os": [ "freebsd" ], - "peer": true, "engines": { "node": ">=18" } @@ -5132,7 +5123,6 @@ "os": [ "freebsd" ], - "peer": true, "engines": { "node": ">=18" } @@ -5150,7 +5140,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5168,7 +5157,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5186,7 +5174,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5204,7 +5191,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5222,7 +5208,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5240,7 +5225,6 @@ "os": [ "linux" ], - "peer": 
true, "engines": { "node": ">=18" } @@ -5258,7 +5242,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5276,7 +5259,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5294,7 +5276,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5312,7 +5293,6 @@ "os": [ "netbsd" ], - "peer": true, "engines": { "node": ">=18" } @@ -5330,7 +5310,6 @@ "os": [ "netbsd" ], - "peer": true, "engines": { "node": ">=18" } @@ -5348,7 +5327,6 @@ "os": [ "openbsd" ], - "peer": true, "engines": { "node": ">=18" } @@ -5366,7 +5344,6 @@ "os": [ "openbsd" ], - "peer": true, "engines": { "node": ">=18" } @@ -5384,7 +5361,6 @@ "os": [ "openharmony" ], - "peer": true, "engines": { "node": ">=18" } @@ -5402,7 +5378,6 @@ "os": [ "sunos" ], - "peer": true, "engines": { "node": ">=18" } @@ -5420,7 +5395,6 @@ "os": [ "win32" ], - "peer": true, "engines": { "node": ">=18" } @@ -5438,7 +5412,6 @@ "os": [ "win32" ], - "peer": true, "engines": { "node": ">=18" } @@ -5456,7 +5429,6 @@ "os": [ "win32" ], - "peer": true, "engines": { "node": ">=18" } @@ -5495,7 +5467,6 @@ "hasInstallScript": true, "license": "MIT", "optional": true, - "peer": true, "bin": { "esbuild": "bin/esbuild" }, @@ -5538,7 +5509,6 @@ "dev": true, "license": "MIT", "optional": true, - "peer": true, "engines": { "node": ">=12.0.0" }, @@ -5551,98 +5521,6 @@ } } }, - "node_modules/@vitest/browser-playwright/node_modules/picomatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", - "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/@vitest/browser-playwright/node_modules/vite": { - "version": "7.3.2", - "resolved": 
"https://registry.npmjs.org/vite/-/vite-7.3.2.tgz", - "integrity": "sha512-Bby3NOsna2jsjfLVOHKes8sGwgl4TT0E6vvpYgnAYDIF/tie7MRaFthmKuHx1NSXjiTueXH3do80FMQgvEktRg==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - "esbuild": "^0.27.0", - "fdir": "^6.5.0", - "picomatch": "^4.0.3", - "postcss": "^8.5.6", - "rollup": "^4.43.0", - "tinyglobby": "^0.2.15" - }, - "bin": { - "vite": "bin/vite.js" - }, - "engines": { - "node": "^20.19.0 || >=22.12.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^20.19.0 || >=22.12.0", - "jiti": ">=1.21.0", - "less": "^4.0.0", - "lightningcss": "^1.21.0", - "sass": "^1.70.0", - "sass-embedded": "^1.70.0", - "stylus": ">=0.54.8", - "sugarss": "^5.0.0", - "terser": "^5.16.0", - "tsx": "^4.8.1", - "yaml": "^2.4.2" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "jiti": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - }, - "tsx": { - "optional": true - }, - "yaml": { - "optional": true - } - } - }, "node_modules/@vitest/browser/node_modules/@esbuild/aix-ppc64": { "version": "0.27.7", "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.7.tgz", @@ -5656,7 +5534,6 @@ "os": [ "aix" ], - "peer": true, "engines": { "node": ">=18" } @@ -5674,7 +5551,6 @@ "os": [ "android" ], - "peer": true, "engines": { "node": ">=18" } @@ -5692,7 +5568,6 @@ "os": [ "android" ], - "peer": true, "engines": { "node": ">=18" } @@ -5710,7 +5585,6 @@ "os": [ "android" ], - "peer": true, "engines": { "node": ">=18" } @@ -5728,7 +5602,6 @@ "os": [ "darwin" ], - "peer": true, "engines": { "node": ">=18" } 
@@ -5746,7 +5619,6 @@ "os": [ "darwin" ], - "peer": true, "engines": { "node": ">=18" } @@ -5764,7 +5636,6 @@ "os": [ "freebsd" ], - "peer": true, "engines": { "node": ">=18" } @@ -5782,7 +5653,6 @@ "os": [ "freebsd" ], - "peer": true, "engines": { "node": ">=18" } @@ -5800,7 +5670,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5818,7 +5687,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5836,7 +5704,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5854,7 +5721,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5872,7 +5738,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5890,7 +5755,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5908,7 +5772,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5926,7 +5789,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5944,7 +5806,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } @@ -5962,7 +5823,6 @@ "os": [ "netbsd" ], - "peer": true, "engines": { "node": ">=18" } @@ -5980,7 +5840,6 @@ "os": [ "netbsd" ], - "peer": true, "engines": { "node": ">=18" } @@ -5998,7 +5857,6 @@ "os": [ "openbsd" ], - "peer": true, "engines": { "node": ">=18" } @@ -6016,7 +5874,6 @@ "os": [ "openbsd" ], - "peer": true, "engines": { "node": ">=18" } @@ -6034,7 +5891,6 @@ "os": [ "openharmony" ], - "peer": true, "engines": { "node": ">=18" } @@ -6052,7 +5908,6 @@ "os": [ "sunos" ], - "peer": true, "engines": { "node": ">=18" } @@ -6070,7 +5925,6 @@ "os": [ "win32" ], - "peer": true, "engines": { "node": ">=18" } @@ -6088,7 +5942,6 @@ "os": [ "win32" ], - "peer": true, "engines": { "node": ">=18" } @@ -6106,7 +5959,6 @@ "os": [ "win32" ], - "peer": true, "engines": { "node": ">=18" } @@ -6145,7 +5997,6 @@ "hasInstallScript": true, "license": "MIT", "optional": true, - "peer": true, "bin": { "esbuild": "bin/esbuild" }, @@ -6188,7 +6039,6 
@@ "dev": true, "license": "MIT", "optional": true, - "peer": true, "engines": { "node": ">=12.0.0" }, @@ -6201,21 +6051,6 @@ } } }, - "node_modules/@vitest/browser/node_modules/picomatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", - "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, "node_modules/@vitest/browser/node_modules/pixelmatch": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/pixelmatch/-/pixelmatch-7.1.0.tgz", @@ -6237,83 +6072,6 @@ "node": ">=14.19.0" } }, - "node_modules/@vitest/browser/node_modules/vite": { - "version": "7.3.2", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.2.tgz", - "integrity": "sha512-Bby3NOsna2jsjfLVOHKes8sGwgl4TT0E6vvpYgnAYDIF/tie7MRaFthmKuHx1NSXjiTueXH3do80FMQgvEktRg==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - "esbuild": "^0.27.0", - "fdir": "^6.5.0", - "picomatch": "^4.0.3", - "postcss": "^8.5.6", - "rollup": "^4.43.0", - "tinyglobby": "^0.2.15" - }, - "bin": { - "vite": "bin/vite.js" - }, - "engines": { - "node": "^20.19.0 || >=22.12.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^20.19.0 || >=22.12.0", - "jiti": ">=1.21.0", - "less": "^4.0.0", - "lightningcss": "^1.21.0", - "sass": "^1.70.0", - "sass-embedded": "^1.70.0", - "stylus": ">=0.54.8", - "sugarss": "^5.0.0", - "terser": "^5.16.0", - "tsx": "^4.8.1", - "yaml": "^2.4.2" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "jiti": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - 
"sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - }, - "tsx": { - "optional": true - }, - "yaml": { - "optional": true - } - } - }, "node_modules/@vitest/expect": { "version": "4.0.16", "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.16.tgz", @@ -6493,6 +6251,7 @@ "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -7332,6 +7091,7 @@ } ], "license": "MIT", + "peer": true, "dependencies": { "baseline-browser-mapping": "^2.8.9", "caniuse-lite": "^1.0.30001746", @@ -7644,7 +7404,6 @@ "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", "license": "MIT", - "peer": true, "dependencies": { "call-bind-apply-helpers": "^1.0.2", "get-intrinsic": "^1.3.0" @@ -8093,8 +7852,7 @@ "integrity": "sha512-+R08/oI0nl3vfPcqftZRpytksBXDzOUveBq/NBVx0sUp1axwzPQrKinNx5yd5sxPu8j1wIy8AfnVQ+5eFdha6Q==", "dev": true, "license": "MIT", - "optional": true, - "peer": true + "optional": true }, "node_modules/cross-spawn": { "version": "7.0.6", @@ -8442,6 +8200,7 @@ "integrity": "sha512-uOOBA3f+kW3o4KpSoMQ6SNpdXU7WtxlJRb9vCZgOvqhTz4b3GjcoWKstdisizNZLsylhTMv8TLHFPFW0Uxsj/g==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "app-builder-lib": "26.7.0", "builder-util": "26.4.1", @@ -8534,8 +8293,7 @@ "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz", "integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==", "dev": true, - "license": "MIT", - "peer": true + "license": "MIT" }, 
"node_modules/dom-walk": { "version": "0.1.2", @@ -8876,7 +8634,6 @@ "dev": true, "hasInstallScript": true, "license": "MIT", - "peer": true, "dependencies": { "@electron/asar": "^3.2.1", "debug": "^4.1.1", @@ -8897,7 +8654,6 @@ "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "graceful-fs": "^4.1.2", "jsonfile": "^4.0.0", @@ -10611,6 +10367,7 @@ "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", "license": "MIT", + "peer": true, "bin": { "jiti": "bin/jiti.js" } @@ -11431,7 +11188,6 @@ "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", "dev": true, "license": "MIT", - "peer": true, "bin": { "lz-string": "bin/bin.js" } @@ -12274,7 +12030,6 @@ "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", "license": "MIT", - "peer": true, "engines": { "node": ">= 0.4" }, @@ -12819,6 +12574,7 @@ "resolved": "https://registry.npmjs.org/pixi.js/-/pixi.js-8.14.0.tgz", "integrity": "sha512-ituDiEBb1Oqx56RYwTtC6MjPUhPfF/i15fpUv5oEqmzC/ce3SaSumulJcOjKG7+y0J0Ekl9Rl4XTxaUw+MVFZw==", "license": "MIT", + "peer": true, "dependencies": { "@pixi/colord": "^2.9.6", "@types/css-font-loading-module": "^0.0.12", @@ -12931,6 +12687,7 @@ "url": "https://github.com/sponsors/ai" } ], + "peer": true, "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", @@ -13075,7 +12832,6 @@ "dev": true, "license": "MIT", "optional": true, - "peer": true, "dependencies": { "commander": "^9.4.0" }, @@ -13093,7 +12849,6 @@ "dev": true, "license": "MIT", "optional": true, - "peer": true, "engines": { "node": "^12.20.0 || >=14" } @@ -13104,7 +12859,6 @@ 
"integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "ansi-regex": "^5.0.1", "ansi-styles": "^5.0.0", @@ -13120,7 +12874,6 @@ "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=10" }, @@ -13133,8 +12886,7 @@ "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", "dev": true, - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/proc-log": { "version": "5.0.0", @@ -13280,7 +13032,6 @@ "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", "license": "BSD-3-Clause", - "peer": true, "dependencies": { "side-channel": "^1.1.0" }, @@ -13339,6 +13090,7 @@ "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", "license": "MIT", + "peer": true, "dependencies": { "loose-envify": "^1.1.0" }, @@ -13351,6 +13103,7 @@ "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", "license": "MIT", + "peer": true, "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" @@ -14178,7 +13931,6 @@ "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", "license": "MIT", - "peer": true, "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", @@ -14198,7 +13950,6 @@ 
"resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", "license": "MIT", - "peer": true, "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" @@ -14215,7 +13966,6 @@ "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", "license": "MIT", - "peer": true, "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", @@ -14234,7 +13984,6 @@ "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", "license": "MIT", - "peer": true, "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", @@ -14915,6 +14664,7 @@ "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.18.tgz", "integrity": "sha512-6A2rnmW5xZMdw11LYjhcI5846rt9pbLSabY5XPxo+XWdxwZaFEn47Go4NzFiHu9sNNmr/kXivP1vStfvMaK1GQ==", "license": "MIT", + "peer": true, "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", @@ -14987,7 +14737,6 @@ "integrity": "sha512-yYrrsWnrXMcdsnu/7YMYAofM1ktpL5By7vZhf15CrXijWWrEYZks5AXBudalfSWJLlnen/QUJUB5aoB0kqZUGA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "mkdirp": "^0.5.1", "rimraf": "~2.6.2" @@ -15051,7 +14800,6 @@ "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "minimist": "^1.2.6" }, @@ -15066,7 +14814,6 @@ "deprecated": "Rimraf versions prior to v4 are no longer supported", "dev": true, "license": "ISC", - "peer": true, "dependencies": { "glob": "^7.1.3" }, @@ -15080,6 +14827,7 @@ "integrity": 
"sha512-t/R3R/n0MSwnnazuPpPNVO60LX0SKL45pyl9YlvxIdkH0Of7D5qM2EVe+yASRIlY5pZ73nclYJfNANGWPwFDZw==", "dev": true, "license": "BSD-2-Clause", + "peer": true, "dependencies": { "@jridgewell/source-map": "^0.3.3", "acorn": "^8.15.0", @@ -15232,6 +14980,7 @@ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -15516,7 +15265,6 @@ "resolved": "https://registry.npmjs.org/url/-/url-0.11.4.tgz", "integrity": "sha512-oCwdVC7mTuWiPyjLUz/COz5TLk6wgp0RCsN+wHZ2Ekneac9w8uuV0njcbbie2ME+Vs+d6duwmYuR3HgQXs1fOg==", "license": "MIT", - "peer": true, "dependencies": { "punycode": "^1.4.1", "qs": "^6.12.3" @@ -15529,8 +15277,7 @@ "version": "1.4.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/use-callback-ref": { "version": "1.3.3", @@ -15644,6 +15391,7 @@ "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "esbuild": "^0.21.3", "postcss": "^8.4.43", @@ -15718,13 +15466,15 @@ "resolved": "https://registry.npmjs.org/vite-plugin-electron-renderer/-/vite-plugin-electron-renderer-0.14.6.tgz", "integrity": "sha512-oqkWFa7kQIkvHXG7+Mnl1RTroA4sP0yesKatmAy0gjZC4VwUqlvF9IvOpHd1fpLWsqYX/eZlVxlhULNtaQ78Jw==", "dev": true, - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/vitest": { "version": "4.0.16", "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.16.tgz", "integrity": "sha512-E4t7DJ9pESL6E3I8nFjPa4xGUd3PmiWDLsDztS2qXSJWfHtbQnwAWylaBvSNY48I3vr8PTqIZlyK8TE3V3CA4Q==", "dev": true, + "peer": true, "dependencies": { "@vitest/expect": "4.0.16", "@vitest/mocker": "4.0.16", @@ -16254,6 +16004,7 @@ 
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, + "peer": true, "engines": { "node": ">=12" }, @@ -16266,6 +16017,7 @@ "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.2.tgz", "integrity": "sha512-2N/55r4JDJ4gdrCvGgINMy+HH3iRpNIz8K6SFwVsA+JbQScLiC+clmAxBgwiSPgcG9U15QmvqCGWzMbqda5zGQ==", "dev": true, + "peer": true, "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", diff --git a/scripts/download-ffmpeg.mjs b/scripts/download-ffmpeg.mjs new file mode 100644 index 00000000..e7b2756f --- /dev/null +++ b/scripts/download-ffmpeg.mjs @@ -0,0 +1,239 @@ +#!/usr/bin/env node + +/** + * Downloads the correct FFmpeg static build for the current platform. + * Run with: node scripts/download-ffmpeg.mjs + * + * Places the binary in vendor/ffmpeg// + * This is called at build time, NOT bundled in the repo. + */ + +import { execSync } from "node:child_process"; +import fs from "node:fs"; +import http from "node:http"; +import https from "node:https"; +import os from "node:os"; +import path from "node:path"; +import { pipeline } from "node:stream/promises"; +import { createGunzip } from "node:zlib"; + +const VENDOR_DIR = path.join(process.cwd(), "vendor", "ffmpeg"); + +// FFmpeg static build URLs +const SOURCES = { + win32: { + // gyan.dev essentials build — ~80MB, includes all common codecs + HW encoders + url: "https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-essentials.zip", + binaryPath: "ffmpeg-*-essentials_build/bin/ffmpeg.exe", + outputDir: "win32", + outputName: "ffmpeg.exe", + }, + darwin: { + url: "https://evermeet.cx/ffmpeg/getrelease/ffmpeg/zip", + binaryPath: "ffmpeg", + outputDir: "darwin", + outputName: "ffmpeg", + }, + linux: { + url: "https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz", + binaryPath: "ffmpeg-*-amd64-static/ffmpeg", + outputDir: "linux", + outputName: "ffmpeg", 
+ }, +}; + +const platform = os.platform(); + +async function main() { + const source = SOURCES[platform]; + if (!source) { + console.error(`Unsupported platform: ${platform}`); + console.error("Supported platforms: win32, darwin, linux"); + process.exit(1); + } + + const outputDir = VENDOR_DIR; + const outputPath = path.join(outputDir, source.outputName); + + // Check if already downloaded + if (fs.existsSync(outputPath)) { + console.log(`FFmpeg already exists at ${outputPath}`); + console.log("Delete it and re-run to re-download."); + return; + } + + console.log(`Downloading FFmpeg for ${platform}...`); + console.log(`URL: ${source.url}`); + + // Create output directory + fs.mkdirSync(outputDir, { recursive: true }); + + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "ffmpeg-download-")); + const ext = source.url.endsWith(".zip") + ? ".zip" + : source.url.endsWith(".tar.xz") + ? ".tar.xz" + : ".tar.gz"; + const tmpFile = path.join(tmpDir, `ffmpeg${ext}`); + + try { + // Download + await downloadFile(source.url, tmpFile); + console.log(`Downloaded to ${tmpFile}`); + + // Extract + console.log("Extracting..."); + if (ext === ".zip") { + if (platform === "win32") { + // Use PowerShell's Expand-Archive on Windows + execSync( + `powershell -NoProfile -Command "Expand-Archive -Force -Path '${tmpFile}' -DestinationPath '${tmpDir}'"`, + { stdio: "inherit" }, + ); + } else { + execSync(`unzip -o "${tmpFile}" -d "${tmpDir}"`, { stdio: "inherit" }); + } + } else if (ext === ".tar.xz") { + execSync(`tar xf "${tmpFile}" -C "${tmpDir}"`, { stdio: "inherit" }); + } else { + execSync(`tar xzf "${tmpFile}" -C "${tmpDir}"`, { stdio: "inherit" }); + } + + // Find the binary using glob pattern + const binaryPath = findFile(tmpDir, source.binaryPath); + if (!binaryPath) { + throw new Error( + `Could not find FFmpeg binary matching pattern: ${source.binaryPath}\nExtracted files: ${listFiles(tmpDir).join("\n")}`, + ); + } + + // Copy to vendor directory + 
fs.copyFileSync(binaryPath, outputPath); + + // Make executable on Unix + if (platform !== "win32") { + fs.chmodSync(outputPath, 0o755); + } + + console.log(`FFmpeg installed to ${outputPath}`); + + // Verify + const version = execSync(`"${outputPath}" -version`, { encoding: "utf-8" }).split("\n")[0]; + console.log(`Version: ${version}`); + } finally { + // Cleanup + fs.rmSync(tmpDir, { recursive: true, force: true }); + } +} + +function downloadFile(url, dest) { + return new Promise((resolve, reject) => { + const follow = (url, redirects = 0) => { + if (redirects > 5) { + reject(new Error("Too many redirects")); + return; + } + + const protocol = url.startsWith("https") ? https : http; + protocol + .get(url, { headers: { "User-Agent": "openscreen-build" } }, (res) => { + if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) { + follow(res.headers.location, redirects + 1); + return; + } + + if (res.statusCode !== 200) { + reject(new Error(`HTTP ${res.statusCode}: ${url}`)); + return; + } + + const file = fs.createWriteStream(dest); + let downloaded = 0; + const totalLength = parseInt(res.headers["content-length"] || "0", 10); + + res.on("data", (chunk) => { + downloaded += chunk.length; + if (totalLength > 0) { + const pct = ((downloaded / totalLength) * 100).toFixed(1); + process.stdout.write( + `\rDownloading: ${pct}% (${(downloaded / 1024 / 1024).toFixed(1)} MB)`, + ); + } + }); + + pipeline(res, file) + .then(() => { + console.log("\nDownload complete."); + resolve(); + }) + .catch(reject); + }) + .on("error", reject); + }; + + follow(url); + }); +} + +function findFile(dir, pattern) { + // Simple glob matching for patterns like "ffmpeg-*-essentials_build/bin/ffmpeg.exe" + const parts = pattern.split("/"); + return findFileRecursive(dir, parts, 0); +} + +function findFileRecursive(dir, parts, depth) { + if (depth >= parts.length) return null; + + const pattern = parts[depth]; + const isLastPart = depth === parts.length - 1; + + try { + 
const entries = fs.readdirSync(dir, { withFileTypes: true }); + for (const entry of entries) { + if (matchGlob(entry.name, pattern)) { + const fullPath = path.join(dir, entry.name); + if (isLastPart) { + if (entry.isFile()) return fullPath; + } else { + if (entry.isDirectory()) { + const result = findFileRecursive(fullPath, parts, depth + 1); + if (result) return result; + } + } + } + } + } catch { + // Directory doesn't exist or can't be read + } + + return null; +} + +function matchGlob(name, pattern) { + if (pattern === "*") return true; + if (!pattern.includes("*")) return name === pattern; + const regex = new RegExp("^" + pattern.replace(/\*/g, ".*") + "$"); + return regex.test(name); +} + +function listFiles(dir, prefix = "") { + const results = []; + try { + const entries = fs.readdirSync(dir, { withFileTypes: true }); + for (const entry of entries) { + const name = prefix ? `${prefix}/${entry.name}` : entry.name; + results.push(name); + if (entry.isDirectory()) { + results.push(...listFiles(path.join(dir, entry.name), name)); + } + } + } catch { + // ignore + } + return results; +} + +main().catch((error) => { + console.error("Failed to download FFmpeg:", error); + process.exit(1); +}); diff --git a/src/components/video-editor/VideoEditor.tsx b/src/components/video-editor/VideoEditor.tsx index b78d5b26..1aff9339 100644 --- a/src/components/video-editor/VideoEditor.tsx +++ b/src/components/video-editor/VideoEditor.tsx @@ -22,6 +22,7 @@ import { type ExportProgress, type ExportQuality, type ExportSettings, + FFmpegExporter, GIF_SIZE_PRESETS, GifExporter, type GifFrameRate, @@ -159,7 +160,7 @@ export default function VideoEditor() { const nextAnnotationIdRef = useRef(1); const nextAnnotationZIndexRef = useRef(1); - const exporterRef = useRef(null); + const exporterRef = useRef(null); const annotationOnlyRegions = useMemo( () => annotationRegions.filter((region) => region.type !== "blur"), @@ -1377,7 +1378,7 @@ export default function VideoEditor() { 
exporterRef.current = gifExporter as unknown as VideoExporter; const result = await gifExporter.export(); - if (result.success && result.blob) { + if (result.success && result.type === "blob") { const arrayBuffer = await result.blob.arrayBuffer(); const timestamp = Date.now(); const fileName = `export-${timestamp}.gif`; @@ -1394,29 +1395,26 @@ export default function VideoEditor() { setExportError(saveResult.message || "Failed to save GIF"); toast.error(saveResult.message || "Failed to save GIF"); } - } else { + } else if (!result.success) { setExportError(result.error || "GIF export failed"); toast.error(result.error || "GIF export failed"); } } else { - // MP4 Export + // MP4 Export — try FFmpeg native path first, fall back to WebCodecs const quality = settings.quality || exportQuality; let exportWidth: number; let exportHeight: number; let bitrate: number; if (quality === "source") { - // Use source resolution exportWidth = sourceWidth; exportHeight = sourceHeight; if (aspectRatioValue === 1) { - // Square (1:1): use smaller dimension to avoid codec limits const baseDimension = Math.floor(Math.min(sourceWidth, sourceHeight) / 2) * 2; exportWidth = baseDimension; exportHeight = baseDimension; } else if (aspectRatioValue > 1) { - // Landscape: find largest even dimensions that exactly match aspect ratio const baseWidth = Math.floor(sourceWidth / 2) * 2; let found = false; for (let w = baseWidth; w >= 100 && !found; w -= 2) { @@ -1432,7 +1430,6 @@ export default function VideoEditor() { exportHeight = Math.floor(baseWidth / aspectRatioValue / 2) * 2; } } else { - // Portrait: find largest even dimensions that exactly match aspect ratio const baseHeight = Math.floor(sourceHeight / 2) * 2; let found = false; for (let h = baseHeight; h >= 100 && !found; h -= 2) { @@ -1449,7 +1446,6 @@ export default function VideoEditor() { } } - // Calculate visually lossless bitrate matching screen recording optimization const totalPixels = exportWidth * exportHeight; bitrate = 
30_000_000; if (totalPixels > 1920 * 1080 && totalPixels <= 2560 * 1440) { @@ -1458,14 +1454,10 @@ export default function VideoEditor() { bitrate = 80_000_000; } } else { - // Use quality-based target resolution const targetHeight = quality === "medium" ? 720 : 1080; - - // Calculate dimensions maintaining aspect ratio exportHeight = Math.floor(targetHeight / 2) * 2; exportWidth = Math.floor((exportHeight * aspectRatioValue) / 2) * 2; - // Adjust bitrate for lower resolutions const totalPixels = exportWidth * exportHeight; if (totalPixels <= 1280 * 720) { bitrate = 10_000_000; @@ -1476,61 +1468,119 @@ export default function VideoEditor() { } } - const exporter = new VideoExporter({ - videoUrl: videoPath, - webcamVideoUrl: webcamVideoPath || undefined, - width: exportWidth, - height: exportHeight, - frameRate: 60, - bitrate, - codec: "avc1.640033", - wallpaper, - zoomRegions, - trimRegions, - speedRegions, - showShadow: shadowIntensity > 0, - shadowIntensity, - showBlur, - motionBlurAmount, - borderRadius, - padding, - cropRegion, - annotationRegions, - webcamLayoutPreset, - webcamMaskShape, - webcamSizePreset, - webcamPosition, - previewWidth, - previewHeight, - cursorTelemetry, - onProgress: (progress: ExportProgress) => { - setExportProgress(progress); - }, - }); - - exporterRef.current = exporter; - const result = await exporter.export(); - - if (result.success && result.blob) { - const arrayBuffer = await result.blob.arrayBuffer(); - const timestamp = Date.now(); - const fileName = `export-${timestamp}.mp4`; - - const saveResult = await window.electronAPI.saveExportedVideo(arrayBuffer, fileName); - - if (saveResult.canceled) { - setUnsavedExport({ arrayBuffer, fileName, format: "mp4" }); - toast.info("Export canceled"); - } else if (saveResult.success && saveResult.path) { + // Check if FFmpeg native export is available + const ffmpegCheck = await FFmpegExporter.isAvailable(); + + if (ffmpegCheck.available) { + // ---- FFmpeg Native Export (fast) ---- + 
console.log( + `[Export] Using FFmpeg native export with encoder: ${ffmpegCheck.bestEncoder}`, + ); + + const ffmpegExporter = new FFmpegExporter({ + videoUrl: videoPath, + webcamVideoUrl: webcamVideoPath || undefined, + width: exportWidth, + height: exportHeight, + frameRate: 60, + bitrate, + wallpaper, + zoomRegions, + trimRegions, + speedRegions, + showShadow: shadowIntensity > 0, + shadowIntensity, + showBlur, + motionBlurAmount, + borderRadius, + padding, + cropRegion, + annotationRegions, + webcamLayoutPreset, + webcamMaskShape, + webcamSizePreset, + webcamPosition, + previewWidth, + previewHeight, + cursorTelemetry, + onProgress: (progress: ExportProgress) => { + setExportProgress(progress); + }, + }); + + exporterRef.current = ffmpegExporter; + const result = await ffmpegExporter.export(); + + // FFmpeg writes directly to disk — handle the result + if (result.success && result.type === "native") { setUnsavedExport(null); - handleExportSaved("Video", saveResult.path); + handleExportSaved("Video", result.path); + } else if (!result.success && result.error === "Export save canceled") { + toast.info("Export canceled"); } else { - setExportError(saveResult.message || "Failed to save video"); - toast.error(saveResult.message || "Failed to save video"); + setExportError(result.error || "FFmpeg export failed"); + toast.error(result.error || "FFmpeg export failed"); } } else { - setExportError(result.error || "Export failed"); - toast.error(result.error || "Export failed"); + // ---- WebCodecs Fallback (slower, no FFmpeg needed) ---- + console.log("[Export] FFmpeg not available, using WebCodecs fallback"); + + const exporter = new VideoExporter({ + videoUrl: videoPath, + webcamVideoUrl: webcamVideoPath || undefined, + width: exportWidth, + height: exportHeight, + frameRate: 60, + bitrate, + codec: "avc1.640033", + wallpaper, + zoomRegions, + trimRegions, + speedRegions, + showShadow: shadowIntensity > 0, + shadowIntensity, + showBlur, + motionBlurAmount, + 
borderRadius, + padding, + cropRegion, + annotationRegions, + webcamLayoutPreset, + webcamMaskShape, + webcamSizePreset, + webcamPosition, + previewWidth, + previewHeight, + cursorTelemetry, + onProgress: (progress: ExportProgress) => { + setExportProgress(progress); + }, + }); + + exporterRef.current = exporter; + const result = await exporter.export(); + + if (result.success && result.type === "blob") { + const arrayBuffer = await result.blob.arrayBuffer(); + const timestamp = Date.now(); + const fileName = `export-${timestamp}.mp4`; + + const saveResult = await window.electronAPI.saveExportedVideo(arrayBuffer, fileName); + + if (saveResult.canceled) { + setUnsavedExport({ arrayBuffer, fileName, format: "mp4" }); + toast.info("Export canceled"); + } else if (saveResult.success && saveResult.path) { + setUnsavedExport(null); + handleExportSaved("Video", saveResult.path); + } else { + setExportError(saveResult.message || "Failed to save video"); + toast.error(saveResult.message || "Failed to save video"); + } + } else if (!result.success) { + setExportError(result.error || "Export failed"); + toast.error(result.error || "Export failed"); + } } } diff --git a/src/lib/exporter/ffmpegExporter.ts b/src/lib/exporter/ffmpegExporter.ts new file mode 100644 index 00000000..ff999c8e --- /dev/null +++ b/src/lib/exporter/ffmpegExporter.ts @@ -0,0 +1,427 @@ +/** + * FFmpeg-based video exporter that pipes raw RGBA frames to an FFmpeg child process + * running in the Electron main process with hardware-accelerated encoding. + * + * This replaces the slow WebCodecs VideoEncoder path on Windows, providing + * 5-20x faster exports by leveraging NVENC/QSV/AMF hardware encoders. + * + * Falls back to libx264 (still faster than browser WebCodecs) when no GPU encoder + * is available. 
+ */ + +import type { + AnnotationRegion, + CropRegion, + SpeedRegion, + TrimRegion, + WebcamLayoutPreset, + WebcamSizePreset, + ZoomRegion, +} from "@/components/video-editor/types"; +import { AsyncVideoFrameQueue } from "./asyncVideoFrameQueue"; +import { FrameRenderer } from "./frameRenderer"; +import { StreamingVideoDecoder } from "./streamingDecoder"; +import type { ExportProgress, ExportResult } from "./types"; + +// BACKPRESSURE variables removed as they are no longer needed for VideoEncoder + +interface FFmpegExporterConfig { + videoUrl: string; + webcamVideoUrl?: string; + width: number; + height: number; + frameRate: number; + bitrate: number; + wallpaper: string; + zoomRegions: ZoomRegion[]; + trimRegions?: TrimRegion[]; + speedRegions?: SpeedRegion[]; + showShadow: boolean; + shadowIntensity: number; + showBlur: boolean; + motionBlurAmount?: number; + borderRadius?: number; + padding?: number; + cropRegion: CropRegion; + webcamLayoutPreset?: WebcamLayoutPreset; + webcamMaskShape?: import("@/components/video-editor/types").WebcamMaskShape; + webcamSizePreset?: WebcamSizePreset; + webcamPosition?: { cx: number; cy: number } | null; + annotationRegions?: AnnotationRegion[]; + previewWidth?: number; + previewHeight?: number; + cursorTelemetry?: import("@/components/video-editor/types").CursorTelemetryPoint[]; + onProgress?: (progress: ExportProgress) => void; +} + +export class FFmpegExporter { + private config: FFmpegExporterConfig; + private cancelled = false; + private sessionId: string | null = null; + private streamingDecoder: StreamingVideoDecoder | null = null; + private renderer: FrameRenderer | null = null; + + constructor(config: FFmpegExporterConfig) { + this.config = config; + } + + /** + * Checks if FFmpeg native export is available. + * Call this before constructing an FFmpegExporter to decide which path to use. 
+ */ + static async isAvailable(): Promise<{ + available: boolean; + bestEncoder: string | null; + }> { + try { + const caps = await window.electronAPI.ffmpegGetCapabilities(); + return { + available: caps.available && caps.bestEncoder !== null, + bestEncoder: caps.bestEncoder, + }; + } catch { + return { available: false, bestEncoder: null }; + } + } + + async export(): Promise { + this.cancelled = false; + + try { + // 1. Probe capabilities & select encoder + const caps = await window.electronAPI.ffmpegGetCapabilities(); + if (!caps.available || !caps.bestEncoder) { + return { success: false, error: "FFmpeg not available" }; + } + + const encoder = caps.bestEncoder; + console.log(`[FFmpegExporter] Using encoder: ${encoder}`); + + // 2. Initialize video decoder + const streamingDecoder = new StreamingVideoDecoder(); + this.streamingDecoder = streamingDecoder; + const videoInfo = await streamingDecoder.loadMetadata(this.config.videoUrl); + + let webcamDecoder: StreamingVideoDecoder | null = null; + let webcamInfo: Awaited> | null = null; + if (this.config.webcamVideoUrl) { + webcamDecoder = new StreamingVideoDecoder(); + webcamInfo = await webcamDecoder.loadMetadata(this.config.webcamVideoUrl); + } + + // 3. Initialize frame renderer (same as VideoExporter) + const renderer = new FrameRenderer({ + width: this.config.width, + height: this.config.height, + wallpaper: this.config.wallpaper, + zoomRegions: this.config.zoomRegions, + showShadow: this.config.showShadow, + shadowIntensity: this.config.shadowIntensity, + showBlur: this.config.showBlur, + motionBlurAmount: this.config.motionBlurAmount, + borderRadius: this.config.borderRadius, + padding: this.config.padding, + cropRegion: this.config.cropRegion, + videoWidth: videoInfo.width, + videoHeight: videoInfo.height, + webcamSize: webcamInfo ? 
{ width: webcamInfo.width, height: webcamInfo.height } : null, + webcamLayoutPreset: this.config.webcamLayoutPreset, + webcamMaskShape: this.config.webcamMaskShape, + webcamSizePreset: this.config.webcamSizePreset, + webcamPosition: this.config.webcamPosition, + annotationRegions: this.config.annotationRegions, + speedRegions: this.config.speedRegions, + previewWidth: this.config.previewWidth, + previewHeight: this.config.previewHeight, + cursorTelemetry: this.config.cursorTelemetry, + }); + this.renderer = renderer; + await renderer.initialize(); + + if (this.cancelled) { + return { success: false, error: "Export cancelled" }; + } + + // 4. Start FFmpeg process + const startResult = await window.electronAPI.ffmpegExportStart({ + width: this.config.width, + height: this.config.height, + frameRate: this.config.frameRate, + encoder, + bitrate: this.config.bitrate, + audioSourcePath: this.config.videoUrl, + hasAudio: videoInfo.hasAudio, + }); + + if (!startResult.success || !startResult.sessionId) { + return { success: false, error: startResult.error || "Failed to start FFmpeg" }; + } + + this.sessionId = startResult.sessionId; + console.log(`[FFmpegExporter] Session started: ${this.sessionId}`); + + // 5. Calculate total frames + const effectiveDuration = streamingDecoder.getEffectiveDuration( + this.config.trimRegions, + this.config.speedRegions, + ); + const totalFrames = Math.ceil(effectiveDuration * this.config.frameRate); + + console.log( + `[FFmpegExporter] Duration: ${effectiveDuration.toFixed(2)}s, Frames: ${totalFrames}`, + ); + + // 6. Initialize WebCodecs VideoEncoder to encode hardware H.264 on the GPU + let encoderError: Error | null = null; + let frameErrors = 0; + const MAX_FRAME_ERRORS = 3; + + const vidEncoder = new VideoEncoder({ + output: async (chunk, meta) => { + // We may receive the SPS/PPS headers as decoder config + if (meta?.decoderConfig?.description) { + // Optionally send the header bytes to ffmpeg ahead of the stream? 
+ // FFmpeg -f h264 usually detects the Annex-B stream automatically if + // we format the chunks right, but VideoEncoder spits out raw AVCC chunks. + // We need to write them to IPC. We will let FFmpeg parse the bare H264 stream. + } + + // We MUST output Annex B format for FFmpeg to parse it from a raw pipe. + // Fortunately avc1 chunks can be converted, or we can just configure WebCodecs to output avc! + // Actually, WebCodecs output is in Annex B format if we don't specify avc format. + const buffer = new ArrayBuffer(chunk.byteLength); + chunk.copyTo(buffer); + + const frameResult = await window.electronAPI.ffmpegExportFrame(this.sessionId!, buffer); + + if (!frameResult.success) { + console.error("Failed to send chunks to FFmpeg", frameResult.error); + frameErrors++; + if (frameErrors >= MAX_FRAME_ERRORS) { + encoderError = new Error(`FFmpeg IPC failed: ${frameResult.error}`); + } + } else { + frameErrors = 0; // reset on successful transmission + } + }, + error: (e) => { + console.error("[FFmpegExporter] VideoEncoder error:", e); + encoderError = e; + }, + }); + + vidEncoder.configure({ + codec: "avc1.640034", // H.264 High Profile Level 5.2 + width: this.config.width, + height: this.config.height, + bitrate: this.config.bitrate, + framerate: this.config.frameRate, + hardwareAcceleration: "prefer-hardware", + avc: { format: "annexb" }, // CRITICAL: FFmpeg raw h264 pipe needs Annex B format with start codes! + }); + + // 7. Decode & render frames, pipe directly from GPU encoder + let frameIndex = 0; + const exportStartTime = Date.now(); + const frameDurationUs = 1_000_000 / this.config.frameRate; + + // Add webcam queue + const webcamFrameQueue = this.config.webcamVideoUrl ? new AsyncVideoFrameQueue() : null; + let webcamDecodeError: Error | null = null; + let stopWebcamDecode = false; + + const webcamDecodePromise = + webcamDecoder && webcamFrameQueue + ? 
(() => { + const queue = webcamFrameQueue; + return webcamDecoder + .decodeAll( + this.config.frameRate, + this.config.trimRegions, + this.config.speedRegions, + async (webcamFrame) => { + while (queue.length >= 12 && !this.cancelled && !stopWebcamDecode) { + await new Promise((resolve) => setTimeout(resolve, 2)); + } + if (this.cancelled || stopWebcamDecode) { + webcamFrame.close(); + return; + } + queue.enqueue(webcamFrame); + }, + ) + .catch((error) => { + webcamDecodeError = error instanceof Error ? error : new Error(String(error)); + throw webcamDecodeError; + }) + .finally(() => { + if (webcamDecodeError) { + queue.fail(webcamDecodeError); + } else { + queue.close(); + } + }); + })() + : null; + + await streamingDecoder.decodeAll( + this.config.frameRate, + this.config.trimRegions, + this.config.speedRegions, + async (videoFrame, _exportTimestampUs, sourceTimestampMs) => { + let webcamFrame: VideoFrame | null = null; + try { + if (this.cancelled) { + return; + } + + if (encoderError) { + throw encoderError; + } + + const timestamp = frameIndex * frameDurationUs; // microseconds here + webcamFrame = webcamFrameQueue ? 
await webcamFrameQueue.dequeue() : null; + if (this.cancelled) { + return; + } + + const sourceTimestampUs = sourceTimestampMs * 1000; + await renderer.renderFrame(videoFrame, sourceTimestampUs, webcamFrame); + const canvas = renderer.getCanvas(); + + // Fastest path in existence: GPU texture -> Hardware H264 Encoder + const exportFrame = new VideoFrame(canvas, { timestamp, duration: frameDurationUs }); + + // Prevent encoding queue from flooding RAM + while (vidEncoder.encodeQueueSize >= 32) { + await new Promise((r) => setTimeout(r, 2)); + } + + vidEncoder.encode(exportFrame, { keyFrame: frameIndex % 150 === 0 }); + exportFrame.close(); + + frameIndex++; + + const elapsedMs = Date.now() - exportStartTime; + const framesPerSec = frameIndex / (elapsedMs / 1000); + const remainingFrames = totalFrames - frameIndex; + const estimatedTimeRemaining = remainingFrames / Math.max(1, framesPerSec); + + this.config.onProgress?.({ + currentFrame: frameIndex, + totalFrames, + percentage: (frameIndex / totalFrames) * 100, + estimatedTimeRemaining, + }); + } finally { + if (webcamFrame) { + webcamFrame.close(); + } + videoFrame.close(); + } + }, + ); + + stopWebcamDecode = true; + if (webcamDecodePromise) { + await webcamDecodePromise.catch(() => {}); // ignore error here, already caught + } + + // Flush remains of encoder + await vidEncoder.flush(); + vidEncoder.close(); + + if (this.cancelled) { + await this.cancelFFmpeg(); + return { success: false, error: "Export cancelled" }; + } + + // 7. Report finalizing phase + this.config.onProgress?.({ + currentFrame: totalFrames, + totalFrames, + percentage: 100, + estimatedTimeRemaining: 0, + phase: "finalizing", + }); + + // 8. 
Finish FFmpeg — close stdin, wait for process, show save dialog + const timestamp = Date.now(); + const fileName = `export-${timestamp}.mp4`; + const finishResult = await window.electronAPI.ffmpegExportFinish(this.sessionId, fileName); + + const totalTime = ((Date.now() - exportStartTime) / 1000).toFixed(1); + console.log(`[FFmpegExporter] Total export time: ${totalTime}s for ${frameIndex} frames`); + + if (finishResult.canceled) { + // User canceled the save dialog — return the result so VideoEditor can handle it + return { + success: false, + error: "Export save canceled", + }; + } + + if (!finishResult.success) { + return { + success: false, + error: finishResult.error || "FFmpeg export failed", + }; + } + + // Return a result that VideoEditor can handle for the "Show in Folder" toast + // We return a special result since FFmpegExporter doesn't produce a Blob + return { + success: true, + type: "native", + path: finishResult.path!, + }; + } catch (error) { + console.error("[FFmpegExporter] Export error:", error); + await this.cancelFFmpeg(); + return { + success: false, + error: error instanceof Error ? 
error.message : String(error), + }; + } finally { + this.cleanup(); + } + } + + cancel(): void { + this.cancelled = true; + this.streamingDecoder?.cancel(); + void this.cancelFFmpeg(); + this.cleanup(); + } + + private async cancelFFmpeg(): Promise { + if (this.sessionId) { + try { + await window.electronAPI.ffmpegExportCancel(this.sessionId); + } catch { + // Ignore cancel errors + } + this.sessionId = null; + } + } + + private cleanup(): void { + if (this.streamingDecoder) { + try { + this.streamingDecoder.destroy(); + } catch (e) { + console.warn("Error destroying streaming decoder:", e); + } + this.streamingDecoder = null; + } + + if (this.renderer) { + try { + this.renderer.destroy(); + } catch (e) { + console.warn("Error destroying renderer:", e); + } + this.renderer = null; + } + } +} diff --git a/src/lib/exporter/frameRenderer.ts b/src/lib/exporter/frameRenderer.ts index 80424b09..990ac3ed 100644 --- a/src/lib/exporter/frameRenderer.ts +++ b/src/lib/exporter/frameRenderer.ts @@ -1,13 +1,14 @@ -import { - Application, - BlurFilter, - Container, - Graphics, - Sprite, - Texture, - type TextureSourceLike, -} from "pixi.js"; -import { MotionBlurFilter } from "pixi-filters/motion-blur"; +/** + * Thin wrapper around frameRendererWorker.ts. + * + * Computes animation state (zoom, layout) on the main thread using existing + * helpers, then sends VideoFrames + pre-computed transform to a Web Worker + * that does all compositing via OffscreenCanvas + Canvas 2D (no Pixi.js). + * + * Public API is identical to the previous Pixi.js implementation so + * videoExporter.ts requires zero changes. 
+ */ + import type { AnnotationRegion, CropRegion, @@ -33,28 +34,91 @@ import { import { clampFocusToStage as clampFocusToStageUtil } from "@/components/video-editor/videoPlayback/focusUtils"; import { findDominantRegion } from "@/components/video-editor/videoPlayback/zoomRegionUtils"; import { - applyZoomTransform, computeFocusFromTransform, computeZoomTransform, - createMotionBlurState, - type MotionBlurState, } from "@/components/video-editor/videoPlayback/zoomTransform"; -import { - computeCompositeLayout, - getWebcamLayoutPresetDefinition, - type Size, - type StyledRenderRect, -} from "@/lib/compositeLayout"; -import { drawCanvasClipPath } from "@/lib/webcamMaskShapes"; -import { renderAnnotations } from "./annotationRenderer"; -import { - getLinearGradientPoints, - getRadialGradientShape, - parseCssGradient, - resolveLinearGradientAngle, -} from "./gradientParser"; +import { computeCompositeLayout, type Size, type StyledRenderRect } from "@/lib/compositeLayout"; + +// ---------- Types matching the worker ---------- + +interface WorkerZoomRegion { + id: string; + startMs: number; + endMs: number; + depth: number; + focus: { cx: number; cy: number }; + focusMode?: "manual" | "auto"; + zoomInDurationMs?: number; + zoomOutDurationMs?: number; +} + +interface WorkerCropRegion { + x: number; + y: number; + width: number; + height: number; +} -interface FrameRenderConfig { +interface WorkerAnnotationRegion { + id: string; + startMs: number; + endMs: number; + type: "text" | "image" | "figure" | "blur"; + content: string; + textContent?: string; + imageContent?: string; + position: { x: number; y: number }; + size: { width: number; height: number }; + style: { + color: string; + backgroundColor: string; + fontSize: number; + fontFamily: string; + fontWeight: "normal" | "bold"; + fontStyle: "normal" | "italic"; + textDecoration: "none" | "underline"; + textAlign: "left" | "center" | "right"; + }; + zIndex: number; + figureData?: { + arrowDirection: string; + color: 
string; + strokeWidth: number; + }; + blurData?: { + shape: "rectangle" | "oval" | "freehand"; + intensity: number; + freehandPoints?: Array<{ x: number; y: number }>; + }; +} + +interface WorkerConfig { + width: number; + height: number; + wallpaper: string; + zoomRegions: WorkerZoomRegion[]; + showShadow: boolean; + shadowIntensity: number; + showBlur: boolean; + motionBlurAmount: number; + borderRadius: number; + padding: number; + cropRegion: WorkerCropRegion; + videoWidth: number; + videoHeight: number; + webcamSize?: { width: number; height: number } | null; + webcamLayoutPreset?: "picture-in-picture" | "vertical-stack" | "dual-frame"; + webcamMaskShape?: "rectangle" | "circle" | "square" | "rounded"; + webcamSizePreset?: number; + webcamPosition?: { cx: number; cy: number } | null; + annotationRegions?: WorkerAnnotationRegion[]; + previewWidth?: number; + previewHeight?: number; +} + +// ---------- Public interface (unchanged) ---------- + +export interface FrameRenderConfig { width: number; height: number; wallpaper: string; @@ -80,6 +144,8 @@ interface FrameRenderConfig { cursorTelemetry?: import("@/components/video-editor/types").CursorTelemetryPoint[]; } +// ---------- Animation state (same as original) ---------- + interface AnimationState { scale: number; focusX: number; @@ -99,31 +165,92 @@ interface LayoutCache { webcamRect: StyledRenderRect | null; } -// Renders video frames with all effects (background, zoom, crop, blur, shadow) to an offscreen canvas for export. 
+// ---------- Worker messages ---------- + +interface InitMessage { + type: "init"; + config: WorkerConfig; + wallpaperBitmap?: ImageBitmap; +} + +interface RenderMessage { + type: "render"; + frame: VideoFrame; + timestamp: number; + webcamFrame?: VideoFrame | null; + zoomTransform: { + scale: number; + x: number; + y: number; + focusX: number; + focusY: number; + progress: number; + }; + layoutInfo: { + stageWidth: number; + stageHeight: number; + videoWidth: number; + videoHeight: number; + baseScale: number; + baseOffsetX: number; + baseOffsetY: number; + maskX: number; + maskY: number; + maskWidth: number; + maskHeight: number; + scaledBorderRadius: number; + webcamRect?: { + x: number; + y: number; + width: number; + height: number; + borderRadius: number; + maskShape: "rectangle" | "circle" | "square" | "rounded"; + } | null; + screenCover: boolean; + }; +} + +interface DisposeMessage { + type: "dispose"; +} + +interface FrameReadyMessage { + type: "frame-ready"; + bitmap: ImageBitmap; +} + +interface ErrorMessage { + type: "error"; + error: string; +} + +interface ReadyMessage { + type: "ready"; +} + +type WorkerOutgoing = FrameReadyMessage | ErrorMessage | ReadyMessage; + +// ---------- FrameRenderer ---------- export class FrameRenderer { - private app: Application | null = null; - private cameraContainer: Container | null = null; - private videoContainer: Container | null = null; - private videoSprite: Sprite | null = null; - private backgroundSprite: HTMLCanvasElement | null = null; - private maskGraphics: Graphics | null = null; - private blurFilter: BlurFilter | null = null; - private motionBlurFilter: MotionBlurFilter | null = null; - private shadowCanvas: HTMLCanvasElement | null = null; - private shadowCtx: CanvasRenderingContext2D | null = null; - private compositeCanvas: HTMLCanvasElement | null = null; - private compositeCtx: CanvasRenderingContext2D | null = null; - private rasterCanvas: HTMLCanvasElement | null = null; - private rasterCtx: 
CanvasRenderingContext2D | null = null; + private worker: Worker | null = null; + private proxyCanvas: HTMLCanvasElement | null = null; + private proxyCtx: CanvasRenderingContext2D | null = null; private config: FrameRenderConfig; private animationState: AnimationState; private layoutCache: LayoutCache | null = null; private currentVideoTime = 0; - private motionBlurState: MotionBlurState = createMotionBlurState(); private smoothedAutoFocus: { cx: number; cy: number } | null = null; private prevAnimationTimeMs: number | null = null; private prevTargetProgress = 0; + private readyPromise: Promise; + private readyResolve!: () => void; + private pendingRender: { + resolve: () => void; + reject: (err: Error) => void; + } | null = null; + private disposed = false; constructor(config: FrameRenderConfig) { this.config = config; @@ -136,214 +263,136 @@ export class FrameRenderer { y: 0, appliedScale: 1, }; + this.readyPromise = new Promise((resolve) => { + this.readyResolve = resolve; + }); } async initialize(): Promise { - // Create canvas for rendering - const canvas = document.createElement("canvas"); - canvas.width = this.config.width; - canvas.height = this.config.height; - - // Try to set colorSpace if supported (may not be available on all platforms) - try { - if (canvas && "colorSpace" in canvas) { - canvas.colorSpace = "srgb"; - } - } catch (error) { - // Silently ignore colorSpace errors on platforms that don't support it - console.warn("[FrameRenderer] colorSpace not supported on this platform:", error); + // Create proxy canvas for getCanvas() compatibility + this.proxyCanvas = document.createElement("canvas"); + this.proxyCanvas.width = this.config.width; + this.proxyCanvas.height = this.config.height; + this.proxyCtx = this.proxyCanvas.getContext("2d")!; + + if (!this.proxyCtx) { + throw new Error("Failed to get 2D context for proxy canvas"); } - // Initialize PixiJS with optimized settings for export performance - this.app = new Application(); - await 
this.app.init({ - canvas, - width: this.config.width, - height: this.config.height, - backgroundAlpha: 0, - antialias: true, - resolution: 1, - autoDensity: true, - }); - - // Setup containers - this.cameraContainer = new Container(); - this.videoContainer = new Container(); - this.app.stage.addChild(this.cameraContainer); - this.cameraContainer.addChild(this.videoContainer); - - // Setup background (render separately, not in PixiJS) - await this.setupBackground(); - - // Setup blur filter for video container - this.blurFilter = new BlurFilter(); - this.blurFilter.quality = 5; - this.blurFilter.resolution = this.app.renderer.resolution; - this.blurFilter.blur = 0; - this.motionBlurFilter = new MotionBlurFilter([0, 0], 5, 0); - this.videoContainer.filters = [this.blurFilter, this.motionBlurFilter]; - - // Setup composite canvas for final output with shadows - this.compositeCanvas = document.createElement("canvas"); - this.compositeCanvas.width = this.config.width; - this.compositeCanvas.height = this.config.height; - this.compositeCtx = this.compositeCanvas.getContext("2d", { - willReadFrequently: false, - }); - - if (!this.compositeCtx) { - throw new Error("Failed to get 2D context for composite canvas"); - } + // Create worker — Vite bundles this automatically + // TODO: In production builds with certain Vite configs, worker + // bundling may need the `?worker` suffix. 
If the worker fails + // to load, switch to: `import FrameWorker from './frameRendererWorker?worker'` + const workerUrl = new URL("./frameRendererWorker.ts", import.meta.url); + this.worker = new Worker(workerUrl, { type: "module" }); - this.rasterCanvas = document.createElement("canvas"); - this.rasterCanvas.width = this.config.width; - this.rasterCanvas.height = this.config.height; - this.rasterCtx = this.rasterCanvas.getContext("2d"); - if (!this.rasterCtx) { - throw new Error("Failed to get 2D context for raster canvas"); - } + this.worker.onmessage = (e: MessageEvent) => { + this.handleWorkerMessage(e.data); + }; - // Setup shadow canvas if needed - if (this.config.showShadow) { - this.shadowCanvas = document.createElement("canvas"); - this.shadowCanvas.width = this.config.width; - this.shadowCanvas.height = this.config.height; - this.shadowCtx = this.shadowCanvas.getContext("2d", { - willReadFrequently: false, - }); - - if (!this.shadowCtx) { - throw new Error("Failed to get 2D context for shadow canvas"); + this.worker.onerror = (err: ErrorEvent) => { + console.error("[FrameRenderer] Worker error:", err.message); + if (this.pendingRender) { + this.pendingRender.reject(new Error(`Worker error: ${err.message}`)); + this.pendingRender = null; } - } - - // Setup mask - this.maskGraphics = new Graphics(); - this.videoContainer.addChild(this.maskGraphics); - this.videoContainer.mask = this.maskGraphics; - } + }; - private async setupBackground(): Promise { + // Pre-load wallpaper for file:// URLs that the worker may not be able to fetch + let wallpaperBitmap: ImageBitmap | undefined; const wallpaper = this.config.wallpaper; - - // Create background canvas for separate rendering (not affected by zoom) - const bgCanvas = document.createElement("canvas"); - bgCanvas.width = this.config.width; - bgCanvas.height = this.config.height; - const bgCtx = bgCanvas.getContext("2d")!; - try { - // Render background based on type if ( wallpaper.startsWith("file://") || - 
wallpaper.startsWith("data:") || - wallpaper.startsWith("/") || - wallpaper.startsWith("http") + (wallpaper.startsWith("/") && !wallpaper.startsWith("//")) ) { - // Image background - const img = new Image(); - // Don't set crossOrigin for same-origin images to avoid CORS taint - // Only set it for cross-origin URLs - let imageUrl: string; - if (wallpaper.startsWith("http")) { - imageUrl = wallpaper; - if (!imageUrl.startsWith(window.location.origin)) { - img.crossOrigin = "anonymous"; - } - } else if (wallpaper.startsWith("file://") || wallpaper.startsWith("data:")) { - imageUrl = wallpaper; - } else { - imageUrl = window.location.origin + wallpaper; + // Try to load via fetch and transfer as ImageBitmap + const response = await fetch(wallpaper); + if (response.ok) { + const blob = await response.blob(); + wallpaperBitmap = await createImageBitmap(blob); + } + } else if (wallpaper.startsWith("data:")) { + const response = await fetch(wallpaper); + if (response.ok) { + const blob = await response.blob(); + wallpaperBitmap = await createImageBitmap(blob); } + } + } catch { + // Worker will attempt its own loading or fall back + } - await new Promise((resolve, reject) => { - img.onload = () => resolve(); - img.onerror = (err) => { - console.error("[FrameRenderer] Failed to load background image:", imageUrl, err); - reject(new Error(`Failed to load background image: ${imageUrl}`)); - }; - img.src = imageUrl; - }); + // Send init config + const workerConfig: WorkerConfig = { + width: this.config.width, + height: this.config.height, + wallpaper: this.config.wallpaper, + zoomRegions: this.config.zoomRegions.map((z) => ({ ...z })), + showShadow: this.config.showShadow, + shadowIntensity: this.config.shadowIntensity, + showBlur: this.config.showBlur, + motionBlurAmount: this.config.motionBlurAmount ?? 0, + borderRadius: this.config.borderRadius ?? 0, + padding: this.config.padding ?? 
0, + cropRegion: { ...this.config.cropRegion }, + videoWidth: this.config.videoWidth, + videoHeight: this.config.videoHeight, + webcamSize: this.config.webcamSize + ? { width: this.config.webcamSize.width, height: this.config.webcamSize.height } + : null, + webcamLayoutPreset: this.config.webcamLayoutPreset, + webcamMaskShape: this.config.webcamMaskShape, + webcamSizePreset: this.config.webcamSizePreset, + webcamPosition: this.config.webcamPosition, + annotationRegions: this.config.annotationRegions?.map((a) => ({ ...a })), + previewWidth: this.config.previewWidth, + previewHeight: this.config.previewHeight, + }; - // Draw the image using cover and center positioning - const imgAspect = img.width / img.height; - const canvasAspect = this.config.width / this.config.height; + const initMsg: InitMessage = { + type: "init", + config: workerConfig, + wallpaperBitmap, + }; - let drawWidth, drawHeight, drawX, drawY; + const transferList: Transferable[] = []; + if (wallpaperBitmap) transferList.push(wallpaperBitmap as Transferable); - if (imgAspect > canvasAspect) { - drawHeight = this.config.height; - drawWidth = drawHeight * imgAspect; - drawX = (this.config.width - drawWidth) / 2; - drawY = 0; - } else { - drawWidth = this.config.width; - drawHeight = drawWidth / imgAspect; - drawX = 0; - drawY = (this.config.height - drawHeight) / 2; + this.worker.postMessage(initMsg, transferList); + + // Wait for worker to acknowledge ready + await this.readyPromise; + } + + private handleWorkerMessage(msg: WorkerOutgoing): void { + switch (msg.type) { + case "ready": + this.readyResolve(); + break; + case "frame-ready": { + // Draw bitmap onto proxy canvas + if (this.proxyCtx && this.proxyCanvas) { + this.proxyCtx.clearRect(0, 0, this.proxyCanvas.width, this.proxyCanvas.height); + this.proxyCtx.drawImage(msg.bitmap, 0, 0); } + msg.bitmap.close(); - bgCtx.drawImage(img, drawX, drawY, drawWidth, drawHeight); - } else if (wallpaper.startsWith("#")) { - bgCtx.fillStyle = wallpaper; - 
bgCtx.fillRect(0, 0, this.config.width, this.config.height); - } else if ( - wallpaper.startsWith("linear-gradient") || - wallpaper.startsWith("radial-gradient") - ) { - const parsedGradient = parseCssGradient(wallpaper); - if (parsedGradient) { - const gradient = - parsedGradient.type === "linear" - ? (() => { - const points = getLinearGradientPoints( - resolveLinearGradientAngle(parsedGradient.descriptor), - this.config.width, - this.config.height, - ); - - return bgCtx.createLinearGradient(points.x0, points.y0, points.x1, points.y1); - })() - : (() => { - const shape = getRadialGradientShape( - parsedGradient.descriptor, - this.config.width, - this.config.height, - ); - - return bgCtx.createRadialGradient( - shape.cx, - shape.cy, - 0, - shape.cx, - shape.cy, - shape.radius, - ); - })(); - - parsedGradient.stops.forEach((stop) => { - gradient.addColorStop(stop.offset, stop.color); - }); - - bgCtx.fillStyle = gradient; - bgCtx.fillRect(0, 0, this.config.width, this.config.height); - } else { - console.warn("[FrameRenderer] Could not parse gradient, using black fallback"); - bgCtx.fillStyle = "#000000"; - bgCtx.fillRect(0, 0, this.config.width, this.config.height); + if (this.pendingRender) { + this.pendingRender.resolve(); + this.pendingRender = null; } - } else { - bgCtx.fillStyle = wallpaper; - bgCtx.fillRect(0, 0, this.config.width, this.config.height); + break; + } + case "error": { + console.error("[FrameRenderer] Worker error:", msg.error); + if (this.pendingRender) { + this.pendingRender.reject(new Error(msg.error)); + this.pendingRender = null; + } + break; } - } catch (error) { - console.error("[FrameRenderer] Error setting up background, using fallback:", error); - bgCtx.fillStyle = "#000000"; - bgCtx.fillRect(0, 0, this.config.width, this.config.height); } - - // Store the background canvas for compositing - this.backgroundSprite = bgCanvas; } async renderFrame( @@ -351,31 +400,21 @@ export class FrameRenderer { timestamp: number, webcamFrame?: 
VideoFrame | null, ): Promise { - if (!this.app || !this.videoContainer || !this.cameraContainer) { - throw new Error("Renderer not initialized"); + if (!this.worker || this.disposed) { + throw new Error("Renderer not initialized or disposed"); } - this.currentVideoTime = timestamp / 1000000; + // Wait for worker to be ready (first frame may arrive before init ack) + await this.readyPromise; - // Create or update video sprite from VideoFrame - if (!this.videoSprite) { - const texture = Texture.from(videoFrame as unknown as TextureSourceLike); - this.videoSprite = new Sprite(texture); - this.videoContainer.addChild(this.videoSprite); - } else { - // Destroy old texture to avoid memory leaks, then create new one - const oldTexture = this.videoSprite.texture; - const newTexture = Texture.from(videoFrame as unknown as TextureSourceLike); - this.videoSprite.texture = newTexture; - oldTexture.destroy(true); - } + this.currentVideoTime = timestamp / 1_000_000; + const timeMs = this.currentVideoTime * 1000; - // Apply layout + // Update layout (same logic as original updateLayout) this.updateLayout(webcamFrame); - const timeMs = this.currentVideoTime * 1000; + // Compute animation state (same as original updateAnimationState) const TICKS_PER_FRAME = 1; - let maxMotionIntensity = 0; for (let i = 0; i < TICKS_PER_FRAME; i++) { const motionIntensity = this.updateAnimationState(timeMs); @@ -387,63 +426,98 @@ export class FrameRenderer { throw new Error("Layout cache not initialized"); } - // Apply transform once with maximum motion intensity from all ticks - applyZoomTransform({ - cameraContainer: this.cameraContainer, - blurFilter: this.blurFilter, - motionBlurFilter: this.motionBlurFilter, + // Compute zoom transform using the same math as computeZoomTransform + const transform = computeZoomTransform({ stageSize: layoutCache.stageSize, baseMask: layoutCache.maskRect, zoomScale: this.animationState.scale, zoomProgress: this.animationState.progress, focusX: 
this.animationState.focusX, focusY: this.animationState.focusY, - motionIntensity: maxMotionIntensity, - isPlaying: true, - motionBlurAmount: this.config.motionBlurAmount ?? 0, - motionBlurState: this.motionBlurState, - frameTimeMs: timeMs, }); - // Render the PixiJS stage to its canvas (video only, transparent background) - this.app.renderer.render(this.app.stage); - - // Composite with shadows to final output canvas - this.compositeWithShadows(webcamFrame); - - // Render annotations on top if present - if ( - this.config.annotationRegions && - this.config.annotationRegions.length > 0 && - this.compositeCtx - ) { - // Calculate scale factor based on export vs preview dimensions - const previewWidth = this.config.previewWidth || 1920; - const previewHeight = this.config.previewHeight || 1080; - const scaleX = this.config.width / previewWidth; - const scaleY = this.config.height / previewHeight; - const scaleFactor = (scaleX + scaleY) / 2; - - await renderAnnotations( - this.compositeCtx, - this.config.annotationRegions, - this.config.width, - this.config.height, - timeMs, - scaleFactor, - ); + // Scale border radius (same logic as original updateLayout) + const previewWidth = this.config.previewWidth || 1920; + const previewHeight = this.config.previewHeight || 1080; + const canvasScaleFactor = Math.min( + this.config.width / previewWidth, + this.config.height / previewHeight, + ); + const borderRadius = this.config.borderRadius ?? 0; + const scaledBorderRadius = + layoutCache.webcamRect != null + ? 
0 // screenBorderRadius was stored separately; we use mask border + : borderRadius * canvasScaleFactor; + + // Build layout info for worker + const layoutInfo = { + stageWidth: layoutCache.stageSize.width, + stageHeight: layoutCache.stageSize.height, + videoWidth: layoutCache.videoSize.width, + videoHeight: layoutCache.videoSize.height, + baseScale: layoutCache.baseScale, + baseOffsetX: layoutCache.baseOffset.x, + baseOffsetY: layoutCache.baseOffset.y, + maskX: layoutCache.maskRect.x, + maskY: layoutCache.maskRect.y, + maskWidth: layoutCache.maskRect.width, + maskHeight: layoutCache.maskRect.height, + scaledBorderRadius, + webcamRect: layoutCache.webcamRect + ? { + x: layoutCache.webcamRect.x, + y: layoutCache.webcamRect.y, + width: layoutCache.webcamRect.width, + height: layoutCache.webcamRect.height, + borderRadius: layoutCache.webcamRect.borderRadius, + maskShape: layoutCache.webcamRect.maskShape ?? "rectangle", + } + : null, + screenCover: this.config.webcamLayoutPreset === "vertical-stack", + }; + + // Create render message + const renderMsg: RenderMessage = { + type: "render", + frame: videoFrame, + timestamp, + webcamFrame: webcamFrame || null, + zoomTransform: { + scale: transform.scale, + x: transform.x, + y: transform.y, + focusX: this.animationState.focusX, + focusY: this.animationState.focusY, + progress: this.animationState.progress, + }, + layoutInfo, + }; + + // Create a promise that resolves when worker sends back the composited frame + const renderPromise = new Promise((resolve, reject) => { + this.pendingRender = { resolve, reject }; + }); + + // Transfer frames to worker (zero-copy) + const transferList: Transferable[] = [videoFrame as unknown as Transferable]; + if (webcamFrame) { + transferList.push(webcamFrame as unknown as Transferable); } + + this.worker.postMessage(renderMsg, transferList); + + // Wait for worker to finish compositing + await renderPromise; } - private updateLayout(webcamFrame?: VideoFrame | null): void { - if (!this.app || 
!this.videoSprite || !this.maskGraphics || !this.videoContainer) return; + // ---------- Animation state (same logic as original) ---------- + private updateLayout(webcamFrame?: VideoFrame | null): void { const { width, height } = this.config; const { cropRegion, borderRadius = 0, padding = 0 } = this.config; const videoWidth = this.config.videoWidth; const videoHeight = this.config.videoHeight; - // Calculate cropped video dimensions const cropStartX = cropRegion.x; const cropStartY = cropRegion.y; const cropEndX = cropRegion.x + cropRegion.width; @@ -452,13 +526,11 @@ export class FrameRenderer { const croppedVideoWidth = videoWidth * (cropEndX - cropStartX); const croppedVideoHeight = videoHeight * (cropEndY - cropStartY); - // Calculate scale to fit in viewport - // Padding is a percentage (0-100), where 50% ~ 0.8 scale - // Vertical stack ignores padding — it's full-bleed const effectivePadding = this.config.webcamLayoutPreset === "vertical-stack" ? 0 : padding; const paddingScale = 1.0 - (effectivePadding / 100) * 0.4; const viewportWidth = width * paddingScale; const viewportHeight = height * paddingScale; + const compositeLayout = computeCompositeLayout({ canvasSize: { width, height }, maxContentSize: { width: viewportWidth, height: viewportHeight }, @@ -473,7 +545,6 @@ export class FrameRenderer { const screenRect = compositeLayout.screenRect; - // Cover mode: scale to fill the rect (may crop), otherwise fit-to-width let scale: number; if (compositeLayout.screenCover) { scale = Math.max( @@ -484,26 +555,6 @@ export class FrameRenderer { scale = screenRect.width / croppedVideoWidth; } - // Position video sprite - this.videoSprite.width = videoWidth * scale; - this.videoSprite.height = videoHeight * scale; - - // Center the cropped region within the screenRect - const croppedDisplayWidth = croppedVideoWidth * scale; - const croppedDisplayHeight = croppedVideoHeight * scale; - const coverOffsetX = (screenRect.width - croppedDisplayWidth) / 2; - const 
coverOffsetY = (screenRect.height - croppedDisplayHeight) / 2; - - const cropPixelX = cropStartX * videoWidth * scale; - const cropPixelY = cropStartY * videoHeight * scale; - this.videoSprite.x = -cropPixelX + coverOffsetX; - this.videoSprite.y = -cropPixelY + coverOffsetY; - - // Position video container - this.videoContainer.x = screenRect.x; - this.videoContainer.y = screenRect.y; - - // scale border radius by export/preview canvas ratio const previewWidth = this.config.previewWidth || 1920; const previewHeight = this.config.previewHeight || 1080; const canvasScaleFactor = Math.min(width / previewWidth, height / previewHeight); @@ -514,10 +565,6 @@ export class FrameRenderer { ? 0 : borderRadius * canvasScaleFactor; - this.maskGraphics.clear(); - this.maskGraphics.roundRect(0, 0, screenRect.width, screenRect.height, scaledBorderRadius); - this.maskGraphics.fill({ color: 0xffffff }); - // Cache layout info this.layoutCache = { stageSize: { width, height }, @@ -525,7 +572,9 @@ export class FrameRenderer { baseScale: scale, baseOffset: { x: compositeLayout.screenRect.x, y: compositeLayout.screenRect.y }, maskRect: compositeLayout.screenRect, - webcamRect: compositeLayout.webcamRect, + webcamRect: compositeLayout.webcamRect + ? { ...compositeLayout.webcamRect, borderRadius: scaledBorderRadius } + : null, }; } @@ -538,7 +587,7 @@ export class FrameRenderer { } private updateAnimationState(timeMs: number): number { - if (!this.cameraContainer || !this.layoutCache) return 0; + if (!this.layoutCache) return 0; const { region, strength, blendedScale, transition } = findDominantRegion( this.config.zoomRegions, @@ -559,14 +608,12 @@ export class FrameRenderer { targetFocus = regionFocus; targetProgress = strength; - // Apply adaptive smoothing for auto-follow mode if (region.focusMode === "auto" && !transition) { const raw = targetFocus; const dtMs = this.prevAnimationTimeMs != null ? timeMs - this.prevAnimationTimeMs : 0; const framesElapsed = dtMs > 0 ? 
dtMs / (1000 / 60) : 1; const isZoomingIn = targetProgress < 0.999 && targetProgress >= this.prevTargetProgress; if (targetProgress >= 0.999) { - // Full zoom: adaptive smoothing — moves faster when far, decelerates when close const prev = this.smoothedAutoFocus ?? raw; const baseFactor = adaptiveSmoothFactor( raw, @@ -580,11 +627,8 @@ export class FrameRenderer { this.smoothedAutoFocus = smoothed; targetFocus = smoothed; } else if (isZoomingIn) { - // Zoom-in: track cursor directly so zoom always aims at current cursor - // position; keep ref in sync to avoid snap when full-zoom begins this.smoothedAutoFocus = raw; } else { - // Zoom-out: keep smoothing for continuity — avoids snap at zoom-out start const prev = this.smoothedAutoFocus ?? raw; const baseFactor = adaptiveSmoothFactor( raw, @@ -642,7 +686,6 @@ export class FrameRenderer { } const state = this.animationState; - const prevScale = state.appliedScale; const prevX = state.x; const prevY = state.y; @@ -687,183 +730,22 @@ export class FrameRenderer { ); } - // On Linux/Wayland the implicit GPU→2D texture-sharing path - // used by drawImage(webglCanvas) can fail silently (EGL/Ozone), - // producing green/empty frames. Explicit gl.readPixels always - // copies from GPU to CPU memory, bypassing that path. - private readbackVideoCanvas(): HTMLCanvasElement { - const glCanvas = this.app!.canvas as HTMLCanvasElement; - const gl = - (glCanvas.getContext("webgl2") as WebGL2RenderingContext | null) ?? 
- (glCanvas.getContext("webgl") as WebGLRenderingContext | null); - - if (!gl || !this.rasterCanvas || !this.rasterCtx) { - return glCanvas; - } - - const w = glCanvas.width; - const h = glCanvas.height; - const buf = new Uint8Array(w * h * 4); - gl.readPixels(0, 0, w, h, gl.RGBA, gl.UNSIGNED_BYTE, buf); - - // readPixels returns rows bottom-to-top; flip vertically - const rowSize = w * 4; - const temp = new Uint8Array(rowSize); - for (let top = 0, bot = h - 1; top < bot; top++, bot--) { - const tOff = top * rowSize; - const bOff = bot * rowSize; - temp.set(buf.subarray(tOff, tOff + rowSize)); - buf.copyWithin(tOff, bOff, bOff + rowSize); - buf.set(temp, bOff); - } - - const imageData = new ImageData(new Uint8ClampedArray(buf.buffer), w, h); - this.rasterCtx.putImageData(imageData, 0, 0); - - return this.rasterCanvas; - } - - private compositeWithShadows(webcamFrame?: VideoFrame | null): void { - if (!this.compositeCanvas || !this.compositeCtx || !this.app) return; - - const videoCanvas = this.readbackVideoCanvas(); - const ctx = this.compositeCtx; - const w = this.compositeCanvas.width; - const h = this.compositeCanvas.height; - - // Clear composite canvas - ctx.clearRect(0, 0, w, h); - - // Step 1: Draw background layer (with optional blur, not affected by zoom) - if (this.backgroundSprite) { - const bgCanvas = this.backgroundSprite; - - if (this.config.showBlur) { - ctx.save(); - ctx.filter = "blur(6px)"; // Canvas blur is weaker than CSS - ctx.drawImage(bgCanvas, 0, 0, w, h); - ctx.restore(); - } else { - ctx.drawImage(bgCanvas, 0, 0, w, h); - } - } else { - console.warn("[FrameRenderer] No background sprite found during compositing!"); - } - - // Draw video layer with shadows on top of background - if ( - this.config.showShadow && - this.config.shadowIntensity > 0 && - this.shadowCanvas && - this.shadowCtx - ) { - const shadowCtx = this.shadowCtx; - shadowCtx.clearRect(0, 0, w, h); - shadowCtx.save(); - - // Calculate shadow parameters based on intensity (0-1) 
- const intensity = this.config.shadowIntensity; - const baseBlur1 = 48 * intensity; - const baseBlur2 = 16 * intensity; - const baseBlur3 = 8 * intensity; - const baseAlpha1 = 0.7 * intensity; - const baseAlpha2 = 0.5 * intensity; - const baseAlpha3 = 0.3 * intensity; - const baseOffset = 12 * intensity; - - shadowCtx.filter = `drop-shadow(0 ${baseOffset}px ${baseBlur1}px rgba(0,0,0,${baseAlpha1})) drop-shadow(0 ${baseOffset / 3}px ${baseBlur2}px rgba(0,0,0,${baseAlpha2})) drop-shadow(0 ${baseOffset / 6}px ${baseBlur3}px rgba(0,0,0,${baseAlpha3}))`; - shadowCtx.drawImage(videoCanvas, 0, 0, w, h); - shadowCtx.restore(); - ctx.drawImage(this.shadowCanvas, 0, 0, w, h); - } else { - ctx.drawImage(videoCanvas, 0, 0, w, h); - } - - const webcamRect = this.layoutCache?.webcamRect ?? null; - if (webcamFrame && webcamRect) { - const preset = getWebcamLayoutPresetDefinition(this.config.webcamLayoutPreset); - const shape = webcamRect.maskShape ?? this.config.webcamMaskShape ?? "rectangle"; - const sourceWidth = - ("displayWidth" in webcamFrame && webcamFrame.displayWidth > 0 - ? webcamFrame.displayWidth - : webcamFrame.codedWidth) || webcamRect.width; - const sourceHeight = - ("displayHeight" in webcamFrame && webcamFrame.displayHeight > 0 - ? webcamFrame.displayHeight - : webcamFrame.codedHeight) || webcamRect.height; - const sourceAspect = sourceWidth / sourceHeight; - const targetAspect = webcamRect.width / webcamRect.height; - const sourceCropWidth = - sourceAspect > targetAspect ? Math.round(sourceHeight * targetAspect) : sourceWidth; - const sourceCropHeight = - sourceAspect > targetAspect ? 
sourceHeight : Math.round(sourceWidth / targetAspect); - const sourceCropX = Math.max(0, Math.round((sourceWidth - sourceCropWidth) / 2)); - const sourceCropY = Math.max(0, Math.round((sourceHeight - sourceCropHeight) / 2)); - ctx.save(); - drawCanvasClipPath( - ctx, - webcamRect.x, - webcamRect.y, - webcamRect.width, - webcamRect.height, - shape, - webcamRect.borderRadius, - ); - if (preset.shadow) { - ctx.shadowColor = preset.shadow.color; - ctx.shadowBlur = preset.shadow.blur; - ctx.shadowOffsetX = preset.shadow.offsetX; - ctx.shadowOffsetY = preset.shadow.offsetY; - } - ctx.fillStyle = "#000000"; - ctx.fill(); - ctx.clip(); - ctx.drawImage( - webcamFrame as unknown as CanvasImageSource, - sourceCropX, - sourceCropY, - sourceCropWidth, - sourceCropHeight, - webcamRect.x, - webcamRect.y, - webcamRect.width, - webcamRect.height, - ); - ctx.restore(); - } - } - getCanvas(): HTMLCanvasElement { - if (!this.compositeCanvas) { + if (!this.proxyCanvas) { throw new Error("Renderer not initialized"); } - return this.compositeCanvas; + return this.proxyCanvas; } destroy(): void { - if (this.videoSprite) { - this.videoSprite.destroy(); - this.videoSprite = null; - } - this.backgroundSprite = null; - if (this.app) { - this.app.destroy(true, { - children: true, - texture: true, - textureSource: true, - }); - this.app = null; + this.disposed = true; + if (this.worker) { + this.worker.postMessage({ type: "dispose" } as DisposeMessage); + this.worker.terminate(); + this.worker = null; } - this.cameraContainer = null; - this.videoContainer = null; - this.maskGraphics = null; - this.blurFilter = null; - this.motionBlurFilter = null; - this.shadowCanvas = null; - this.shadowCtx = null; - this.compositeCanvas = null; - this.compositeCtx = null; - this.rasterCanvas = null; - this.rasterCtx = null; + this.proxyCanvas = null; + this.proxyCtx = null; + this.layoutCache = null; } } diff --git a/src/lib/exporter/frameRendererWorker.ts b/src/lib/exporter/frameRendererWorker.ts new file 
mode 100644 index 00000000..76c19a30 --- /dev/null +++ b/src/lib/exporter/frameRendererWorker.ts @@ -0,0 +1,1115 @@ +/** + * Web Worker for compositing export frames using OffscreenCanvas + Canvas 2D. + * + * Receives VideoFrames (transferred), composites them with background/zoom/shadow/annotations, + * and returns an ImageBitmap (transferred back) — no Pixi.js, no GPU readback. + */ + +// ---------- Types (self-contained, no shared imports in worker) ---------- + +interface WorkerZoomRegion { + id: string; + startMs: number; + endMs: number; + depth: number; + focus: { cx: number; cy: number }; + focusMode?: "manual" | "auto"; + zoomInDurationMs?: number; + zoomOutDurationMs?: number; +} + +interface WorkerCropRegion { + x: number; + y: number; + width: number; + height: number; +} + +interface WorkerAnnotationRegion { + id: string; + startMs: number; + endMs: number; + type: "text" | "image" | "figure" | "blur"; + content: string; + textContent?: string; + imageContent?: string; + position: { x: number; y: number }; + size: { width: number; height: number }; + style: { + color: string; + backgroundColor: string; + fontSize: number; + fontFamily: string; + fontWeight: "normal" | "bold"; + fontStyle: "normal" | "italic"; + textDecoration: "none" | "underline"; + textAlign: "left" | "center" | "right"; + }; + zIndex: number; + figureData?: { + arrowDirection: string; + color: string; + strokeWidth: number; + }; + blurData?: { + shape: "rectangle" | "oval" | "freehand"; + intensity: number; + freehandPoints?: Array<{ x: number; y: number }>; + }; +} + +interface InitMessage { + type: "init"; + config: { + width: number; + height: number; + wallpaper: string; + zoomRegions: WorkerZoomRegion[]; + showShadow: boolean; + shadowIntensity: number; + showBlur: boolean; + motionBlurAmount: number; + borderRadius: number; + padding: number; + cropRegion: WorkerCropRegion; + videoWidth: number; + videoHeight: number; + webcamSize?: { width: number; height: number } | null; + 
webcamLayoutPreset?: "picture-in-picture" | "vertical-stack" | "dual-frame"; + webcamMaskShape?: "rectangle" | "circle" | "square" | "rounded"; + webcamSizePreset?: number; + webcamPosition?: { cx: number; cy: number } | null; + annotationRegions?: WorkerAnnotationRegion[]; + previewWidth?: number; + previewHeight?: number; + }; + wallpaperBitmap?: ImageBitmap; // transferred from main if pre-loaded +} + +interface RenderMessage { + type: "render"; + frame: VideoFrame; // transferred + timestamp: number; // microseconds + webcamFrame?: VideoFrame | null; // transferred + // Pre-computed animation state (main thread handles zoom math) + zoomTransform: { + scale: number; + x: number; + y: number; + focusX: number; + focusY: number; + progress: number; + }; + layoutInfo: { + stageWidth: number; + stageHeight: number; + videoWidth: number; // cropped video width + videoHeight: number; // cropped video height + baseScale: number; + baseOffsetX: number; + baseOffsetY: number; + maskX: number; + maskY: number; + maskWidth: number; + maskHeight: number; + scaledBorderRadius: number; + webcamRect?: { + x: number; + y: number; + width: number; + height: number; + borderRadius: number; + maskShape: "rectangle" | "circle" | "square" | "rounded"; + } | null; + screenCover: boolean; + }; +} + +interface DisposeMessage { + type: "dispose"; +} + +type WorkerIncoming = InitMessage | RenderMessage | DisposeMessage; + +interface FrameReadyMessage { + type: "frame-ready"; + bitmap: ImageBitmap; + timestamp: number; +} + +interface ErrorMessage { + type: "error"; + error: string; +} + +interface ReadyMessage { + type: "ready"; +} + +type WorkerOutgoing = FrameReadyMessage | ErrorMessage | ReadyMessage; + +// ---------- State ---------- + +let canvas: OffscreenCanvas | null = null; +let ctx: OffscreenCanvasRenderingContext2D | null = null; +let wallpaperImage: ImageBitmap | HTMLImageElement | null = null; +let motionBlurHistory: ImageBitmap[] = []; +const MAX_MOTION_HISTORY = 6; + +// 
---------- Init ---------- + +function handleInit(msg: InitMessage) { + canvas = new OffscreenCanvas(msg.config.width, msg.config.height); + ctx = canvas.getContext("2d"); + if (!ctx) { + self.postMessage({ type: "error", error: "Failed to get 2D context in worker" }); + return; + } + + // Set wallpaper + if (msg.wallpaperBitmap) { + wallpaperImage = msg.wallpaperBitmap; + } else { + loadWallpaper(msg.config.wallpaper).catch((err) => { + console.error("[FrameRendererWorker] Wallpaper load failed:", err); + }); + } + + self.postMessage({ type: "ready" }); +} + +async function loadWallpaper(wallpaper: string): Promise { + if (!canvas) return; + + try { + if (wallpaper.startsWith("data:")) { + // Data URL — decode as ImageBitmap + const response = await fetch(wallpaper); + const blob = await response.blob(); + wallpaperImage = await createImageBitmap(blob); + return; + } + + if (wallpaper.startsWith("http://") || wallpaper.startsWith("https://")) { + const response = await fetch(wallpaper, { mode: "cors" }); + const blob = await response.blob(); + wallpaperImage = await createImageBitmap(blob); + return; + } + + if (wallpaper.startsWith("file://") || wallpaper.startsWith("/")) { + // Try fetch (may work in Electron renderer context) + try { + const response = await fetch(wallpaper); + if (response.ok) { + const blob = await response.blob(); + wallpaperImage = await createImageBitmap(blob); + return; + } + } catch { + // fall through to solid color + } + } + + if (wallpaper.startsWith("#")) { + // Solid color — handled at render time + return; + } + + if (wallpaper.startsWith("linear-gradient") || wallpaper.startsWith("radial-gradient")) { + // Gradient — parsed and drawn at render time + return; + } + + // Try as a solid color + if (wallpaper.match(/^#[0-9a-fA-F]{3,8}$/)) { + return; + } + + // Unknown — treat as solid color or fallback + console.warn( + "[FrameRendererWorker] Unknown wallpaper format, using black fallback:", + wallpaper, + ); + } catch (err) { + 
console.error("[FrameRendererWorker] Error loading wallpaper:", err); + } +} + +// ---------- Background rendering ---------- + +function drawBackground() { + if (!ctx || !canvas) return; + const w = canvas.width; + const h = canvas.height; + + // Wallpaper image + if (wallpaperImage) { + drawImageCover(ctx, wallpaperImage, 0, 0, w, h); + return; + } + + // Will be drawn as fallback in render +} + +function drawImageCover( + context: OffscreenCanvasRenderingContext2D, + source: CanvasImageSource, + x: number, + y: number, + w: number, + h: number, +) { + // Get natural dimensions + let natW: number, natH: number; + if ("width" in source && "height" in source) { + natW = (source as ImageBitmap).width; + natH = (source as ImageBitmap).height; + } else { + // HTMLImageElement or similar + natW = (source as { naturalWidth?: number }).naturalWidth ?? w; + natH = (source as { naturalHeight?: number }).naturalHeight ?? h; + } + + const imgAspect = natW / natH; + const canvasAspect = w / h; + + let drawWidth: number, drawHeight: number, drawX: number, drawY: number; + + if (imgAspect > canvasAspect) { + drawHeight = h; + drawWidth = drawHeight * imgAspect; + drawX = x + (w - drawWidth) / 2; + drawY = y; + } else { + drawWidth = w; + drawHeight = drawWidth / imgAspect; + drawX = x; + drawY = y + (h - drawHeight) / 2; + } + + context.drawImage(source, drawX, drawY, drawWidth, drawHeight); +} + +function parseCssGradientStopColor(input: string): string | null { + // Extract color and percentage from a stop like "rgba(0,0,0,0.5) 30%" + const match = input.match(/(#[0-9a-fA-F]{3,8}|(?:rgba?|hsla?)\([^)]*\)|[a-zA-Z-]+)\s*(\d+)?%?/); + if (match) { + return match[1]; + } + return null; +} + +function drawGradientBackground( + context: OffscreenCanvasRenderingContext2D, + wallpaper: string, + width: number, + height: number, +) { + // Simplified gradient parser for worker context + const gradientMatch = wallpaper.match(/^(linear|radial)-gradient\((.+)\)$/i); + if (!gradientMatch) 
{ + context.fillStyle = wallpaper.startsWith("#") ? wallpaper : "#000000"; + context.fillRect(0, 0, width, height); + return; + } + + const type = gradientMatch[1].toLowerCase(); + const args = gradientMatch[2]; + + // Split stops, handling rgba/parenthetical commas + const stopParts = splitGradientArgs(args); + + if (stopParts.length < 2) { + context.fillStyle = "#000000"; + context.fillRect(0, 0, width, height); + return; + } + + // Separate descriptor from stops + let descriptor: string | null = null; + let stops = stopParts; + + if (type === "linear") { + if (/^\s*to\s+/i.test(stops[0]) || /-?\d*\.?\d+deg/i.test(stops[0])) { + descriptor = stops[0].trim(); + stops = stops.slice(1); + } + } + + const parsedStops: Array<{ color: string; offset: number | null }> = []; + for (const part of stops) { + const colorMatch = part.trim().match(/(#[0-9a-fA-F]{3,8}|(?:rgba?|hsla?)\([^)]*\)|[a-zA-Z-]+)/); + if (colorMatch) { + const offsetMatch = part.match(/(\d+)%/); + parsedStops.push({ + color: colorMatch[1], + offset: offsetMatch ? Number.parseInt(offsetMatch[1], 10) / 100 : null, + }); + } + } + + if (parsedStops.length < 2) { + context.fillStyle = "#000000"; + context.fillRect(0, 0, width, height); + return; + } + + // Normalize offsets + const offsets = parsedStops.map((s) => s.offset); + const definedCount = offsets.filter((o) => o !== null).length; + + if (definedCount === 0) { + parsedStops.forEach((s, i) => { + s.offset = i / (parsedStops.length - 1); + }); + } else { + // Simple linear interpolation for undefined offsets + let lastDefined = -1; + for (let i = 0; i < offsets.length; i++) { + if (offsets[i] !== null) { + if (lastDefined === -1) { + // Fill from start + for (let j = 0; j < i; j++) { + parsedStops[j].offset = (offsets[i]! 
* j) / i; + } + } else { + const start = offsets[lastDefined]!; + const end = offsets[i]!; + const gap = i - lastDefined; + for (let j = lastDefined + 1; j < i; j++) { + parsedStops[j].offset = start + ((end - start) * (j - lastDefined)) / gap; + } + } + lastDefined = i; + } + } + // Fill remaining at end + if (lastDefined >= 0 && lastDefined < offsets.length - 1) { + const start = offsets[lastDefined]!; + const remaining = offsets.length - 1 - lastDefined; + for (let i = lastDefined + 1; i < offsets.length; i++) { + parsedStops[i].offset = start + ((1 - start) * (i - lastDefined)) / remaining; + } + } + } + + let gradient: CanvasGradient; + + if (type === "linear") { + const angle = resolveGradientAngle(descriptor); + const radians = (angle * Math.PI) / 180; + const vx = Math.sin(radians); + const vy = -Math.cos(radians); + const halfSpan = (Math.abs(vx) * width + Math.abs(vy) * height) / 2; + const cx = width / 2; + const cy = height / 2; + gradient = context.createLinearGradient( + cx - vx * halfSpan, + cy - vy * halfSpan, + cx + vx * halfSpan, + cy + vy * halfSpan, + ); + } else { + // Radial + const cx = width / 2; + const cy = height / 2; + const radius = Math.sqrt(cx * cx + cy * cy); + gradient = context.createRadialGradient(cx, cy, 0, cx, cy, radius); + } + + for (const stop of parsedStops) { + gradient.addColorStop(stop.offset ?? 
0, stop.color); + } + + context.fillStyle = gradient; + context.fillRect(0, 0, width, height); +} + +function splitGradientArgs(input: string): string[] { + const parts: string[] = []; + let current = ""; + let depth = 0; + + for (const char of input) { + if (char === "(") { + depth++; + current += char; + } else if (char === ")") { + depth = Math.max(0, depth - 1); + current += char; + } else if (char === "," && depth === 0) { + const trimmed = current.trim(); + if (trimmed) parts.push(trimmed); + current = ""; + } else { + current += char; + } + } + + const trimmed = current.trim(); + if (trimmed) parts.push(trimmed); + return parts; +} + +function resolveGradientAngle(descriptor: string | null): number { + if (!descriptor) return 180; + const angleMatch = descriptor.match(/(-?\d*\.?\d+)deg/i); + if (angleMatch) return Number.parseFloat(angleMatch[1]); + const normalized = descriptor.trim().toLowerCase().replace(/\s+/g, " "); + const dirMap: Record = { + "to top": 0, + "to top right": 45, + "to right": 90, + "to bottom right": 135, + "to bottom": 180, + "to bottom left": 225, + "to left": 270, + "to top left": 315, + }; + return dirMap[normalized] ?? 
180; +} + +function renderWallpaper( + wallpaper: string, + context: OffscreenCanvasRenderingContext2D, + w: number, + h: number, +) { + if (wallpaperImage) { + drawImageCover(context, wallpaperImage, 0, 0, w, h); + } else if (wallpaper.startsWith("data:") || wallpaper.startsWith("http")) { + // Not yet loaded — fallback + context.fillStyle = "#000000"; + context.fillRect(0, 0, w, h); + } else if (wallpaper.startsWith("#")) { + context.fillStyle = wallpaper; + context.fillRect(0, 0, w, h); + } else if (wallpaper.startsWith("linear-gradient") || wallpaper.startsWith("radial-gradient")) { + drawGradientBackground(context, wallpaper, w, h); + } else { + context.fillStyle = wallpaper || "#000000"; + context.fillRect(0, 0, w, h); + } +} + +// ---------- Annotation rendering (ported from annotationRenderer.ts) ---------- + +const ARROW_PATHS: Record = { + up: ["M 50 20 L 50 80", "M 50 20 L 35 35", "M 50 20 L 65 35"], + down: ["M 50 20 L 50 80", "M 50 80 L 35 65", "M 50 80 L 65 65"], + left: ["M 80 50 L 20 50", "M 20 50 L 35 35", "M 20 50 L 35 65"], + right: ["M 20 50 L 80 50", "M 80 50 L 65 35", "M 80 50 L 65 65"], + "up-right": ["M 25 75 L 75 25", "M 75 25 L 60 30", "M 75 25 L 70 40"], + "up-left": ["M 75 75 L 25 25", "M 25 25 L 40 30", "M 25 25 L 30 40"], + "down-right": ["M 25 25 L 75 75", "M 75 75 L 70 60", "M 75 75 L 60 70"], + "down-left": ["M 75 25 L 25 75", "M 25 75 L 30 60", "M 25 75 L 40 70"], +}; + +function renderArrow( + context: OffscreenCanvasRenderingContext2D, + direction: string, + color: string, + strokeWidth: number, + x: number, + y: number, + width: number, + height: number, + scaleFactor: number, +) { + const paths = ARROW_PATHS[direction]; + if (!paths) return; + + context.save(); + context.translate(x, y); + + const padding = 8 * scaleFactor; + const availableWidth = Math.max(0, width - padding * 2); + const availableHeight = Math.max(0, height - padding * 2); + const scale = Math.min(availableWidth / 100, availableHeight / 100); + const offsetX 
= padding + (availableWidth - 100 * scale) / 2; + const offsetY = padding + (availableHeight - 100 * scale) / 2; + + context.translate(offsetX, offsetY); + context.shadowColor = "rgba(0, 0, 0, 0.3)"; + context.shadowBlur = 8 * scale; + context.shadowOffsetX = 0; + context.shadowOffsetY = 4 * scale; + context.strokeStyle = color; + context.lineWidth = strokeWidth * scale; + context.lineCap = "round"; + context.lineJoin = "round"; + + for (const pathString of paths) { + const parts = pathString.trim().split(/\s+/); + let i = 0; + while (i < parts.length) { + const cmd = parts[i]; + if (cmd === "M" || cmd === "L") { + const px = Number.parseFloat(parts[i + 1]) * scale; + const py = Number.parseFloat(parts[i + 2]) * scale; + if (cmd === "M") context.moveTo(px, py); + else context.lineTo(px, py); + i += 3; + } else { + i++; + } + } + } + context.stroke(); + context.restore(); +} + +function renderText( + context: OffscreenCanvasRenderingContext2D, + annotation: WorkerAnnotationRegion, + x: number, + y: number, + width: number, + height: number, + scaleFactor: number, +) { + const style = annotation.style; + + context.save(); + context.beginPath(); + context.rect(x, y, width, height); + context.clip(); + + const fontWeight = style.fontWeight === "bold" ? "bold" : "normal"; + const fontStyle = style.fontStyle === "italic" ? 
"italic" : "normal"; + const scaledFontSize = style.fontSize * scaleFactor; + context.font = `${fontStyle} ${fontWeight} ${scaledFontSize}px ${style.fontFamily}`; + context.textBaseline = "middle"; + + const containerPadding = 8 * scaleFactor; + let textX = x; + const textY = y + height / 2; + + if (style.textAlign === "center") { + textX = x + width / 2; + context.textAlign = "center"; + } else if (style.textAlign === "right") { + textX = x + width - containerPadding; + context.textAlign = "right"; + } else { + textX = x + containerPadding; + context.textAlign = "left"; + } + + const availableWidth = width - containerPadding * 2; + const rawLines = annotation.content.split("\n"); + const lines: string[] = []; + for (const rawLine of rawLines) { + if (!rawLine) { + lines.push(""); + continue; + } + const words = rawLine.split(/(\s+)/); + let current = ""; + for (const word of words) { + const test = current + word; + if (current && context.measureText(test).width > availableWidth) { + lines.push(current); + current = word.trimStart(); + } else { + current = test; + } + } + if (current) lines.push(current); + } + + const lineHeight = scaledFontSize * 1.4; + const startY = textY - ((lines.length - 1) * lineHeight) / 2; + + lines.forEach((line, index) => { + const currentY = startY + index * lineHeight; + + if (style.backgroundColor && style.backgroundColor !== "transparent") { + const metrics = context.measureText(line); + const verticalPadding = scaledFontSize * 0.1; + const horizontalPadding = scaledFontSize * 0.2; + const borderRadius = 4 * scaleFactor; + const contentHeight = scaledFontSize * 1.4; + const bgHeight = contentHeight + verticalPadding * 2; + const bgY = currentY - bgHeight / 2; + + let bgX = textX - horizontalPadding; + const bgWidth = metrics.width + horizontalPadding * 2; + + if (style.textAlign === "center") { + bgX = textX - bgWidth / 2; + } else if (style.textAlign === "right") { + bgX = textX - bgWidth; + } + + context.fillStyle = 
style.backgroundColor; + context.beginPath(); + context.roundRect(bgX, bgY, bgWidth, bgHeight, borderRadius); + context.fill(); + } + + context.fillStyle = style.color; + context.fillText(line, textX, currentY); + + if (style.textDecoration === "underline") { + const metrics = context.measureText(line); + let underlineX = textX; + const underlineY = currentY + scaledFontSize * 0.15; + + if (style.textAlign === "center") underlineX = textX - metrics.width / 2; + else if (style.textAlign === "right") underlineX = textX - metrics.width; + + context.strokeStyle = style.color; + context.lineWidth = Math.max(1, scaledFontSize / 16); + context.beginPath(); + context.moveTo(underlineX, underlineY); + context.lineTo(underlineX + metrics.width, underlineY); + context.stroke(); + } + }); + + context.restore(); +} + +async function renderImageAnnotation( + context: OffscreenCanvasRenderingContext2D, + annotation: WorkerAnnotationRegion, + x: number, + y: number, + width: number, + height: number, +): Promise { + const src = annotation.imageContent || annotation.content; + if (!src || !src.startsWith("data:image")) return; + + try { + const response = await fetch(src); + const blob = await response.blob(); + const bitmap = await createImageBitmap(blob); + + const imgAspect = bitmap.width / bitmap.height; + const boxAspect = width / height; + + let drawWidth = width; + let drawHeight = height; + let drawX = x; + let drawY = y; + + if (imgAspect > boxAspect) { + drawHeight = width / imgAspect; + drawY = y + (height - drawHeight) / 2; + } else { + drawWidth = height * imgAspect; + drawX = x + (width - drawWidth) / 2; + } + + context.drawImage(bitmap, drawX, drawY, drawWidth, drawHeight); + bitmap.close(); + } catch (err) { + console.error("[FrameRendererWorker] Failed to load image annotation:", err); + } +} + +function renderBlurAnnotation( + context: OffscreenCanvasRenderingContext2D, + annotation: WorkerAnnotationRegion, + x: number, + y: number, + width: number, + height: 
number, + scaleFactor: number, +) { + const configuredIntensity = annotation.blurData?.intensity ?? 12; + const blurRadius = Math.max(1, Math.round(clamp(configuredIntensity, 2, 40) * scaleFactor)); + const samplePadding = Math.max(2, Math.ceil(blurRadius * 2)); + const sx = Math.max(0, Math.floor(x) - samplePadding); + const sy = Math.max(0, Math.floor(y) - samplePadding); + const ex = Math.min(context.canvas.width, Math.ceil(x + width) + samplePadding); + const ey = Math.min(context.canvas.height, Math.ceil(y + height) + samplePadding); + const sw = Math.max(0, ex - sx); + const sh = Math.max(0, ey - sy); + if (sw <= 0 || sh <= 0) return; + + const scratch = new OffscreenCanvas(sw, sh); + const scratchCtx = scratch.getContext("2d")!; + scratchCtx.drawImage(context.canvas, sx, sy, sw, sh, 0, 0, sw, sh); + + context.save(); + // Draw clip path + const shape = annotation.blurData?.shape || "rectangle"; + context.beginPath(); + if (shape === "oval") { + context.ellipse(x + width / 2, y + height / 2, width / 2, height / 2, 0, 0, Math.PI * 2); + } else if (shape === "freehand") { + const points = annotation.blurData?.freehandPoints; + if (points && points.length >= 3) { + context.moveTo(x + (points[0].x / 100) * width, y + (points[0].y / 100) * height); + for (let i = 1; i < points.length; i++) { + context.lineTo(x + (points[i].x / 100) * width, y + (points[i].y / 100) * height); + } + context.closePath(); + } else { + context.rect(x, y, width, height); + } + } else { + context.rect(x, y, width, height); + } + context.clip(); + context.filter = `blur(${blurRadius}px)`; + context.drawImage(scratch, sx, sy); + context.filter = "none"; + context.restore(); +} + +function clamp(value: number, min: number, max: number) { + return Math.min(max, Math.max(min, value)); +} + +async function renderAnnotations( + context: OffscreenCanvasRenderingContext2D, + annotations: WorkerAnnotationRegion[] | undefined, + canvasWidth: number, + canvasHeight: number, + timeMs: number, + 
scaleFactor: number, +) { + if (!annotations || annotations.length === 0) return; + + const active = annotations.filter((a) => timeMs >= a.startMs && timeMs <= a.endMs); + const sorted = [...active].sort((a, b) => a.zIndex - b.zIndex); + + for (const annotation of sorted) { + const x = (annotation.position.x / 100) * canvasWidth; + const y = (annotation.position.y / 100) * canvasHeight; + const w = (annotation.size.width / 100) * canvasWidth; + const h = (annotation.size.height / 100) * canvasHeight; + + switch (annotation.type) { + case "text": + renderText(context, annotation, x, y, w, h, scaleFactor); + break; + case "image": + await renderImageAnnotation(context, annotation, x, y, w, h); + break; + case "figure": + if (annotation.figureData) { + renderArrow( + context, + annotation.figureData.arrowDirection, + annotation.figureData.color, + annotation.figureData.strokeWidth, + x, + y, + w, + h, + scaleFactor, + ); + } + break; + case "blur": + renderBlurAnnotation(context, annotation, x, y, w, h, scaleFactor); + break; + } + } +} + +// ---------- Webcam mask ---------- + +function drawWebcamMask( + context: OffscreenCanvasRenderingContext2D, + x: number, + y: number, + w: number, + h: number, + shape: string, + borderRadius: number, +) { + context.beginPath(); + switch (shape) { + case "circle": { + const cx = x + w / 2; + const cy = y + h / 2; + const r = Math.min(w, h) / 2; + context.arc(cx, cy, r, 0, Math.PI * 2); + break; + } + default: + context.roundRect(x, y, w, h, borderRadius); + break; + } + context.closePath(); +} + +// ---------- Render ---------- + +async function handleRender(msg: RenderMessage) { + if (!canvas || !ctx) { + self.postMessage({ type: "error", error: "Worker not initialized" } as ErrorMessage); + msg.frame.close(); + msg.webcamFrame?.close(); + return; + } + + try { + const { frame, webcamFrame, timestamp, zoomTransform, layoutInfo } = msg; + const w = canvas.width; + const h = canvas.height; + const config = initConfig!; // set by 
init + + // ---- 1. Clear + Background ---- + ctx.clearRect(0, 0, w, h); + + if (config.showBlur) { + ctx.save(); + ctx.filter = "blur(6px)"; + renderWallpaper(config.wallpaper, ctx, w, h); + ctx.restore(); + } else { + renderWallpaper(config.wallpaper, ctx, w, h); + } + + // ---- 2. Motion blur (history frames) ---- + if (config.motionBlurAmount > 0 && motionBlurHistory.length > 0) { + ctx.save(); + const historyLen = motionBlurHistory.length; + for (let i = 0; i < historyLen; i++) { + const bitmap = motionBlurHistory[i]; + if (!bitmap) continue; + const opacity = ((i + 1) / (historyLen + 1)) * config.motionBlurAmount * 0.3; + ctx.globalAlpha = opacity; + ctx.drawImage(bitmap, 0, 0); + } + ctx.globalAlpha = 1; + ctx.restore(); + } + + // ---- 3. Video frame with zoom transform + crop + border radius + shadow ---- + const { scale, x: tx, y: ty } = zoomTransform; + const { + stageWidth, + stageHeight, + baseScale, + baseOffsetX, + baseOffsetY, + maskX, + maskY, + maskWidth, + maskHeight, + scaledBorderRadius, + } = layoutInfo; + + // Compute the video sprite dimensions and position (same logic as updateLayout in original) + const { videoWidth: fullVideoW, videoHeight: fullVideoH, cropRegion } = config; + const croppedVideoW = fullVideoW * cropRegion.width; + const croppedVideoH = fullVideoH * cropRegion.height; + + // Video sprite size in stage pixels + const spriteW = fullVideoW * baseScale; + const spriteH = fullVideoH * baseScale; + + // Cropped display size + const croppedDisplayW = croppedVideoW * baseScale; + const croppedDisplayH = croppedVideoH * baseScale; + const coverOffsetX = (maskWidth - croppedDisplayW) / 2; + const coverOffsetY = (maskHeight - croppedDisplayH) / 2; + + const cropPixelX = cropRegion.x * fullVideoW * baseScale; + const cropPixelY = cropRegion.y * fullVideoH * baseScale; + const spriteX = -cropPixelX + coverOffsetX; + const spriteY = -cropPixelY + coverOffsetY; + + // Zoom transform math + const stageCenterX = stageWidth / 2; + const 
stageCenterY = stageHeight / 2; + const effectiveScale = scale; + const effectiveX = tx; + const effectiveY = ty; + + // Draw shadow first on a separate offscreen canvas (shadow extends outside the mask) + if (config.showShadow && config.shadowIntensity > 0) { + const intensity = config.shadowIntensity; + const shadowOffset = 12 * intensity; + const shadowBlur = 48 * intensity; + const shadowAlpha = 0.7 * intensity; + + // Create a slightly larger offscreen canvas to prevent shadow clipping + const shadowPad = Math.ceil(shadowBlur + shadowOffset); + const shadowCanvas = new OffscreenCanvas(w + shadowPad * 2, h + shadowPad * 2); + const shadowCtx = shadowCanvas.getContext("2d")!; + + shadowCtx.save(); + shadowCtx.shadowColor = `rgba(0, 0, 0, ${shadowAlpha})`; + shadowCtx.shadowBlur = shadowBlur; + shadowCtx.shadowOffsetX = 0; + shadowCtx.shadowOffsetY = shadowOffset; + + // Apply same clip + transform + shadowCtx.beginPath(); + shadowCtx.roundRect( + maskX + shadowPad, + maskY + shadowPad, + maskWidth, + maskHeight, + scaledBorderRadius, + ); + shadowCtx.clip(); + shadowCtx.translate(stageCenterX + shadowPad, stageCenterY + shadowPad); + shadowCtx.translate(effectiveX, effectiveY); + shadowCtx.scale(effectiveScale, effectiveScale); + shadowCtx.translate(-stageCenterX - shadowPad, -stageCenterY - shadowPad); + shadowCtx.drawImage(frame, baseOffsetX + spriteX, baseOffsetY + spriteY, spriteW, spriteH); + shadowCtx.restore(); + + // Composite shadow onto main canvas + ctx.drawImage(shadowCanvas, -shadowPad, -shadowPad); + } + + // Draw video inside the mask clip (on top of shadow) + ctx.save(); + ctx.beginPath(); + ctx.roundRect(maskX, maskY, maskWidth, maskHeight, scaledBorderRadius); + ctx.clip(); + ctx.translate(stageCenterX, stageCenterY); + ctx.translate(effectiveX, effectiveY); + ctx.scale(effectiveScale, effectiveScale); + ctx.translate(-stageCenterX, -stageCenterY); + ctx.drawImage(frame, baseOffsetX + spriteX, baseOffsetY + spriteY, spriteW, spriteH); + 
ctx.restore(); + + // ---- 4. Motion blur: store current frame ---- + if (config.motionBlurAmount > 0) { + // Store a copy of the current frame for motion blur accumulation + const frameCopy = await createImageBitmap(canvas); + motionBlurHistory.push(frameCopy); + if (motionBlurHistory.length > MAX_MOTION_HISTORY) { + const old = motionBlurHistory.shift(); + old?.close(); + } + } + + // ---- 5. Webcam overlay ---- + if (webcamFrame && layoutInfo.webcamRect) { + const rect = layoutInfo.webcamRect; + const shape = rect.maskShape || config.webcamMaskShape || "rectangle"; + + const sourceWidth = webcamFrame.displayWidth || webcamFrame.codedWidth || rect.width; + const sourceHeight = webcamFrame.displayHeight || webcamFrame.codedHeight || rect.height; + const sourceAspect = sourceWidth / sourceHeight; + const targetAspect = rect.width / rect.height; + + let sourceCropW = sourceWidth; + let sourceCropH = sourceHeight; + if (sourceAspect > targetAspect) { + sourceCropW = Math.round(sourceHeight * targetAspect); + } else { + sourceCropH = Math.round(sourceWidth / targetAspect); + } + const sourceCropX = Math.max(0, Math.round((sourceWidth - sourceCropW) / 2)); + const sourceCropY = Math.max(0, Math.round((sourceHeight - sourceCropH) / 2)); + + ctx.save(); + drawWebcamMask(ctx, rect.x, rect.y, rect.width, rect.height, shape, rect.borderRadius); + + // Shadow for webcam (picture-in-picture preset) + if (config.webcamLayoutPreset === "picture-in-picture") { + ctx.shadowColor = "rgba(0,0,0,0.35)"; + ctx.shadowBlur = 24; + ctx.shadowOffsetX = 0; + ctx.shadowOffsetY = 10; + } + ctx.fillStyle = "#000000"; + ctx.fill(); + ctx.shadowColor = "transparent"; + ctx.shadowBlur = 0; + ctx.shadowOffsetX = 0; + ctx.shadowOffsetY = 0; + + ctx.clip(); + ctx.drawImage( + webcamFrame, + sourceCropX, + sourceCropY, + sourceCropW, + sourceCropH, + rect.x, + rect.y, + rect.width, + rect.height, + ); + ctx.restore(); + } + + // ---- 6. 
Annotations ---- + const previewWidth = config.previewWidth || 1920; + const previewHeight = config.previewHeight || 1080; + const scaleX = w / previewWidth; + const scaleY = h / previewHeight; + const scaleFactor = (scaleX + scaleY) / 2; + + await renderAnnotations( + ctx, + config.annotationRegions, + w, + h, + timestamp / 1000, // convert microseconds to ms + scaleFactor, + ); + + // ---- 7. Transfer result back ---- + const bitmap = canvas.transferToImageBitmap(); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (self as any).postMessage({ type: "frame-ready", bitmap, timestamp }, [bitmap]); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + self.postMessage({ type: "error", error: message } as ErrorMessage); + } finally { + msg.frame.close(); + msg.webcamFrame?.close(); + } +} + +// ---------- Dispose ---------- + +function handleDispose() { + for (const bitmap of motionBlurHistory) { + bitmap.close(); + } + motionBlurHistory = []; + if (wallpaperImage instanceof ImageBitmap) { + wallpaperImage.close(); + } + wallpaperImage = null; + if (canvas) { + // Can't destroy OffscreenCanvas, but clear references + canvas = null; + ctx = null; + } +} + +// ---------- Message handler ---------- + +let initConfig: InitMessage["config"] | null = null; + +self.addEventListener("message", (e: MessageEvent) => { + const msg = e.data; + + switch (msg.type) { + case "init": + initConfig = msg.config; + handleInit(msg); + break; + case "render": + if (!initConfig) { + self.postMessage({ type: "error", error: "Not initialized" } as ErrorMessage); + msg.frame.close(); + msg.webcamFrame?.close(); + return; + } + handleRender(msg); + break; + case "dispose": + handleDispose(); + break; + } +}); diff --git a/src/lib/exporter/gifExporter.ts b/src/lib/exporter/gifExporter.ts index 747e34e2..877faff7 100644 --- a/src/lib/exporter/gifExporter.ts +++ b/src/lib/exporter/gifExporter.ts @@ -319,7 +319,7 @@ export class GifExporter { 
this.gif!.render(); }); - return { success: true, blob }; + return { success: true, type: "blob", blob }; } catch (error) { console.error("GIF Export error:", error); return { diff --git a/src/lib/exporter/index.ts b/src/lib/exporter/index.ts index e93166c7..d852f37f 100644 --- a/src/lib/exporter/index.ts +++ b/src/lib/exporter/index.ts @@ -1,3 +1,4 @@ +export { FFmpegExporter } from "./ffmpegExporter"; export { FrameRenderer } from "./frameRenderer"; export { calculateOutputDimensions, GifExporter } from "./gifExporter"; export { VideoMuxer } from "./muxer"; diff --git a/src/lib/exporter/types.ts b/src/lib/exporter/types.ts index b6e08e8b..061a2d4f 100644 --- a/src/lib/exporter/types.ts +++ b/src/lib/exporter/types.ts @@ -11,15 +11,25 @@ export interface ExportProgress { totalFrames: number; percentage: number; estimatedTimeRemaining: number; // in seconds - phase?: "extracting" | "finalizing"; // Phase of export + phase?: "extracting" | "finalizing" | "encoding"; // Phase of export renderProgress?: number; // 0-100, progress of GIF rendering phase } -export interface ExportResult { - success: boolean; - blob?: Blob; - error?: string; -} +export type ExportResult = + | { + success: true; + type: "blob"; + blob: Blob; + } + | { + success: true; + type: "native"; + path: string; + } + | { + success: false; + error: string; + }; export interface VideoFrameData { frame: VideoFrame; diff --git a/src/lib/exporter/videoExporter.ts b/src/lib/exporter/videoExporter.ts index d0affd17..3d7ebd77 100644 --- a/src/lib/exporter/videoExporter.ts +++ b/src/lib/exporter/videoExporter.ts @@ -150,6 +150,10 @@ export class VideoExporter { this.renderer = renderer; await renderer.initialize(); + // Detect OS here to figure out if we can use zero-copy GPU frames + const platform = window.electronAPI ? 
await window.electronAPI.getPlatform() : "win32"; + const isLinux = platform === "linux"; + await this.initializeEncoder(encoderPreference); const hasAudio = videoInfo.hasAudio; @@ -237,25 +241,31 @@ export class VideoExporter { const canvas = renderer.getCanvas(); - // Read raw pixels from the canvas instead of passing - // the canvas directly to VideoFrame. On some Linux - // systems the GPU shared-image path (EGL/Ozone) fails - // silently, producing empty frames. - const canvasCtx = canvas.getContext("2d")!; - const imageData = canvasCtx.getImageData(0, 0, canvas.width, canvas.height); - const exportFrame = new VideoFrame(imageData.data.buffer, { - format: "RGBA", - codedWidth: canvas.width, - codedHeight: canvas.height, - timestamp, - duration: frameDuration, - colorSpace: { - primaries: "bt709", - transfer: "iec61966-2-1", - matrix: "rgb", - fullRange: true, - }, - }); + // Use zero-copy GPU texturing on Windows/Mac to completely bypass CPU stalls. + // On some Linux systems the GPU shared-image path (EGL/Ozone) fails + // silently, producing empty frames, so we maintain the getImageData fallback. 
+ let exportFrame: VideoFrame; + + if (isLinux) { + const canvasCtx = canvas.getContext("2d")!; + const imageData = canvasCtx.getImageData(0, 0, canvas.width, canvas.height); + exportFrame = new VideoFrame(imageData.data.buffer, { + format: "RGBA", + codedWidth: canvas.width, + codedHeight: canvas.height, + timestamp, + duration: frameDuration, + colorSpace: { + primaries: "bt709", + transfer: "iec61966-2-1", + matrix: "rgb", + fullRange: true, + }, + }); + } else { + // Blazing fast zero-copy GPU path + exportFrame = new VideoFrame(canvas, { timestamp, duration: frameDuration }); + } while ( this.encoder && @@ -352,7 +362,15 @@ export class VideoExporter { } const blob = await muxer.finalize(); - return { success: true, blob }; + return { success: true, type: "blob", blob }; + } catch (error) { + stopWebcamDecode = true; + webcamFrameQueue?.destroy(); + webcamDecoder?.cancel(); + if (webcamDecodePromise) { + await webcamDecodePromise.catch(() => undefined); + } + throw error; } finally { stopWebcamDecode = true; webcamFrameQueue?.destroy(); diff --git a/src/vite-env.d.ts b/src/vite-env.d.ts index d76ee157..6bea08c6 100644 --- a/src/vite-env.d.ts +++ b/src/vite-env.d.ts @@ -122,5 +122,53 @@ interface Window { setHasUnsavedChanges: (hasChanges: boolean) => void; onRequestSaveBeforeClose: (callback: () => Promise | boolean) => () => void; setLocale: (locale: string) => Promise; + getPlatform: () => Promise; + revealInFolder: ( + filePath: string, + ) => Promise<{ success: boolean; error?: string; message?: string }>; + + // ---- FFmpeg Native Export ---- + ffmpegGetCapabilities: () => Promise<{ + available: boolean; + encoders: string[]; + bestEncoder: string | null; + path: string | null; + }>; + ffmpegExportStart: (config: { + width: number; + height: number; + frameRate: number; + encoder: string; + bitrate: number; + audioSourcePath?: string; + hasAudio?: boolean; + }) => Promise<{ + success: boolean; + sessionId?: string; + error?: string; + }>; + 
ffmpegExportFrame: ( + sessionId: string, + frameData: ArrayBuffer, + ) => Promise<{ + success: boolean; + backpressure?: boolean; + frameCount?: number; + error?: string; + }>; + ffmpegExportFinish: ( + sessionId: string, + fileName: string, + ) => Promise<{ + success: boolean; + path?: string; + message?: string; + canceled?: boolean; + error?: string; + }>; + ffmpegExportCancel: (sessionId: string) => Promise<{ + success: boolean; + error?: string; + }>; }; }