From fd70cab0d77fedcc48e906c7f5c473798cdc924f Mon Sep 17 00:00:00 2001 From: amitay keisar Date: Thu, 19 Mar 2026 18:15:38 +0200 Subject: [PATCH 01/15] feat: add reel mode (9:16), zoom-out blur background, and per-section PIP scale MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Reel Mode (9:16 Vertical Output) --------------------------------- - Add `outputMode` project setting ('landscape' | 'reel') with persistence - Add `normalizeOutputMode()`, `normalizeReelCropX()` to domain model - Add `resolveOutputSize()` reel branch returning 9:16 dimensions (e.g. 608x1080 for 1080p source) - Add `reelCropX` per-keyframe property (-1..+1) controlling horizontal crop position within the 16:9 source frame - Add crop overlay in editor preview: semi-transparent dark regions outside the crop area with dashed white boundary - Add draggable crop region on canvas for repositioning reelCropX - Add crop preset buttons (left/center/right) for quick positioning - Add 16:9 / 9:16 toggle button group in editor controls - Build animated crop transitions in ffmpeg using `buildNumericExpr()` for smooth 0.3s interpolation between sections Zoom-Out with Blur Background (Reel Mode) ------------------------------------------ - Allow `backgroundZoom` range 0.5–3.0 in reel mode (was 1.0–3.0) - Add `MIN_REEL_BACKGROUND_ZOOM` constant and reel-aware `normalizeBackgroundZoom(value, outputMode)` - When zoom < 1.0 in reel mode, render a darkened background (colorlevels 20% brightness) behind the zoomed-out content - Static zoom-out: uniform scale + centered overlay on darkened bg - Animated zoom crossing 1.0 boundary: split pipeline with zoompan (clamped to max(1, zoom)) + dynamic scale(eval=frame) for the sub-1.0 portion, overlaid on darkened background - Clamp zoom values to 1.0 when switching back to landscape mode Per-Section PIP Scale --------------------- - Add `pipScale` per-keyframe property (0.15–0.50, default 0.22) - Add `normalizePipScale()` to 
domain model and keyframe normalization - PIP size slider now controls current section's anchor `pipScale` instead of a global project setting - Compute PIP pixel size as `round(effectiveCanvasW * pipScale)` - Re-snap PIP position to nearest corner when scale changes - Add smooth 0.3s animated PIP size transitions between sections - Static pipScale: single fixed scale in ffmpeg (no expression overhead) - Animated pipScale: two-stage ffmpeg pipeline — 1) scale to max pip size (fixed) for format/geq round corners 2) animated scale(eval=frame) after geq for actual size 3) overlay with eval=frame to handle variable-size input - Add `pipScale` to section operations: split, apply-to-future, sync anchors, render keyframes, render sections, project snapshot Bug Fixes --------- - Fix `resolveOutputSize()` to accept `sourceHeight` parameter (was ignored, causing incorrect output dimensions) - Fix hardcoded 1920x1080 camera black fallback in render-service to use correct dimensions per output mode - Fix overlay filter missing `eval=frame` flag, which caused PIP position to freeze at first frame when using animated expressions - Fix `snapToNearestCorner()` to accept effective canvas dimensions and pip size parameters instead of using hardcoded globals Tests ----- - Add unit tests for `normalizeReelCropX`, `normalizeOutputMode`, `normalizePipScale`, and keyframe normalization with new properties - Add unit tests for `resolveOutputSize` in reel and landscape modes - Add unit tests for `buildScreenFilter` with reel crop (static and animated), zoom-out blur background (static and animated) - Add unit tests for `buildFilterComplex` with reel output dimensions, static pipScale, animated pipScale (two-stage scale), and defaults - Add unit tests for `normalizeSectionInput` with reelCropX and pipScale Co-Authored-By: Claude Opus 4.6 --- src/index.html | 15 + src/main/services/render-filter-service.js | 186 +++++++- src/main/services/render-service.js | 20 +- src/renderer/app.js | 427 
++++++++++++++++-- .../features/timeline/keyframe-ops.js | 2 + src/renderer/styles/main.css | 2 +- src/shared/domain/project.js | 55 ++- tests/unit/project-domain.test.js | 114 +++++ tests/unit/render-filter-service.test.js | 230 ++++++++++ tests/unit/render-service.test.js | 46 ++ 10 files changed, 1031 insertions(+), 66 deletions(-) diff --git a/src/index.html b/src/index.html index fae61a7..e07d486 100644 --- a/src/index.html +++ b/src/index.html @@ -202,6 +202,21 @@

R class="w-28 accent-white disabled:opacity-50" /> 1.00x + +
+ + +
+
diff --git a/src/main/services/render-filter-service.js b/src/main/services/render-filter-service.js index 16da020..176c84b 100644 --- a/src/main/services/render-filter-service.js +++ b/src/main/services/render-filter-service.js @@ -1,6 +1,12 @@ const TRANSITION_DURATION = 0.3; -function resolveOutputSize(sourceWidth, _sourceHeight) { +function resolveOutputSize(sourceWidth, sourceHeight, outputMode = 'landscape') { + if (outputMode === 'reel') { + let outH = sourceHeight % 2 === 0 ? sourceHeight : sourceHeight - 1; + let outW = Math.round((outH * 9) / 16); + if (outW % 2 !== 0) outW -= 1; + return { outW, outH }; + } let outW = sourceWidth % 2 === 0 ? sourceWidth : sourceWidth - 1; let outH = Math.round((outW * 9) / 16); if (outH % 2 !== 0) outH -= 1; @@ -128,24 +134,32 @@ function buildScreenFilter( _canvasH, outputLabel = '[screen]', screenPreprocessed = false, - targetFps = 30 + targetFps = 30, + outputMode = 'landscape' ) { - const { outW, outH } = resolveOutputSize(sourceWidth, sourceHeight); + const landscapeSize = resolveOutputSize(sourceWidth, sourceHeight, 'landscape'); + const { outW: landscapeW, outH: landscapeH } = landscapeSize; + const isReel = outputMode === 'reel'; + const { outW: finalW, outH: finalH } = isReel + ? resolveOutputSize(sourceWidth, sourceHeight, 'reel') + : landscapeSize; + const normalizedKeyframes = (Array.isArray(keyframes) ? keyframes : []).map((keyframe) => ({ ...keyframe, backgroundZoom: Number.isFinite(Number(keyframe?.backgroundZoom)) ? Number(keyframe.backgroundZoom) : 1, backgroundPanX: Number.isFinite(Number(keyframe?.backgroundPanX)) ? Number(keyframe.backgroundPanX) : 0, backgroundPanY: Number.isFinite(Number(keyframe?.backgroundPanY)) ? 
Number(keyframe.backgroundPanY) : 0, backgroundFocusX: panToFocusCoord(keyframe?.backgroundZoom, keyframe?.backgroundPanX, 0.5), - backgroundFocusY: panToFocusCoord(keyframe?.backgroundZoom, keyframe?.backgroundPanY, 0.5) + backgroundFocusY: panToFocusCoord(keyframe?.backgroundZoom, keyframe?.backgroundPanY, 0.5), + reelCropX: Number.isFinite(Number(keyframe?.reelCropX)) ? Number(keyframe.reelCropX) : 0 })); const baseFilter = screenPreprocessed ? '[0:v]setpts=PTS-STARTPTS[screen_base]' : screenFitMode === 'fill' - ? `[0:v]scale=${outW}:${outH}:force_original_aspect_ratio=increase,crop=${outW}:${outH}[screen_base]` - : `[0:v]scale=${outW}:${outH}:force_original_aspect_ratio=decrease,pad=${outW}:${outH}:'(ow-iw)/2':'(oh-ih)/2':color=black[screen_base]`; + ? `[0:v]scale=${landscapeW}:${landscapeH}:force_original_aspect_ratio=increase,crop=${landscapeW}:${landscapeH}[screen_base]` + : `[0:v]scale=${landscapeW}:${landscapeH}:force_original_aspect_ratio=decrease,pad=${landscapeW}:${landscapeH}:'(ow-iw)/2':'(oh-ih)/2':color=black[screen_base]`; const hasBackgroundAnimation = normalizedKeyframes.some((keyframe) => { return Math.abs(keyframe.backgroundZoom - 1) > 0.0001 @@ -153,14 +167,104 @@ function buildScreenFilter( || Math.abs(keyframe.backgroundPanY) > 0.0001; }); + // Build reel crop suffix if in reel mode + let reelCropSuffix = ''; + if (isReel) { + const maxOffset = landscapeW - finalW; + const hasAnimatedCrop = normalizedKeyframes.some((kf, i) => { + if (i === 0) return false; + return Math.abs(kf.reelCropX - normalizedKeyframes[i - 1].reelCropX) > 0.0001; + }); + + if (hasAnimatedCrop) { + const cropXExpr = buildNumericExpr(normalizedKeyframes, 'reelCropX', 3, 0, 't'); + reelCropSuffix = `,crop=${finalW}:${finalH}:'max(0,min(${maxOffset},(${cropXExpr}+1)/2*${maxOffset}))':0,setsar=1`; + } else { + const cropX = Math.max(0, Math.min(maxOffset, Math.round(((normalizedKeyframes[0]?.reelCropX || 0) + 1) / 2 * maxOffset))); + reelCropSuffix = 
`,crop=${finalW}:${finalH}:${cropX}:0,setsar=1`; + } + } + + // Check if any keyframe has zoom < 1 (zoom-out in reel mode) + const hasZoomOut = isReel && normalizedKeyframes.some(kf => kf.backgroundZoom < 0.9999); + + // --- Zoom-out pipeline (reel mode with zoom < 1) --- + if (hasZoomOut) { + const darkenFilter = 'colorlevels=romax=0.2:gomax=0.2:bomax=0.2'; + + const hasAnimatedCrop = normalizedKeyframes.some((kf, i) => { + if (i === 0) return false; + return Math.abs(kf.reelCropX - normalizedKeyframes[i - 1].reelCropX) > 0.0001; + }); + + // Check if zoom/focus actually vary between keyframes + const hasZoomAnimation = normalizedKeyframes.length > 1 && normalizedKeyframes.some((kf, i) => { + if (i === 0) return false; + const prev = normalizedKeyframes[i - 1]; + return Math.abs(kf.backgroundZoom - prev.backgroundZoom) > 0.0001 + || Math.abs(kf.backgroundFocusX - prev.backgroundFocusX) > 0.0001 + || Math.abs(kf.backgroundFocusY - prev.backgroundFocusY) > 0.0001; + }); + + if (!hasZoomAnimation) { + // Static zoom-out: all keyframes same zoom < 1, no pan — uniform scale + const zoom = normalizedKeyframes[0].backgroundZoom; + let scaledW = Math.round(landscapeW * zoom); + if (scaledW % 2 !== 0) scaledW -= 1; + scaledW = Math.max(2, scaledW); + let scaledH = Math.round(landscapeH * zoom); + if (scaledH % 2 !== 0) scaledH -= 1; + scaledH = Math.max(2, scaledH); + const offsetX = Math.round((landscapeW - scaledW) / 2); + const offsetY = Math.round((landscapeH - scaledH) / 2); + + // Crop constrained to scaled screen bounds + const scaledLeft = Math.round((landscapeW - landscapeW * zoom) / 2); + const maxCropRange = Math.max(0, Math.round(landscapeW * zoom - finalW)); + let zoCropSuffix; + if (hasAnimatedCrop) { + const cropXExpr = buildNumericExpr(normalizedKeyframes, 'reelCropX', 3, 0, 't'); + zoCropSuffix = `,crop=${finalW}:${finalH}:'max(0,${scaledLeft}+((${cropXExpr})+1)/2*${maxCropRange})':0,setsar=1`; + } else { + const cropX = Math.max(0, scaledLeft + 
Math.round(((normalizedKeyframes[0]?.reelCropX || 0) + 1) / 2 * maxCropRange)); + zoCropSuffix = `,crop=${finalW}:${finalH}:${cropX}:0,setsar=1`; + } + + return `${baseFilter};[screen_base]split[for_zoom][for_bg];[for_bg]${darkenFilter}[dark_bg];[for_zoom]scale=${scaledW}:${scaledH}[content];[dark_bg][content]overlay=${offsetX}:${offsetY}${zoCropSuffix}${outputLabel}`; + } + + // Animated zoom-out: zoom may cross 1.0 boundary — uniform scale both dimensions + const zoomExprIT = buildNumericExpr(normalizedKeyframes, 'backgroundZoom', 3, 1, 'it'); + const zoomExprT = buildNumericExpr(normalizedKeyframes, 'backgroundZoom', 3, 1, 't'); + const focusXExprIT = buildNumericExpr(normalizedKeyframes, 'backgroundFocusX', 6, 0.5, 'it'); + const focusYExprIT = buildNumericExpr(normalizedKeyframes, 'backgroundFocusY', 6, 0.5, 'it'); + + const zoompanPart = `zoompan=z='max(1.000,${zoomExprIT})':x='max(0,min(iw-iw/zoom,iw*(${focusXExprIT})-iw/zoom/2))':y='max(0,min(ih-ih/zoom,ih*(${focusYExprIT})-ih/zoom/2))':d=1:s=${landscapeW}x${landscapeH}:fps=${targetFps},setsar=1`; + const scalePart = `scale=w='max(2,2*floor(${landscapeW}*min(1.0,${zoomExprT})/2))':h='max(2,2*floor(${landscapeH}*min(1.0,${zoomExprT})/2))':eval=frame`; + const overlayPart = `overlay=x='(main_w-overlay_w)/2':y='(main_h-overlay_h)/2':eval=frame`; + + // Crop constrained to scaled screen: cropX = scaledLeft + ((reelCropX+1)/2) * maxRange + const cropXExpr = buildNumericExpr(normalizedKeyframes, 'reelCropX', 3, 0, 't'); + const zMinExpr = `min(1,${zoomExprT})`; + const fullCropExpr = `${landscapeW}*(1-${zMinExpr})/2+((${cropXExpr})+1)/2*max(0,${landscapeW}*${zMinExpr}-${finalW})`; + const zoCropSuffix = `,crop=${finalW}:${finalH}:'max(0,min(${landscapeW - finalW},${fullCropExpr}))':0,setsar=1`; + + return `${baseFilter};[screen_base]split[for_zoom][for_bg];[for_bg]${darkenFilter}[dark_bg];[for_zoom]${zoompanPart}[zoomed];[zoomed]${scalePart}[scaled];[dark_bg][scaled]${overlayPart}${zoCropSuffix}${outputLabel}`; + 
} + + // --- Standard pipeline (no zoom-out) --- if (!hasBackgroundAnimation) { + if (isReel) { + // Need an intermediate label for the reel crop + return `${baseFilter};[screen_base]null${reelCropSuffix}${outputLabel}`; + } return baseFilter.replace('[screen_base]', outputLabel); } const zoomExpr = buildNumericExpr(normalizedKeyframes, 'backgroundZoom', 3, 1, 'it'); const focusXExpr = buildNumericExpr(normalizedKeyframes, 'backgroundFocusX', 6, 0.5, 'it'); const focusYExpr = buildNumericExpr(normalizedKeyframes, 'backgroundFocusY', 6, 0.5, 'it'); - const animatedFilter = `[screen_base]zoompan=z='${zoomExpr}':x='max(0,min(iw-iw/zoom,iw*(${focusXExpr})-iw/zoom/2))':y='max(0,min(ih-ih/zoom,ih*(${focusYExpr})-ih/zoom/2))':d=1:s=${outW}x${outH}:fps=${targetFps},setsar=1${outputLabel}`; + const animatedFilter = `[screen_base]zoompan=z='${zoomExpr}':x='max(0,min(iw-iw/zoom,iw*(${focusXExpr})-iw/zoom/2))':y='max(0,min(ih-ih/zoom,ih*(${focusYExpr})-ih/zoom/2))':d=1:s=${landscapeW}x${landscapeH}:fps=${targetFps},setsar=1${reelCropSuffix}${outputLabel}`; return `${baseFilter};${animatedFilter}`; } @@ -173,17 +277,30 @@ function buildFilterComplex( canvasW, _canvasH, screenPreprocessed = false, - targetFps = 30 + targetFps = 30, + outputMode = 'landscape' ) { - const { outW, outH } = resolveOutputSize(sourceWidth, sourceHeight); + const { outW, outH } = resolveOutputSize(sourceWidth, sourceHeight, outputMode); const scale = outW / canvasW; - const actualPipSize = Math.round(pipSize * scale); const radius = Math.round(12 * scale); - const maxCoord = actualPipSize - 1 - radius; const radiusSquared = radius * radius; - const scaledKeyframes = keyframes.map((keyframe) => ({ + // Determine per-keyframe pipScale values + const DEFAULT_PIP_SCALE = 0.22; + const normalizedKeyframes = (Array.isArray(keyframes) ? keyframes : []).map((kf) => ({ + ...kf, + pipScale: Number.isFinite(Number(kf.pipScale)) ? 
Number(kf.pipScale) : DEFAULT_PIP_SCALE + })); + + // Check if pipScale is static (same across all keyframes) + const firstPipScale = normalizedKeyframes.length > 0 ? normalizedKeyframes[0].pipScale : DEFAULT_PIP_SCALE; + const isStaticPipScale = normalizedKeyframes.every(kf => Math.abs(kf.pipScale - firstPipScale) < 0.0001); + + // For static case, use fixed pip size; for animated, build expressions + const actualPipSize = isStaticPipScale ? Math.round(outW * firstPipScale) : null; + + const scaledKeyframes = normalizedKeyframes.map((keyframe) => ({ ...keyframe, pipX: Math.round(keyframe.pipX * scale), pipY: Math.round(keyframe.pipY * scale) @@ -198,39 +315,58 @@ function buildFilterComplex( _canvasH, '[screen]', screenPreprocessed, - targetFps + targetFps, + outputMode ); - const hasPip = keyframes.some((keyframe) => keyframe.pipVisible); - const hasCamFull = keyframes.some((keyframe) => keyframe.cameraFullscreen); + const hasPip = normalizedKeyframes.some((keyframe) => keyframe.pipVisible); + const hasCamFull = normalizedKeyframes.some((keyframe) => keyframe.cameraFullscreen); - if (hasPip && hasCamFull) { - const alphaExpr = buildAlphaExpr(keyframes); + // Build camera PIP filter (scale + round corners + alpha) + function buildCamPipFilter(inputLabel, outputLabel) { + const alphaExpr = buildAlphaExpr(normalizedKeyframes); + + if (isStaticPipScale) { + const maxCoord = actualPipSize - 1 - radius; + const roundCornerExpr = `lte(pow(max(0,max(${radius}-X,X-${maxCoord})),2)+pow(max(0,max(${radius}-Y,Y-${maxCoord})),2),${radiusSquared})`; + return `${inputLabel}setpts=PTS-STARTPTS,crop='min(iw,ih)':'min(iw,ih)':'(iw-min(iw,ih))/2':'(ih-min(iw,ih))/2',scale=${actualPipSize}:${actualPipSize},format=yuva420p,geq=lum='lum(X,Y)':cb='cb(X,Y)':cr='cr(X,Y)':a='255*${roundCornerExpr}*(${alphaExpr})'${outputLabel}`; + } + + // Animated pipScale: scale to fixed max size, apply round corners, then animated downscale. 
+ // format+geq lock to first frame dimensions, so animated scale must come AFTER geq. + // overlay handles variable-size overlay input correctly. + const maxPipScale = Math.max(...normalizedKeyframes.map(kf => kf.pipScale)); + const maxPipSize = Math.max(2, Math.round(outW * maxPipScale)); + const maxCoord = maxPipSize - 1 - radius; const roundCornerExpr = `lte(pow(max(0,max(${radius}-X,X-${maxCoord})),2)+pow(max(0,max(${radius}-Y,Y-${maxCoord})),2),${radiusSquared})`; - const camPipFilter = `[cam1]setpts=PTS-STARTPTS,crop='min(iw,ih)':'min(iw,ih)':'(iw-min(iw,ih))/2':'(ih-min(iw,ih))/2',scale=${actualPipSize}:${actualPipSize},format=yuva420p,geq=lum='lum(X,Y)':cb='cb(X,Y)':cr='cr(X,Y)':a='255*${roundCornerExpr}*(${alphaExpr})'[cam]`; + const pipSizeExpr = buildNumericExpr(normalizedKeyframes, 'pipScale', 3, DEFAULT_PIP_SCALE, 't'); + const sizeExpr = `max(2,2*floor(${outW}*${pipSizeExpr}/2))`; + return `${inputLabel}setpts=PTS-STARTPTS,crop='min(iw,ih)':'min(iw,ih)':'(iw-min(iw,ih))/2':'(ih-min(iw,ih))/2',scale=${maxPipSize}:${maxPipSize},format=yuva420p,geq=lum='lum(X,Y)':cb='cb(X,Y)':cr='cr(X,Y)':a='255*${roundCornerExpr}*(${alphaExpr})',scale=w='${sizeExpr}':h='${sizeExpr}':eval=frame${outputLabel}`; + } + + if (hasPip && hasCamFull) { + const camPipFilter = buildCamPipFilter('[cam1]', '[cam]'); - const camFullAlpha = buildCamFullAlphaExpr(keyframes); + const camFullAlpha = buildCamFullAlphaExpr(normalizedKeyframes); const camFullFilter = `[cam2]setpts=PTS-STARTPTS,scale=${outW}:${outH}:force_original_aspect_ratio=increase,crop=${outW}:${outH},format=yuva420p,geq=lum='lum(X,Y)':cb='cb(X,Y)':cr='cr(X,Y)':a='255*(${camFullAlpha})'[camfull]`; const xExpr = buildPosExpr(scaledKeyframes, 'pipX'); const yExpr = buildPosExpr(scaledKeyframes, 'pipY'); - return `${screenFilter};[1:v]split[cam1][cam2];${camPipFilter};${camFullFilter};[screen][cam]overlay=x='${xExpr}':y='${yExpr}':format=auto[with_pip];[with_pip][camfull]overlay=0:0:format=auto[out]`; + return 
`${screenFilter};[1:v]split[cam1][cam2];${camPipFilter};${camFullFilter};[screen][cam]overlay=x='${xExpr}':y='${yExpr}':format=auto:eval=frame[with_pip];[with_pip][camfull]overlay=0:0:format=auto[out]`; } if (hasCamFull) { - const camFullAlpha = buildCamFullAlphaExpr(keyframes); + const camFullAlpha = buildCamFullAlphaExpr(normalizedKeyframes); const camFullFilter = `[1:v]setpts=PTS-STARTPTS,scale=${outW}:${outH}:force_original_aspect_ratio=increase,crop=${outW}:${outH},format=yuva420p,geq=lum='lum(X,Y)':cb='cb(X,Y)':cr='cr(X,Y)':a='255*(${camFullAlpha})'[camfull]`; return `${screenFilter};${camFullFilter};[screen][camfull]overlay=0:0:format=auto[out]`; } - const alphaExpr = buildAlphaExpr(keyframes); - const roundCornerExpr = `lte(pow(max(0,max(${radius}-X,X-${maxCoord})),2)+pow(max(0,max(${radius}-Y,Y-${maxCoord})),2),${radiusSquared})`; - const camFilter = `[1:v]setpts=PTS-STARTPTS,crop='min(iw,ih)':'min(iw,ih)':'(iw-min(iw,ih))/2':'(ih-min(iw,ih))/2',scale=${actualPipSize}:${actualPipSize},format=yuva420p,geq=lum='lum(X,Y)':cb='cb(X,Y)':cr='cr(X,Y)':a='255*${roundCornerExpr}*(${alphaExpr})'[cam]`; + const camFilter = buildCamPipFilter('[1:v]', '[cam]'); const xExpr = buildPosExpr(scaledKeyframes, 'pipX'); const yExpr = buildPosExpr(scaledKeyframes, 'pipY'); - return `${screenFilter};${camFilter};[screen][cam]overlay=x='${xExpr}':y='${yExpr}':format=auto[out]`; + return `${screenFilter};${camFilter};[screen][cam]overlay=x='${xExpr}':y='${yExpr}':format=auto:eval=frame[out]`; } module.exports = { diff --git a/src/main/services/render-service.js b/src/main/services/render-service.js index 878d421..952e812 100644 --- a/src/main/services/render-service.js +++ b/src/main/services/render-service.js @@ -6,6 +6,9 @@ const { normalizeBackgroundPan, normalizeExportAudioPreset, normalizeCameraSyncOffsetMs, + normalizeReelCropX, + normalizeOutputMode, + normalizePipScale, EXPORT_AUDIO_PRESET_COMPRESSED } = require('../../shared/domain/project'); const { chooseRenderFps, 
probeVideoFpsWithFfmpeg } = require('./fps-service'); @@ -25,7 +28,9 @@ function normalizeSectionInput(rawSections) { sourceEnd, backgroundZoom: normalizeBackgroundZoom(section.backgroundZoom), backgroundPanX: normalizeBackgroundPan(section.backgroundPanX), - backgroundPanY: normalizeBackgroundPan(section.backgroundPanY) + backgroundPanY: normalizeBackgroundPan(section.backgroundPanY), + reelCropX: normalizeReelCropX(section.reelCropX), + pipScale: normalizePipScale(section.pipScale) }; }) .filter(Boolean); @@ -176,6 +181,7 @@ async function renderComposite(opts = {}, deps = {}) { const cameraSyncOffsetMs = normalizeCameraSyncOffsetMs(opts.cameraSyncOffsetMs); const sourceWidth = Number.isFinite(Number(opts.sourceWidth)) ? Number(opts.sourceWidth) : 1920; const sourceHeight = Number.isFinite(Number(opts.sourceHeight)) ? Number(opts.sourceHeight) : 1080; + const outputMode = normalizeOutputMode(opts.outputMode); const outputFolder = typeof opts.outputFolder === 'string' ? opts.outputFolder : ''; const probeFps = deps.probeVideoFpsWithFfmpeg || probeVideoFpsWithFfmpeg; @@ -192,8 +198,8 @@ async function renderComposite(opts = {}, deps = {}) { if (!ffmpegPath) throw new Error('ffmpeg-static is unavailable on this platform'); const outputPath = path.join(outputFolder, `recording-${now()}-edited.mp4`); - const canvasW = 1920; const canvasH = 1080; + const canvasW = outputMode === 'reel' ? Math.round(canvasH * 9 / 16) : 1920; const takeMap = new Map(); for (const take of takes) { @@ -256,7 +262,9 @@ async function renderComposite(opts = {}, deps = {}) { if (cameraIdx >= 0) { filterParts.push(buildCameraTrimFilter(cameraIdx, section, targetFps, i, cameraSyncOffsetMs)); } else { - filterParts.push(`color=black:s=1920x1080:d=${duration}[cv${i}]`); + const fallbackW = outputMode === 'reel' ? Math.round((sourceHeight * 9) / 16) : 1920; + const fallbackH = outputMode === 'reel' ? 
sourceHeight : 1080; + filterParts.push(`color=black:s=${fallbackW}x${fallbackH}:d=${duration}[cv${i}]`); } } @@ -271,7 +279,8 @@ async function renderComposite(opts = {}, deps = {}) { canvasW, canvasH, true, - targetFps + targetFps, + outputMode ); const adaptedOverlay = overlayFilter .replace(/\[0:v\]/g, '[screen_raw]') @@ -287,7 +296,8 @@ async function renderComposite(opts = {}, deps = {}) { canvasH, '[out]', true, - targetFps + targetFps, + outputMode ).replace(/\[0:v\]/g, '[screen_raw]'); filterParts.push(screenOnlyFilter); } diff --git a/src/renderer/app.js b/src/renderer/app.js index 541ab7d..14ce826 100644 --- a/src/renderer/app.js +++ b/src/renderer/app.js @@ -82,6 +82,15 @@ import { const editorBgZoomInput = document.getElementById('editorBgZoomInput'); const editorBgZoomValue = document.getElementById('editorBgZoomValue'); const editorApplyFutureBtn = document.getElementById('editorApplyFutureBtn'); + const editorModeLandscapeBtn = document.getElementById('editorModeLandscape'); + const editorModeReelBtn = document.getElementById('editorModeReel'); + const editorPipSizeControl = document.getElementById('editorPipSizeControl'); + const editorPipSizeInput = document.getElementById('editorPipSizeInput'); + const editorPipSizeValue = document.getElementById('editorPipSizeValue'); + const editorCropPresets = document.getElementById('editorCropPresets'); + const editorCropLeftBtn = document.getElementById('editorCropLeft'); + const editorCropCenterBtn = document.getElementById('editorCropCenter'); + const editorCropRightBtn = document.getElementById('editorCropRight'); const editorTimeEl = document.getElementById('editorTime'); const editorTimelineWrapper = document.getElementById('editorTimelineWrapper'); const editorTimeline = document.getElementById('editorTimeline'); @@ -153,26 +162,45 @@ import { const PIP_MARGIN = 20; const PIP_SIZE = Math.round(CANVAS_W * PIP_FRACTION); const MIN_SECTION_ZOOM = 1; + const MIN_REEL_SECTION_ZOOM = 0.5; const 
MAX_SECTION_ZOOM = 3; const DEFAULT_SECTION_ZOOM = 1; const MIN_SECTION_PAN = -1; const MAX_SECTION_PAN = 1; const EXPORT_AUDIO_PRESET_OFF = 'off'; const EXPORT_AUDIO_PRESET_COMPRESSED = 'compressed'; - - function snapToNearestCorner(cursorX, cursorY) { - const midX = CANVAS_W / 2; - const midY = CANVAS_H / 2; + const REEL_CANVAS_W = Math.round(CANVAS_H * 9 / 16); + const REEL_CANVAS_H = CANVAS_H; + const MIN_REEL_CROP_X = -1; + const MAX_REEL_CROP_X = 1; + const DEFAULT_PIP_SCALE = 0.22; + const MIN_PIP_SCALE = 0.15; + const MAX_PIP_SCALE = 0.50; + + function normalizePipScale(value) { + if (value === null || value === undefined) return DEFAULT_PIP_SCALE; + const v = Number(value); + if (!Number.isFinite(v)) return DEFAULT_PIP_SCALE; + return Math.max(MIN_PIP_SCALE, Math.min(MAX_PIP_SCALE, v)); + } + + function snapToNearestCorner(cursorX, cursorY, effectiveW, effectiveH, pipSize) { + const w = effectiveW || CANVAS_W; + const h = effectiveH || CANVAS_H; + const ps = pipSize || PIP_SIZE; + const midX = w / 2; + const midY = h / 2; return { - x: cursorX < midX ? PIP_MARGIN : CANVAS_W - PIP_SIZE - PIP_MARGIN, - y: cursorY < midY ? PIP_MARGIN : CANVAS_H - PIP_SIZE - PIP_MARGIN + x: cursorX < midX ? PIP_MARGIN : w - ps - PIP_MARGIN, + y: cursorY < midY ? PIP_MARGIN : h - ps - PIP_MARGIN }; } function clampSectionZoom(value) { + const minZoom = editorState && editorState.outputMode === 'reel' ? 
MIN_REEL_SECTION_ZOOM : MIN_SECTION_ZOOM; const zoom = Number(value); if (!Number.isFinite(zoom)) return DEFAULT_SECTION_ZOOM; - return Math.max(MIN_SECTION_ZOOM, Math.min(MAX_SECTION_ZOOM, zoom)); + return Math.max(minZoom, Math.min(MAX_SECTION_ZOOM, zoom)); } function formatSectionZoom(value) { @@ -185,6 +213,95 @@ import { return Math.max(MIN_SECTION_PAN, Math.min(MAX_SECTION_PAN, pan)); } + function clampReelCropX(value) { + const v = Number(value); + if (!Number.isFinite(v)) return 0; + return Math.max(MIN_REEL_CROP_X, Math.min(MAX_REEL_CROP_X, v)); + } + + function getEffectiveCanvasDimensions() { + if (!editorState || editorState.outputMode !== 'reel') return { w: CANVAS_W, h: CANVAS_H }; + return { w: REEL_CANVAS_W, h: REEL_CANVAS_H }; + } + + function computePipSize(pipScale, effectiveW) { + return Math.round(effectiveW * pipScale); + } + + function reelCropXToPixelOffset(reelCropX, zoom) { + const z = Math.min(1, Math.max(0, zoom != null ? zoom : 1)); + const scaledW = CANVAS_W * z; + const scaledLeft = (CANVAS_W - scaledW) / 2; + const maxCropRange = Math.max(0, scaledW - REEL_CANVAS_W); + return scaledLeft + ((clampReelCropX(reelCropX) + 1) / 2) * maxCropRange; + } + + function setOutputMode(mode) { + if (!editorState) return; + const newMode = mode === 'reel' ? 
'reel' : 'landscape'; + if (editorState.outputMode === newMode) return; + pushUndo(); + editorState.outputMode = newMode; + + const { w, h } = getEffectiveCanvasDimensions(); + const defaultPs = editorState.pipScale || DEFAULT_PIP_SCALE; + editorState.pipSize = computePipSize(defaultPs, w); + + // Re-map PIP positions and clamp zoom values for the new mode + if (editorState.keyframes) { + for (const kf of editorState.keyframes) { + // When switching to landscape, clamp any zoom < 1 up to 1 + if (newMode === 'landscape' && kf.backgroundZoom < 1) { + kf.backgroundZoom = 1; + } + if (kf.sectionId) { + const kfPipSize = computePipSize(kf.pipScale || defaultPs, w); + const snapped = snapToNearestCorner(kf.pipX, kf.pipY, w, h, kfPipSize); + kf.pipX = snapped.x; + kf.pipY = snapped.y; + } + } + } + editorState.defaultPipX = w - editorState.pipSize - PIP_MARGIN; + editorState.defaultPipY = h - editorState.pipSize - PIP_MARGIN; + + updateOutputModeUI(); + scheduleProjectSave(); + } + + function updateOutputModeUI() { + if (!editorModeLandscapeBtn || !editorModeReelBtn) return; + const isReel = editorState && editorState.outputMode === 'reel'; + editorModeLandscapeBtn.className = isReel + ? 'px-2.5 py-1 text-xs text-neutral-400 hover:text-neutral-200 transition-colors' + : 'px-2.5 py-1 text-xs bg-white text-black transition-colors'; + editorModeReelBtn.className = isReel + ? 'px-2.5 py-1 text-xs bg-white text-black transition-colors' + : 'px-2.5 py-1 text-xs text-neutral-400 hover:text-neutral-200 transition-colors'; + // Show/hide PIP size control when camera is present + if (editorPipSizeControl) { + const showPipControl = editorState && editorState.hasCamera; + editorPipSizeControl.classList.toggle('hidden', !showPipControl); + editorPipSizeControl.classList.toggle('flex', !!showPipControl); + } + if (editorPipSizeInput && editorState) { + const selectedSection = getSelectedSection(); + const sectionAnchor = selectedSection ? 
getSectionAnchorKeyframe(selectedSection.id, false) : null; + const currentPipScale = sectionAnchor ? normalizePipScale(sectionAnchor.pipScale) : (editorState.pipScale || DEFAULT_PIP_SCALE); + editorPipSizeInput.value = String(currentPipScale); + if (editorPipSizeValue) editorPipSizeValue.textContent = currentPipScale.toFixed(2); + } + if (editorCropPresets) { + editorCropPresets.classList.toggle('hidden', !isReel); + editorCropPresets.classList.toggle('flex', !!isReel); + } + // Update zoom slider range based on mode + if (editorBgZoomInput) { + editorBgZoomInput.min = isReel ? '0.5' : '1'; + } + updateSectionZoomControls(); + } + function normalizeExportAudioPreset(value) { return value === EXPORT_AUDIO_PRESET_OFF ? EXPORT_AUDIO_PRESET_OFF @@ -267,6 +384,9 @@ import { let activePlaybackSection = null; let cameraResyncCooldownUntil = 0; let lastCameraDriftLogAt = 0; + let draggingCrop = false; + let cropDragMoved = false; + let cropDragState = null; const editorZoomBuffer = document.createElement('canvas'); editorZoomBuffer.width = CANVAS_W; editorZoomBuffer.height = CANVAS_H; @@ -488,7 +608,9 @@ import { ...kf, backgroundZoom: clampSectionZoom(kf.backgroundZoom), backgroundPanX: clampSectionPan(kf.backgroundPanX), - backgroundPanY: clampSectionPan(kf.backgroundPanY) + backgroundPanY: clampSectionPan(kf.backgroundPanY), + reelCropX: clampReelCropX(kf.reelCropX), + pipScale: normalizePipScale(kf.pipScale) })) : [], selectedSectionId: editorState.selectedSectionId || null, @@ -506,7 +628,9 @@ import { screenFitMode: screenFitSelect.value || 'fill', hideFromRecording: hideFromRecording === 'true', exportAudioPreset: normalizeExportAudioPreset(exportAudioPresetSelect.value), - cameraSyncOffsetMs: normalizeCameraSyncOffsetMs(cameraSyncOffsetInput.value) + cameraSyncOffsetMs: normalizeCameraSyncOffsetMs(cameraSyncOffsetInput.value), + outputMode: editorState?.outputMode || 'landscape', + pipScale: editorState?.pipScale || DEFAULT_PIP_SCALE }, timeline: 
getProjectTimelineSnapshot() }; @@ -751,6 +875,8 @@ import { sourceWidth: project.timeline.sourceWidth || null, sourceHeight: project.timeline.sourceHeight || null, cameraSyncOffsetMs: project.settings?.cameraSyncOffsetMs, + outputMode: project.settings?.outputMode, + pipScale: project.settings?.pipScale, initialView: preferredView === 'recording' ? 'recording' : 'timeline' } ); @@ -826,6 +952,14 @@ import { editorBgZoomInput.disabled = disabled; editorBgZoomInput.value = String(zoom); editorBgZoomValue.textContent = formatSectionZoom(zoom); + + // Update PIP size slider to reflect current section's pipScale + if (editorPipSizeInput && editorState) { + const sectionAnchor = selectedSection ? getSectionAnchorKeyframe(selectedSection.id, false) : null; + const sectionPipScale = sectionAnchor ? normalizePipScale(sectionAnchor.pipScale) : (editorState.pipScale || DEFAULT_PIP_SCALE); + editorPipSizeInput.value = String(sectionPipScale); + if (editorPipSizeValue) editorPipSizeValue.textContent = sectionPipScale.toFixed(2); + } } function getSectionAnchorKeyframe(sectionId, createIfMissing) { @@ -847,6 +981,8 @@ import { backgroundZoom: clampSectionZoom(fallback.backgroundZoom), backgroundPanX: clampSectionPan(fallback.backgroundPanX), backgroundPanY: clampSectionPan(fallback.backgroundPanY), + reelCropX: clampReelCropX(fallback.reelCropX), + pipScale: normalizePipScale(fallback.pipScale), sectionId: section.id, autoSection: true }; @@ -877,6 +1013,8 @@ import { backgroundZoom: existing ? clampSectionZoom(existing.backgroundZoom) : DEFAULT_SECTION_ZOOM, backgroundPanX: existing ? clampSectionPan(existing.backgroundPanX) : 0, backgroundPanY: existing ? clampSectionPan(existing.backgroundPanY) : 0, + reelCropX: existing ? clampReelCropX(existing.reelCropX) : 0, + pipScale: existing ? 
normalizePipScale(existing.pipScale) : (editorState.pipScale || DEFAULT_PIP_SCALE), sectionId: section.id, autoSection: true }; @@ -925,6 +1063,8 @@ import { anchor.backgroundZoom = clampSectionZoom(currentAnchor.backgroundZoom); anchor.backgroundPanX = clampSectionPan(currentAnchor.backgroundPanX); anchor.backgroundPanY = clampSectionPan(currentAnchor.backgroundPanY); + anchor.reelCropX = clampReelCropX(currentAnchor.reelCropX); + anchor.pipScale = normalizePipScale(currentAnchor.pipScale); } renderSectionMarkers(); @@ -1249,7 +1389,9 @@ import { cameraFullscreen: !!kf.cameraFullscreen, backgroundZoom: clampSectionZoom(kf.backgroundZoom), backgroundPanX: clampSectionPan(kf.backgroundPanX), - backgroundPanY: clampSectionPan(kf.backgroundPanY) + backgroundPanY: clampSectionPan(kf.backgroundPanY), + reelCropX: clampReelCropX(kf.reelCropX), + pipScale: normalizePipScale(kf.pipScale) })); if (minimal.length === 0 || minimal[0].time > 0.0001) { @@ -1261,7 +1403,9 @@ import { cameraFullscreen: false, backgroundZoom: DEFAULT_SECTION_ZOOM, backgroundPanX: 0, - backgroundPanY: 0 + backgroundPanY: 0, + reelCropX: 0, + pipScale: editorState.pipScale || DEFAULT_PIP_SCALE }); } @@ -1281,7 +1425,9 @@ import { sourceEnd: section.sourceEnd, backgroundZoom: clampSectionZoom(anchor?.backgroundZoom), backgroundPanX: clampSectionPan(anchor?.backgroundPanX), - backgroundPanY: clampSectionPan(anchor?.backgroundPanY) + backgroundPanY: clampSectionPan(anchor?.backgroundPanY), + reelCropX: clampReelCropX(anchor?.reelCropX), + pipScale: normalizePipScale(anchor?.pipScale) }; }); } @@ -1545,11 +1691,38 @@ import { const zoom = clampSectionZoom(backgroundZoom); const drawBase = fitMode === 'fill' ? drawFill : drawFit; - if (zoom <= 1.0001) { + if (zoom <= 1.0001 && zoom >= 0.9999) { drawBase(targetCtx, video, 0, 0, CANVAS_W, CANVAS_H); return; } + if (zoom < 0.9999) { + // Zoom-out: draw the full frame at reduced scale, centered vertically. 
+ // The reel crop overlay will carve out the 608px strip later. + // First draw base content to the buffer at 1:1 + editorZoomBufferCtx.fillStyle = '#000'; + editorZoomBufferCtx.fillRect(0, 0, CANVAS_W, CANVAS_H); + drawBase(editorZoomBufferCtx, video, 0, 0, CANVAS_W, CANVAS_H); + + // Fill target with black + targetCtx.fillStyle = '#000'; + targetCtx.fillRect(0, 0, CANVAS_W, CANVAS_H); + + // Draw darkened scaled-to-fill background (the content scaled up to fill, darkened) + targetCtx.save(); + targetCtx.globalAlpha = 0.2; + targetCtx.drawImage(editorZoomBuffer, 0, 0, CANVAS_W, CANVAS_H); + targetCtx.restore(); + + // Draw the sharp content at reduced scale, centered (uniform scale preserves aspect ratio) + const scaledW = Math.round(CANVAS_W * zoom); + const scaledH = Math.round(CANVAS_H * zoom); + const offsetX = Math.round((CANVAS_W - scaledW) / 2); + const offsetY = Math.round((CANVAS_H - scaledH) / 2); + targetCtx.drawImage(editorZoomBuffer, 0, 0, CANVAS_W, CANVAS_H, offsetX, offsetY, scaledW, scaledH); + return; + } + editorZoomBufferCtx.fillStyle = '#000'; editorZoomBufferCtx.fillRect(0, 0, CANVAS_W, CANVAS_H); drawBase(editorZoomBufferCtx, video, 0, 0, CANVAS_W, CANVAS_H); @@ -2385,6 +2558,7 @@ import { backgroundZoom: DEFAULT_SECTION_ZOOM, backgroundPanX: 0, backgroundPanY: 0, + reelCropX: 0, sectionId: section.id, autoSection: true })); @@ -2394,18 +2568,32 @@ import { ...kf, backgroundZoom: clampSectionZoom(kf.backgroundZoom), backgroundPanX: clampSectionPan(kf.backgroundPanX), - backgroundPanY: clampSectionPan(kf.backgroundPanY) + backgroundPanY: clampSectionPan(kf.backgroundPanY), + reelCropX: clampReelCropX(kf.reelCropX), + pipScale: normalizePipScale(kf.pipScale) })) : null; const keyframes = (providedKeyframes || sectionKeyframes).sort((a, b) => a.time - b.time); + const outputMode = opts.outputMode === 'reel' ? 
'reel' : 'landscape'; + const pipScale = (() => { + const v = Number(opts.pipScale); + if (opts.pipScale == null || !Number.isFinite(v)) return DEFAULT_PIP_SCALE; + return Math.max(MIN_PIP_SCALE, Math.min(MAX_PIP_SCALE, v)); + })(); + const effectiveW = outputMode === 'reel' ? REEL_CANVAS_W : CANVAS_W; + const effectiveH = outputMode === 'reel' ? REEL_CANVAS_H : CANVAS_H; + const effectivePipSize = computePipSize(pipScale, effectiveW); + const effDefaultPipX = effectiveW - effectivePipSize - PIP_MARGIN; + const effDefaultPipY = effectiveH - effectivePipSize - PIP_MARGIN; + editorState = { duration, currentTime: 0, playing: false, - pipSize: PIP_SIZE, - defaultPipX, - defaultPipY, + pipSize: effectivePipSize, + defaultPipX: effDefaultPipX, + defaultPipY: effDefaultPipY, keyframes, sections, selectedSectionId: opts.selectedSectionId || sections[0]?.id || null, @@ -2416,11 +2604,14 @@ import { cameraSyncOffsetMs: normalizeCameraSyncOffsetMs(opts.cameraSyncOffsetMs), hasCamera: typeof opts.hasCamera === 'boolean' ? opts.hasCamera : false, sourceWidth: opts.sourceWidth || null, - sourceHeight: opts.sourceHeight || null + sourceHeight: opts.sourceHeight || null, + outputMode, + pipScale }; screenFitSelect.value = editorState.screenFitMode === 'fit' ? 'fit' : 'fill'; cameraSyncOffsetInput.value = String(editorState.cameraSyncOffsetMs); updateSectionZoomControls(); + updateOutputModeUI(); // Pre-create video elements for all referenced takes const referencedTakeIds = new Set(sections.map(s => s.takeId).filter(Boolean)); @@ -2488,7 +2679,9 @@ import { cameraFullscreen: false, backgroundZoom: DEFAULT_SECTION_ZOOM, backgroundPanX: 0, - backgroundPanY: 0 + backgroundPanY: 0, + reelCropX: 0, + pipScale: editorState.pipScale || DEFAULT_PIP_SCALE }; const userKfs = editorState.keyframes; const kfs = userKfs.length > 0 && userKfs[0].time === 0 ? 
userKfs : [defaultKf, ...userKfs]; @@ -2513,6 +2706,8 @@ import { let backgroundPanY = clampSectionPan(active.backgroundPanY); let backgroundFocusX = panToFocusCoord(backgroundZoom, backgroundPanX, 0.5); let backgroundFocusY = panToFocusCoord(backgroundZoom, backgroundPanY, 0.5); + let reelCropX = clampReelCropX(active.reelCropX); + let pipScale = normalizePipScale(active.pipScale); // Transition toward next keyframe at end of current section if (next) { @@ -2563,6 +2758,16 @@ import { backgroundFocusY = backgroundFocusY + (nextFocusY - backgroundFocusY) * t; backgroundPanX = focusToPanCoord(backgroundZoom, backgroundFocusX, backgroundPanX); backgroundPanY = focusToPanCoord(backgroundZoom, backgroundFocusY, backgroundPanY); + + const nextReelCropX = clampReelCropX(next.reelCropX); + if (Math.abs(reelCropX - nextReelCropX) > 0.0001) { + reelCropX = reelCropX + (nextReelCropX - reelCropX) * t; + } + + const nextPipScale = normalizePipScale(next.pipScale); + if (Math.abs(pipScale - nextPipScale) > 0.0001) { + pipScale = pipScale + (nextPipScale - pipScale) * t; + } } } @@ -2577,7 +2782,9 @@ import { backgroundPanX, backgroundPanY, backgroundFocusX, - backgroundFocusY + backgroundFocusY, + reelCropX, + pipScale }; } @@ -2729,6 +2936,17 @@ import { } } editorPlayBtn.textContent = 'Play'; + // If a video-frame callback was pending it will never fire now that the + // video is paused, so cancel it and restart the draw loop on the paused + // timer path so the canvas keeps updating (e.g. during drag operations). + if (editorVideoFrameCallbackId !== null && editorVideoFrameHost) { + editorVideoFrameHost.cancelVideoFrameCallback(editorVideoFrameCallbackId); + editorVideoFrameCallbackId = null; + editorVideoFrameHost = null; + } + if (!hasPendingEditorDraw()) { + scheduleEditorDrawLoop(); + } } function editorTogglePlay() { @@ -2847,26 +3065,57 @@ import { ); } + const isReel = editorState.outputMode === 'reel'; + const cropPixelX = isReel ? 
reelCropXToPixelOffset(state.reelCropX, state.backgroundZoom) : 0; + const effectiveW = isReel ? REEL_CANVAS_W : CANVAS_W; + const currentPipSize = computePipSize(state.pipScale, effectiveW); + if (hasCamera) { if (state.camTransition > 0 && state.opacity > 0) { editorCtx.save(); if (state.opacity < 1) editorCtx.globalAlpha = state.opacity; const t = easeInOut(state.camTransition); - const camX = state.pipX * (1 - t); - const camY = state.pipY * (1 - t); - const camW = editorState.pipSize + (CANVAS_W - editorState.pipSize) * t; - const camH = editorState.pipSize + (CANVAS_H - editorState.pipSize) * t; + const fullW = isReel ? REEL_CANVAS_W : CANVAS_W; + const fullH = isReel ? REEL_CANVAS_H : CANVAS_H; + const drawPipX = isReel ? state.pipX + cropPixelX : state.pipX; + const drawPipY = state.pipY; + const camX = drawPipX * (1 - t) + (isReel ? cropPixelX : 0) * t; + const camY = drawPipY * (1 - t); + const camW = currentPipSize + (fullW - currentPipSize) * t; + const camH = currentPipSize + (fullH - currentPipSize) * t; const camR = 12 * (1 - t); drawCameraRect(editorCtx, activeVideos.camera, camX, camY, camW, camH, camR); editorCtx.restore(); } else if (state.opacity > 0) { editorCtx.save(); editorCtx.globalAlpha = state.opacity; - drawPip(editorCtx, activeVideos.camera, state.pipX, state.pipY, editorState.pipSize, editorState.pipSize); + const drawPipX = isReel ? 
state.pipX + cropPixelX : state.pipX; + drawPip(editorCtx, activeVideos.camera, drawPipX, state.pipY, currentPipSize, currentPipSize); editorCtx.restore(); } } + // Draw reel crop overlay + if (isReel) { + editorCtx.save(); + editorCtx.fillStyle = 'rgba(0, 0, 0, 0.55)'; + // Left overlay + if (cropPixelX > 0) { + editorCtx.fillRect(0, 0, cropPixelX, CANVAS_H); + } + // Right overlay + const rightEdge = cropPixelX + REEL_CANVAS_W; + if (rightEdge < CANVAS_W) { + editorCtx.fillRect(rightEdge, 0, CANVAS_W - rightEdge, CANVAS_H); + } + // Dashed crop boundary + editorCtx.strokeStyle = 'rgba(255, 255, 255, 0.6)'; + editorCtx.lineWidth = 2; + editorCtx.setLineDash([8, 6]); + editorCtx.strokeRect(cropPixelX, 0, REEL_CANVAS_W, CANVAS_H); + editorCtx.restore(); + } + scheduleEditorDrawLoop(); } @@ -2968,10 +3217,16 @@ import { if (activeSection) selectEditorSection(activeSection.id); const { x, y } = canvasToEditorCoords(e.clientX, e.clientY); const kf = getStateAtTime(editorState.currentTime); + const isReel = editorState.outputMode === 'reel'; + const cropOffsetX = isReel ? reelCropXToPixelOffset(kf.reelCropX, kf.backgroundZoom) : 0; + + // PIP hit-test: in reel mode, PIP coords are relative to crop region if (editorState.hasCamera && kf.pipVisible && kf.camTransition <= 0) { - const pipW = editorState.pipSize; - const pipH = editorState.pipSize; - if (x >= kf.pipX && x <= kf.pipX + pipW && y >= kf.pipY && y <= kf.pipY + pipH) { + const hitEffW = isReel ? REEL_CANVAS_W : CANVAS_W; + const pipW = computePipSize(kf.pipScale, hitEffW); + const pipH = pipW; + const drawPipX = isReel ? 
kf.pipX + cropOffsetX : kf.pipX; + if (x >= drawPipX && x <= drawPipX + pipW && y >= kf.pipY && y <= kf.pipY + pipH) { pipDragMoved = false; pushUndo(); draggingPip = true; @@ -2980,6 +3235,26 @@ import { } } + // Crop region drag: in reel mode, detect mousedown within crop region + if (isReel && activeSection) { + const cropLeft = cropOffsetX; + const cropRight = cropOffsetX + REEL_CANVAS_W; + if (x >= cropLeft && x <= cropRight && y >= 0 && y <= CANVAS_H) { + cropDragMoved = false; + pushUndo(); + draggingCrop = true; + const anchor = getSectionAnchorKeyframe(activeSection.id, true); + cropDragState = { + sectionId: activeSection.id, + startMouseX: x, + startCropX: anchor ? clampReelCropX(anchor.reelCropX) : 0, + zoom: kf.backgroundZoom || 1 + }; + e.preventDefault(); + return; + } + } + if (!activeSection || kf.backgroundZoom <= 1.0001 || (kf.cameraFullscreen && kf.opacity > 0)) return; const initialPan = getSectionBackgroundPan(activeSection.id); pushUndo(); @@ -2997,6 +3272,23 @@ import { }); window.addEventListener('mousemove', (e) => { + // Crop region drag + if (draggingCrop && editorState && cropDragState) { + const { x } = canvasToEditorCoords(e.clientX, e.clientY); + const deltaX = x - cropDragState.startMouseX; + const zoom = cropDragState.zoom || 1; + const scaledW = CANVAS_W * Math.min(1, zoom); + const maxCropRange = Math.max(0, scaledW - REEL_CANVAS_W); + const deltaCropX = maxCropRange > 0 ? 
(deltaX / maxCropRange) * 2 : 0; + const newCropX = clampReelCropX(cropDragState.startCropX + deltaCropX); + const anchor = getSectionAnchorKeyframe(cropDragState.sectionId, true); + if (anchor && Math.abs(clampReelCropX(anchor.reelCropX) - newCropX) > 0.001) { + anchor.reelCropX = newCropX; + cropDragMoved = true; + } + return; + } + if (draggingBackground && editorState && backgroundDragState) { const { x, y } = canvasToEditorCoords(e.clientX, e.clientY); const deltaX = x - backgroundDragState.startMouseX; @@ -3015,7 +3307,13 @@ import { if (!draggingPip || !editorState) return; pipDragMoved = true; const { x, y } = canvasToEditorCoords(e.clientX, e.clientY); - const snapped = snapToNearestCorner(x, y); + const isReel = editorState.outputMode === 'reel'; + const { w, h } = getEffectiveCanvasDimensions(); + // In reel mode, translate mouse coords to crop-relative for snapping + const currentState = getStateAtTime(editorState.currentTime); + const snapX = isReel ? x - reelCropXToPixelOffset(currentState.reelCropX, currentState.backgroundZoom) : x; + const dragPipSize = computePipSize(currentState.pipScale, w); + const snapped = snapToNearestCorner(snapX, y, w, h, dragPipSize); const selectedSection = getSelectedSection(); const section = selectedSection || findSectionForTime(editorState.currentTime); @@ -3029,6 +3327,19 @@ import { }); window.addEventListener('mouseup', () => { + const wasDraggingCrop = draggingCrop; + draggingCrop = false; + cropDragState = null; + if (wasDraggingCrop) { + if (cropDragMoved) { + scheduleProjectSave(); + } else { + undoStack.pop(); + updateUndoRedoButtons(); + } + cropDragMoved = false; + } + const wasDraggingBackground = draggingBackground; draggingBackground = false; backgroundDragState = null; @@ -3155,6 +3466,63 @@ import { editorBgZoomInput.addEventListener('change', commitSectionZoomChange); editorBgZoomInput.addEventListener('pointerup', commitSectionZoomChange); editorBgZoomInput.addEventListener('blur', 
commitSectionZoomChange); + + // Output mode toggle buttons + if (editorModeLandscapeBtn) { + editorModeLandscapeBtn.addEventListener('click', () => setOutputMode('landscape')); + } + if (editorModeReelBtn) { + editorModeReelBtn.addEventListener('click', () => setOutputMode('reel')); + } + + // Crop preset buttons + function setCropPreset(cropX) { + if (!editorState || editorState.rendering || editorState.outputMode !== 'reel') return; + const section = getSelectedSection() || findSectionForTime(editorState.currentTime); + if (!section) return; + const anchor = getSectionAnchorKeyframe(section.id, true); + if (!anchor) return; + if (Math.abs(clampReelCropX(anchor.reelCropX) - cropX) < 0.001) return; + pushUndo(); + anchor.reelCropX = cropX; + scheduleProjectSave(); + } + if (editorCropLeftBtn) editorCropLeftBtn.addEventListener('click', () => setCropPreset(-1)); + if (editorCropCenterBtn) editorCropCenterBtn.addEventListener('click', () => setCropPreset(0)); + if (editorCropRightBtn) editorCropRightBtn.addEventListener('click', () => setCropPreset(1)); + + // PIP size slider — controls current section's anchor pipScale + let pipSizeDragActive = false; + if (editorPipSizeInput) { + editorPipSizeInput.addEventListener('input', () => { + if (!editorState || editorState.rendering) return; + const newScale = Math.max(MIN_PIP_SCALE, Math.min(MAX_PIP_SCALE, Number(editorPipSizeInput.value))); + if (!pipSizeDragActive) pushUndo(); + pipSizeDragActive = true; + + const section = getSelectedSection() || findSectionForTime(editorState.currentTime); + if (section) { + const anchor = getSectionAnchorKeyframe(section.id, true); + if (anchor) { + anchor.pipScale = newScale; + // Re-snap this section's PIP to nearest corner with new size + const { w, h } = getEffectiveCanvasDimensions(); + const newPipSize = computePipSize(newScale, w); + const snapped = snapToNearestCorner(anchor.pipX, anchor.pipY, w, h, newPipSize); + anchor.pipX = snapped.x; + anchor.pipY = snapped.y; + } + } + 
+ if (editorPipSizeValue) editorPipSizeValue.textContent = newScale.toFixed(2); + scheduleProjectSave(); + }); + const commitPipSizeChange = () => { pipSizeDragActive = false; }; + editorPipSizeInput.addEventListener('change', commitPipSizeChange); + editorPipSizeInput.addEventListener('pointerup', commitPipSizeChange); + editorPipSizeInput.addEventListener('blur', commitPipSizeChange); + } + updateSectionZoomControls(); // ===== Render pipeline ===== @@ -3222,6 +3590,7 @@ import { cameraSyncOffsetMs: editorState.cameraSyncOffsetMs, sourceWidth: editorState.sourceWidth || CANVAS_W, sourceHeight: editorState.sourceHeight || CANVAS_H, + outputMode: editorState.outputMode || 'landscape', outputFolder: saveFolder }); diff --git a/src/renderer/features/timeline/keyframe-ops.js b/src/renderer/features/timeline/keyframe-ops.js index b2bbcd1..4441379 100644 --- a/src/renderer/features/timeline/keyframe-ops.js +++ b/src/renderer/features/timeline/keyframe-ops.js @@ -39,6 +39,8 @@ export function buildSplitAnchorKeyframe(keyframes, parentSectionId, newSectionI backgroundZoom: parent?.backgroundZoom ?? defaultZoom, backgroundPanX: parent?.backgroundPanX ?? 0, backgroundPanY: parent?.backgroundPanY ?? 0, + reelCropX: parent?.reelCropX ?? 0, + pipScale: parent?.pipScale ?? defaults.pipScale ?? 
0.22, sectionId: newSectionId, autoSection: true }; diff --git a/src/renderer/styles/main.css b/src/renderer/styles/main.css index d0fd115..ac7f337 100644 --- a/src/renderer/styles/main.css +++ b/src/renderer/styles/main.css @@ -1,3 +1,3 @@ @import url("https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap");*,:after,:before{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness:proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:rgba(59,130,246,.5);--tw-ring-offset-shadow:0 0 #0000;--tw-ring-shadow:0 0 #0000;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }::backdrop{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness:proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: 
;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:rgba(59,130,246,.5);--tw-ring-offset-shadow:0 0 #0000;--tw-ring-shadow:0 0 #0000;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: } -/*! tailwindcss v3.4.17 | MIT License | https://tailwindcss.com*/*,:after,:before{box-sizing:border-box;border:0 solid #e5e7eb}:after,:before{--tw-content:""}:host,html{line-height:1.5;-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;font-family:Inter,system-ui,-apple-system,sans-serif;font-feature-settings:normal;font-variation-settings:normal;-webkit-tap-highlight-color:transparent}body{margin:0;line-height:inherit}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,pre,samp{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier 
New,monospace;font-feature-settings:normal;font-variation-settings:normal;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-family:inherit;font-feature-settings:inherit;font-variation-settings:inherit;font-size:100%;font-weight:inherit;line-height:inherit;letter-spacing:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}button,input:where([type=button]),input:where([type=reset]),input:where([type=submit]){-webkit-appearance:button;background-color:transparent;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dd,dl,figure,h1,h2,h3,h4,h5,h6,hr,p,pre{margin:0}fieldset{margin:0}fieldset,legend{padding:0}menu,ol,ul{list-style:none;margin:0;padding:0}dialog{padding:0}textarea{resize:vertical}input::-moz-placeholder,textarea::-moz-placeholder{opacity:1;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}[role=button],button{cursor:pointer}:disabled{cursor:default}audio,canvas,embed,iframe,img,object,svg,video{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}[hidden]:where(:not([hidden=until-found])){display:none}.pointer-events-none{pointer-events:none}.visible{visibility:visible}.fixed{position:fixed}.absolute{position:absolute}.relative{position:relative}.inset-0{inset:0}.bottom-0{bottom:0}.top-0{top:0}.z-0{z-index:0}.z-20{z-index:20}.-mx-1{margin-left:-.25rem;margin-right:-.25rem}.mx-1{margin-left:.25rem;margin-right:.25rem}.mb-1{margin-bottom:.25re
m}.mb-2{margin-bottom:.5rem}.ml-3{margin-left:.75rem}.mt-0\.5{margin-top:.125rem}.mt-1{margin-top:.25rem}.block{display:block}.flex{display:flex}.hidden{display:none}.h-12{height:3rem}.h-2{height:.5rem}.h-5{height:1.25rem}.h-full{height:100%}.h-screen{height:100vh}.min-h-0{min-height:0}.w-0\.5{width:.125rem}.w-10{width:2.5rem}.w-20{width:5rem}.w-28{width:7rem}.w-56{width:14rem}.w-64{width:16rem}.w-full{width:100%}.w-px{width:1px}.min-w-0{min-width:0}.min-w-\[46px\]{min-width:46px}.min-w-\[80px\]{min-width:80px}.max-w-lg{max-width:32rem}.flex-1{flex:1 1 0%}.shrink-0{flex-shrink:0}.transform{transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}@keyframes pulse{50%{opacity:.5}}.animate-pulse{animation:pulse 2s cubic-bezier(.4,0,.6,1) infinite}.cursor-pointer{cursor:pointer}.cursor-wait{cursor:wait}.select-none{-webkit-user-select:none;-moz-user-select:none;user-select:none}.flex-col{flex-direction:column}.items-start{align-items:flex-start}.items-center{align-items:center}.justify-center{justify-content:center}.justify-between{justify-content:space-between}.gap-2{gap:.5rem}.gap-3{gap:.75rem}.gap-4{gap:1rem}.space-y-0\.5>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(.125rem*(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.125rem*var(--tw-space-y-reverse))}.space-y-1\.5>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(.375rem*(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.375rem*var(--tw-space-y-reverse))}.space-y-2>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(.5rem*(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.5rem*var(--tw-space-y-reverse))}.space-y-3>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(.75rem*(1 - 
var(--tw-space-y-reverse)));margin-bottom:calc(.75rem*var(--tw-space-y-reverse))}.space-y-5>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(1.25rem*(1 - var(--tw-space-y-reverse)));margin-bottom:calc(1.25rem*var(--tw-space-y-reverse))}.overflow-hidden{overflow:hidden}.overflow-x-auto{overflow-x:auto}.overflow-y-auto{overflow-y:auto}.overflow-y-hidden{overflow-y:hidden}.truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.rounded{border-radius:.25rem}.rounded-full{border-radius:9999px}.rounded-lg{border-radius:.5rem}.rounded-md{border-radius:.375rem}.rounded-xl{border-radius:.75rem}.border{border-width:1px}.border-b{border-bottom-width:1px}.border-blue-500\/40{border-color:rgba(59,130,246,.4)}.border-neutral-700{--tw-border-opacity:1;border-color:rgb(64 64 64/var(--tw-border-opacity,1))}.border-neutral-800{--tw-border-opacity:1;border-color:rgb(38 38 38/var(--tw-border-opacity,1))}.border-neutral-800\/80{border-color:rgba(38,38,38,.8)}.border-red-500\/40{border-color:rgba(239,68,68,.4)}.bg-amber-500{--tw-bg-opacity:1;background-color:rgb(245 158 11/var(--tw-bg-opacity,1))}.bg-black{--tw-bg-opacity:1;background-color:rgb(0 0 0/var(--tw-bg-opacity,1))}.bg-blue-500\/10{background-color:rgba(59,130,246,.1)}.bg-emerald-500{--tw-bg-opacity:1;background-color:rgb(16 185 129/var(--tw-bg-opacity,1))}.bg-emerald-600{--tw-bg-opacity:1;background-color:rgb(5 150 105/var(--tw-bg-opacity,1))}.bg-neutral-700{--tw-bg-opacity:1;background-color:rgb(64 64 64/var(--tw-bg-opacity,1))}.bg-neutral-800{--tw-bg-opacity:1;background-color:rgb(38 38 38/var(--tw-bg-opacity,1))}.bg-neutral-900{--tw-bg-opacity:1;background-color:rgb(23 23 23/var(--tw-bg-opacity,1))}.bg-neutral-950{--tw-bg-opacity:1;background-color:rgb(10 10 10/var(--tw-bg-opacity,1))}.bg-neutral-950\/95{background-color:hsla(0,0%,4%,.95)}.bg-red-500{--tw-bg-opacity:1;background-color:rgb(239 68 
68/var(--tw-bg-opacity,1))}.bg-red-500\/10{background-color:rgba(239,68,68,.1)}.bg-red-600{--tw-bg-opacity:1;background-color:rgb(220 38 38/var(--tw-bg-opacity,1))}.bg-white{--tw-bg-opacity:1;background-color:rgb(255 255 255/var(--tw-bg-opacity,1))}.object-contain{-o-object-fit:contain;object-fit:contain}.p-1{padding:.25rem}.p-4{padding:1rem}.p-6{padding:1.5rem}.px-1{padding-left:.25rem;padding-right:.25rem}.px-1\.5{padding-left:.375rem;padding-right:.375rem}.px-2{padding-left:.5rem;padding-right:.5rem}.px-2\.5{padding-left:.625rem;padding-right:.625rem}.px-3{padding-left:.75rem;padding-right:.75rem}.px-3\.5{padding-left:.875rem;padding-right:.875rem}.px-4{padding-left:1rem;padding-right:1rem}.py-0\.5{padding-top:.125rem;padding-bottom:.125rem}.py-1{padding-top:.25rem;padding-bottom:.25rem}.py-1\.5{padding-top:.375rem;padding-bottom:.375rem}.py-2{padding-top:.5rem;padding-bottom:.5rem}.py-2\.5{padding-top:.625rem;padding-bottom:.625rem}.py-3{padding-top:.75rem;padding-bottom:.75rem}.py-6{padding-top:1.5rem;padding-bottom:1.5rem}.text-left{text-align:left}.text-center{text-align:center}.text-right{text-align:right}.font-mono{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace}.font-sans{font-family:Inter,system-ui,-apple-system,sans-serif}.text-\[10px\]{font-size:10px}.text-\[11px\]{font-size:11px}.text-lg{font-size:1.125rem;line-height:1.75rem}.text-sm{font-size:.875rem;line-height:1.25rem}.text-xl{font-size:1.25rem;line-height:1.75rem}.text-xs{font-size:.75rem;line-height:1rem}.font-medium{font-weight:500}.font-semibold{font-weight:600}.uppercase{text-transform:uppercase}.italic{font-style:italic}.tabular-nums{--tw-numeric-spacing:tabular-nums;font-variant-numeric:var(--tw-ordinal) var(--tw-slashed-zero) var(--tw-numeric-figure) var(--tw-numeric-spacing) 
var(--tw-numeric-fraction)}.leading-relaxed{line-height:1.625}.leading-snug{line-height:1.375}.tracking-tight{letter-spacing:-.025em}.tracking-wider{letter-spacing:.05em}.text-blue-200{--tw-text-opacity:1;color:rgb(191 219 254/var(--tw-text-opacity,1))}.text-neutral-100{--tw-text-opacity:1;color:rgb(245 245 245/var(--tw-text-opacity,1))}.text-neutral-200{--tw-text-opacity:1;color:rgb(229 229 229/var(--tw-text-opacity,1))}.text-neutral-300{--tw-text-opacity:1;color:rgb(212 212 212/var(--tw-text-opacity,1))}.text-neutral-400{--tw-text-opacity:1;color:rgb(163 163 163/var(--tw-text-opacity,1))}.text-neutral-50{--tw-text-opacity:1;color:rgb(250 250 250/var(--tw-text-opacity,1))}.text-neutral-500{--tw-text-opacity:1;color:rgb(115 115 115/var(--tw-text-opacity,1))}.text-neutral-600{--tw-text-opacity:1;color:rgb(82 82 82/var(--tw-text-opacity,1))}.text-neutral-950{--tw-text-opacity:1;color:rgb(10 10 10/var(--tw-text-opacity,1))}.text-red-200{--tw-text-opacity:1;color:rgb(254 202 202/var(--tw-text-opacity,1))}.text-white{--tw-text-opacity:1;color:rgb(255 255 255/var(--tw-text-opacity,1))}.line-through{text-decoration-line:line-through}.antialiased{-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.accent-white{accent-color:#fff}.outline{outline-style:solid}.blur{--tw-blur:blur(8px)}.blur,.filter{filter:var(--tw-blur) var(--tw-brightness) var(--tw-contrast) var(--tw-grayscale) var(--tw-hue-rotate) var(--tw-invert) var(--tw-saturate) var(--tw-sepia) var(--tw-drop-shadow)}.backdrop-blur-md{--tw-backdrop-blur:blur(12px);-webkit-backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) 
var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia)}.transition{transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,-webkit-backdrop-filter;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter,-webkit-backdrop-filter;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.transition-all{transition-property:all;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.transition-colors{transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.duration-300{transition-duration:.3s}.duration-75{transition-duration:75ms}select{-webkit-appearance:none;-moz-appearance:none;appearance:none;background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' fill='none' stroke='%23737373' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' viewBox='0 0 24 24'%3E%3Cpath d='m6 9 6 6 6-6'/%3E%3C/svg%3E");background-repeat:no-repeat;background-position:right 10px center;padding-right:32px}::-webkit-scrollbar{width:6px;height:6px}::-webkit-scrollbar-track{background:transparent}::-webkit-scrollbar-thumb{background:#404040;border-radius:3px}::-webkit-scrollbar-thumb:hover{background:#525252}.placeholder\:text-neutral-600::-moz-placeholder{--tw-text-opacity:1;color:rgb(82 82 82/var(--tw-text-opacity,1))}.placeholder\:text-neutral-600::placeholder{--tw-text-opacity:1;color:rgb(82 82 82/var(--tw-text-opacity,1))}.hover\:border-neutral-700:hover{--tw-border-opacity:1;border-color:rgb(64 64 
64/var(--tw-border-opacity,1))}.hover\:bg-neutral-200:hover{--tw-bg-opacity:1;background-color:rgb(229 229 229/var(--tw-bg-opacity,1))}.hover\:bg-neutral-600:hover{--tw-bg-opacity:1;background-color:rgb(82 82 82/var(--tw-bg-opacity,1))}.hover\:bg-neutral-700:hover{--tw-bg-opacity:1;background-color:rgb(64 64 64/var(--tw-bg-opacity,1))}.hover\:bg-neutral-800:hover{--tw-bg-opacity:1;background-color:rgb(38 38 38/var(--tw-bg-opacity,1))}.hover\:bg-neutral-800\/60:hover{background-color:rgba(38,38,38,.6)}.hover\:bg-neutral-900:hover{--tw-bg-opacity:1;background-color:rgb(23 23 23/var(--tw-bg-opacity,1))}.hover\:bg-red-700:hover{--tw-bg-opacity:1;background-color:rgb(185 28 28/var(--tw-bg-opacity,1))}.hover\:text-neutral-200:hover{--tw-text-opacity:1;color:rgb(229 229 229/var(--tw-text-opacity,1))}.hover\:text-neutral-300:hover{--tw-text-opacity:1;color:rgb(212 212 212/var(--tw-text-opacity,1))}.focus\:border-neutral-600:focus{--tw-border-opacity:1;border-color:rgb(82 82 82/var(--tw-border-opacity,1))}.focus\:outline-none:focus{outline:2px solid transparent;outline-offset:2px}.focus\:ring-1:focus{--tw-ring-offset-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset) 0 0 0 calc(1px + var(--tw-ring-offset-width)) var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 #0000)}.focus\:ring-neutral-600:focus{--tw-ring-opacity:1;--tw-ring-color:rgb(82 82 82/var(--tw-ring-opacity,1))}.disabled\:cursor-default:disabled{cursor:default}.disabled\:cursor-not-allowed:disabled{cursor:not-allowed}.disabled\:opacity-40:disabled{opacity:.4}.disabled\:opacity-50:disabled{opacity:.5}.disabled\:hover\:bg-neutral-800:hover:disabled{--tw-bg-opacity:1;background-color:rgb(38 38 38/var(--tw-bg-opacity,1))} \ No newline at end of file +/*! 
tailwindcss v3.4.17 | MIT License | https://tailwindcss.com*/*,:after,:before{box-sizing:border-box;border:0 solid #e5e7eb}:after,:before{--tw-content:""}:host,html{line-height:1.5;-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;font-family:Inter,system-ui,-apple-system,sans-serif;font-feature-settings:normal;font-variation-settings:normal;-webkit-tap-highlight-color:transparent}body{margin:0;line-height:inherit}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,pre,samp{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;font-feature-settings:normal;font-variation-settings:normal;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-family:inherit;font-feature-settings:inherit;font-variation-settings:inherit;font-size:100%;font-weight:inherit;line-height:inherit;letter-spacing:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}button,input:where([type=button]),input:where([type=reset]),input:where([type=submit]){-webkit-appearance:button;background-color:transparent;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dd,dl,figure,h1,h2,h3,h4,h5,h6,hr,p,pre{margin:0}fieldset{margin:0}fieldset,legend{padding:0}men
u,ol,ul{list-style:none;margin:0;padding:0}dialog{padding:0}textarea{resize:vertical}input::-moz-placeholder,textarea::-moz-placeholder{opacity:1;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}[role=button],button{cursor:pointer}:disabled{cursor:default}audio,canvas,embed,iframe,img,object,svg,video{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}[hidden]:where(:not([hidden=until-found])){display:none}.pointer-events-none{pointer-events:none}.visible{visibility:visible}.fixed{position:fixed}.absolute{position:absolute}.relative{position:relative}.inset-0{inset:0}.bottom-0{bottom:0}.top-0{top:0}.z-0{z-index:0}.z-20{z-index:20}.-mx-1{margin-left:-.25rem;margin-right:-.25rem}.mx-1{margin-left:.25rem;margin-right:.25rem}.mb-1{margin-bottom:.25rem}.mb-2{margin-bottom:.5rem}.ml-3{margin-left:.75rem}.mt-0\.5{margin-top:.125rem}.mt-1{margin-top:.25rem}.block{display:block}.flex{display:flex}.hidden{display:none}.h-12{height:3rem}.h-2{height:.5rem}.h-5{height:1.25rem}.h-full{height:100%}.h-screen{height:100vh}.min-h-0{min-height:0}.w-0\.5{width:.125rem}.w-10{width:2.5rem}.w-20{width:5rem}.w-24{width:6rem}.w-28{width:7rem}.w-56{width:14rem}.w-64{width:16rem}.w-full{width:100%}.w-px{width:1px}.min-w-0{min-width:0}.min-w-\[38px\]{min-width:38px}.min-w-\[46px\]{min-width:46px}.min-w-\[80px\]{min-width:80px}.max-w-lg{max-width:32rem}.flex-1{flex:1 1 0%}.shrink-0{flex-shrink:0}.transform{transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}@keyframes pulse{50%{opacity:.5}}.animate-pulse{animation:pulse 2s cubic-bezier(.4,0,.6,1) 
infinite}.cursor-pointer{cursor:pointer}.cursor-wait{cursor:wait}.select-none{-webkit-user-select:none;-moz-user-select:none;user-select:none}.flex-col{flex-direction:column}.items-start{align-items:flex-start}.items-center{align-items:center}.justify-center{justify-content:center}.justify-between{justify-content:space-between}.gap-2{gap:.5rem}.gap-3{gap:.75rem}.gap-4{gap:1rem}.space-y-0\.5>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(.125rem*(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.125rem*var(--tw-space-y-reverse))}.space-y-1\.5>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(.375rem*(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.375rem*var(--tw-space-y-reverse))}.space-y-2>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(.5rem*(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.5rem*var(--tw-space-y-reverse))}.space-y-3>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(.75rem*(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.75rem*var(--tw-space-y-reverse))}.space-y-5>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(1.25rem*(1 - var(--tw-space-y-reverse)));margin-bottom:calc(1.25rem*var(--tw-space-y-reverse))}.overflow-hidden{overflow:hidden}.overflow-x-auto{overflow-x:auto}.overflow-y-auto{overflow-y:auto}.overflow-y-hidden{overflow-y:hidden}.truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.rounded{border-radius:.25rem}.rounded-full{border-radius:9999px}.rounded-lg{border-radius:.5rem}.rounded-md{border-radius:.375rem}.rounded-xl{border-radius:.75rem}.border{border-width:1px}.border-b{border-bottom-width:1px}.border-blue-500\/40{border-color:rgba(59,130,246,.4)}.border-neutral-700{--tw-border-opacity:1;border-color:rgb(64 64 64/var(--tw-border-opacity,1))}.border-neutral-800{--tw-border-opacity:1;border-color:rgb(38 38 
38/var(--tw-border-opacity,1))}.border-neutral-800\/80{border-color:rgba(38,38,38,.8)}.border-red-500\/40{border-color:rgba(239,68,68,.4)}.bg-amber-500{--tw-bg-opacity:1;background-color:rgb(245 158 11/var(--tw-bg-opacity,1))}.bg-black{--tw-bg-opacity:1;background-color:rgb(0 0 0/var(--tw-bg-opacity,1))}.bg-blue-500\/10{background-color:rgba(59,130,246,.1)}.bg-emerald-500{--tw-bg-opacity:1;background-color:rgb(16 185 129/var(--tw-bg-opacity,1))}.bg-emerald-600{--tw-bg-opacity:1;background-color:rgb(5 150 105/var(--tw-bg-opacity,1))}.bg-neutral-700{--tw-bg-opacity:1;background-color:rgb(64 64 64/var(--tw-bg-opacity,1))}.bg-neutral-800{--tw-bg-opacity:1;background-color:rgb(38 38 38/var(--tw-bg-opacity,1))}.bg-neutral-900{--tw-bg-opacity:1;background-color:rgb(23 23 23/var(--tw-bg-opacity,1))}.bg-neutral-950{--tw-bg-opacity:1;background-color:rgb(10 10 10/var(--tw-bg-opacity,1))}.bg-neutral-950\/95{background-color:hsla(0,0%,4%,.95)}.bg-red-500{--tw-bg-opacity:1;background-color:rgb(239 68 68/var(--tw-bg-opacity,1))}.bg-red-500\/10{background-color:rgba(239,68,68,.1)}.bg-red-600{--tw-bg-opacity:1;background-color:rgb(220 38 38/var(--tw-bg-opacity,1))}.bg-white{--tw-bg-opacity:1;background-color:rgb(255 255 
255/var(--tw-bg-opacity,1))}.object-contain{-o-object-fit:contain;object-fit:contain}.p-1{padding:.25rem}.p-4{padding:1rem}.p-6{padding:1.5rem}.px-1{padding-left:.25rem;padding-right:.25rem}.px-1\.5{padding-left:.375rem;padding-right:.375rem}.px-2{padding-left:.5rem;padding-right:.5rem}.px-2\.5{padding-left:.625rem;padding-right:.625rem}.px-3{padding-left:.75rem;padding-right:.75rem}.px-3\.5{padding-left:.875rem;padding-right:.875rem}.px-4{padding-left:1rem;padding-right:1rem}.py-0\.5{padding-top:.125rem;padding-bottom:.125rem}.py-1{padding-top:.25rem;padding-bottom:.25rem}.py-1\.5{padding-top:.375rem;padding-bottom:.375rem}.py-2{padding-top:.5rem;padding-bottom:.5rem}.py-2\.5{padding-top:.625rem;padding-bottom:.625rem}.py-3{padding-top:.75rem;padding-bottom:.75rem}.py-6{padding-top:1.5rem;padding-bottom:1.5rem}.text-left{text-align:left}.text-center{text-align:center}.text-right{text-align:right}.font-mono{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace}.font-sans{font-family:Inter,system-ui,-apple-system,sans-serif}.text-\[10px\]{font-size:10px}.text-\[11px\]{font-size:11px}.text-lg{font-size:1.125rem;line-height:1.75rem}.text-sm{font-size:.875rem;line-height:1.25rem}.text-xl{font-size:1.25rem;line-height:1.75rem}.text-xs{font-size:.75rem;line-height:1rem}.font-medium{font-weight:500}.font-semibold{font-weight:600}.uppercase{text-transform:uppercase}.italic{font-style:italic}.tabular-nums{--tw-numeric-spacing:tabular-nums;font-variant-numeric:var(--tw-ordinal) var(--tw-slashed-zero) var(--tw-numeric-figure) var(--tw-numeric-spacing) var(--tw-numeric-fraction)}.leading-relaxed{line-height:1.625}.leading-snug{line-height:1.375}.tracking-tight{letter-spacing:-.025em}.tracking-wider{letter-spacing:.05em}.text-black{--tw-text-opacity:1;color:rgb(0 0 0/var(--tw-text-opacity,1))}.text-blue-200{--tw-text-opacity:1;color:rgb(191 219 254/var(--tw-text-opacity,1))}.text-neutral-100{--tw-text-opacity:1;color:rgb(245 245 
245/var(--tw-text-opacity,1))}.text-neutral-200{--tw-text-opacity:1;color:rgb(229 229 229/var(--tw-text-opacity,1))}.text-neutral-300{--tw-text-opacity:1;color:rgb(212 212 212/var(--tw-text-opacity,1))}.text-neutral-400{--tw-text-opacity:1;color:rgb(163 163 163/var(--tw-text-opacity,1))}.text-neutral-50{--tw-text-opacity:1;color:rgb(250 250 250/var(--tw-text-opacity,1))}.text-neutral-500{--tw-text-opacity:1;color:rgb(115 115 115/var(--tw-text-opacity,1))}.text-neutral-600{--tw-text-opacity:1;color:rgb(82 82 82/var(--tw-text-opacity,1))}.text-neutral-950{--tw-text-opacity:1;color:rgb(10 10 10/var(--tw-text-opacity,1))}.text-red-200{--tw-text-opacity:1;color:rgb(254 202 202/var(--tw-text-opacity,1))}.text-white{--tw-text-opacity:1;color:rgb(255 255 255/var(--tw-text-opacity,1))}.line-through{text-decoration-line:line-through}.antialiased{-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.accent-white{accent-color:#fff}.outline{outline-style:solid}.blur{--tw-blur:blur(8px)}.blur,.filter{filter:var(--tw-blur) var(--tw-brightness) var(--tw-contrast) var(--tw-grayscale) var(--tw-hue-rotate) var(--tw-invert) var(--tw-saturate) var(--tw-sepia) var(--tw-drop-shadow)}.backdrop-blur-md{--tw-backdrop-blur:blur(12px);-webkit-backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) 
var(--tw-backdrop-sepia)}.transition{transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,-webkit-backdrop-filter;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter,-webkit-backdrop-filter;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.transition-all{transition-property:all;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.transition-colors{transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.duration-300{transition-duration:.3s}.duration-75{transition-duration:75ms}select{-webkit-appearance:none;-moz-appearance:none;appearance:none;background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' fill='none' stroke='%23737373' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' viewBox='0 0 24 24'%3E%3Cpath d='m6 9 6 6 6-6'/%3E%3C/svg%3E");background-repeat:no-repeat;background-position:right 10px center;padding-right:32px}::-webkit-scrollbar{width:6px;height:6px}::-webkit-scrollbar-track{background:transparent}::-webkit-scrollbar-thumb{background:#404040;border-radius:3px}::-webkit-scrollbar-thumb:hover{background:#525252}.placeholder\:text-neutral-600::-moz-placeholder{--tw-text-opacity:1;color:rgb(82 82 82/var(--tw-text-opacity,1))}.placeholder\:text-neutral-600::placeholder{--tw-text-opacity:1;color:rgb(82 82 82/var(--tw-text-opacity,1))}.hover\:border-neutral-700:hover{--tw-border-opacity:1;border-color:rgb(64 64 64/var(--tw-border-opacity,1))}.hover\:bg-neutral-200:hover{--tw-bg-opacity:1;background-color:rgb(229 229 
229/var(--tw-bg-opacity,1))}.hover\:bg-neutral-600:hover{--tw-bg-opacity:1;background-color:rgb(82 82 82/var(--tw-bg-opacity,1))}.hover\:bg-neutral-700:hover{--tw-bg-opacity:1;background-color:rgb(64 64 64/var(--tw-bg-opacity,1))}.hover\:bg-neutral-800:hover{--tw-bg-opacity:1;background-color:rgb(38 38 38/var(--tw-bg-opacity,1))}.hover\:bg-neutral-800\/60:hover{background-color:rgba(38,38,38,.6)}.hover\:bg-neutral-900:hover{--tw-bg-opacity:1;background-color:rgb(23 23 23/var(--tw-bg-opacity,1))}.hover\:bg-red-700:hover{--tw-bg-opacity:1;background-color:rgb(185 28 28/var(--tw-bg-opacity,1))}.hover\:text-neutral-200:hover{--tw-text-opacity:1;color:rgb(229 229 229/var(--tw-text-opacity,1))}.hover\:text-neutral-300:hover{--tw-text-opacity:1;color:rgb(212 212 212/var(--tw-text-opacity,1))}.focus\:border-neutral-600:focus{--tw-border-opacity:1;border-color:rgb(82 82 82/var(--tw-border-opacity,1))}.focus\:outline-none:focus{outline:2px solid transparent;outline-offset:2px}.focus\:ring-1:focus{--tw-ring-offset-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset) 0 0 0 calc(1px + var(--tw-ring-offset-width)) var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 #0000)}.focus\:ring-neutral-600:focus{--tw-ring-opacity:1;--tw-ring-color:rgb(82 82 82/var(--tw-ring-opacity,1))}.disabled\:cursor-default:disabled{cursor:default}.disabled\:cursor-not-allowed:disabled{cursor:not-allowed}.disabled\:opacity-40:disabled{opacity:.4}.disabled\:opacity-50:disabled{opacity:.5}.disabled\:hover\:bg-neutral-800:hover:disabled{--tw-bg-opacity:1;background-color:rgb(38 38 38/var(--tw-bg-opacity,1))} \ No newline at end of file diff --git a/src/shared/domain/project.js b/src/shared/domain/project.js index ab63da1..eb0b052 100644 --- a/src/shared/domain/project.js +++ b/src/shared/domain/project.js @@ -1,6 +1,7 @@ const path = require('path'); const MIN_BACKGROUND_ZOOM = 1; 
+const MIN_REEL_BACKGROUND_ZOOM = 0.5; const MAX_BACKGROUND_ZOOM = 3; const MIN_BACKGROUND_PAN = -1; const MAX_BACKGROUND_PAN = 1; @@ -8,6 +9,13 @@ const MIN_CAMERA_SYNC_OFFSET_MS = -2000; const MAX_CAMERA_SYNC_OFFSET_MS = 2000; const EXPORT_AUDIO_PRESET_OFF = 'off'; const EXPORT_AUDIO_PRESET_COMPRESSED = 'compressed'; +const OUTPUT_MODE_LANDSCAPE = 'landscape'; +const OUTPUT_MODE_REEL = 'reel'; +const MIN_REEL_CROP_X = -1; +const MAX_REEL_CROP_X = 1; +const MIN_PIP_SCALE = 0.15; +const MAX_PIP_SCALE = 0.50; +const DEFAULT_PIP_SCALE = 0.22; function createProjectId() { return `project-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`; @@ -86,10 +94,11 @@ function normalizeSections(rawSections = []) { .sort((a, b) => a.start - b.start); } -function normalizeBackgroundZoom(value) { +function normalizeBackgroundZoom(value, outputMode) { + const minZoom = outputMode === OUTPUT_MODE_REEL ? MIN_REEL_BACKGROUND_ZOOM : MIN_BACKGROUND_ZOOM; const zoom = Number(value); - if (!Number.isFinite(zoom)) return MIN_BACKGROUND_ZOOM; - return Math.max(MIN_BACKGROUND_ZOOM, Math.min(MAX_BACKGROUND_ZOOM, zoom)); + if (!Number.isFinite(zoom)) return minZoom; + return Math.max(minZoom, Math.min(MAX_BACKGROUND_ZOOM, zoom)); } function normalizeBackgroundPan(value) { @@ -110,6 +119,8 @@ function normalizeKeyframes(rawKeyframes = []) { backgroundZoom: normalizeBackgroundZoom(keyframe.backgroundZoom), backgroundPanX: normalizeBackgroundPan(keyframe.backgroundPanX), backgroundPanY: normalizeBackgroundPan(keyframe.backgroundPanY), + reelCropX: normalizeReelCropX(keyframe.reelCropX), + pipScale: normalizePipScale(keyframe.pipScale), sectionId: typeof keyframe.sectionId === 'string' ? 
keyframe.sectionId : null, autoSection: !!keyframe.autoSection })) @@ -128,6 +139,23 @@ function normalizeCameraSyncOffsetMs(value) { return Math.max(MIN_CAMERA_SYNC_OFFSET_MS, Math.min(MAX_CAMERA_SYNC_OFFSET_MS, offset)); } +function normalizeReelCropX(value) { + const v = Number(value); + if (!Number.isFinite(v)) return 0; + return Math.max(MIN_REEL_CROP_X, Math.min(MAX_REEL_CROP_X, v)); +} + +function normalizeOutputMode(value) { + return value === OUTPUT_MODE_REEL ? OUTPUT_MODE_REEL : OUTPUT_MODE_LANDSCAPE; +} + +function normalizePipScale(value) { + if (value === null || value === undefined) return DEFAULT_PIP_SCALE; + const v = Number(value); + if (!Number.isFinite(v)) return DEFAULT_PIP_SCALE; + return Math.max(MIN_PIP_SCALE, Math.min(MAX_PIP_SCALE, v)); +} + function createDefaultProject(name = 'Untitled Project') { const now = new Date().toISOString(); return { @@ -139,7 +167,9 @@ function createDefaultProject(name = 'Untitled Project') { screenFitMode: 'fill', hideFromRecording: true, exportAudioPreset: EXPORT_AUDIO_PRESET_COMPRESSED, - cameraSyncOffsetMs: 0 + cameraSyncOffsetMs: 0, + outputMode: OUTPUT_MODE_LANDSCAPE, + pipScale: DEFAULT_PIP_SCALE }, takes: [], timeline: { @@ -174,7 +204,9 @@ function normalizeProjectData(rawProject, projectFolder) { screenFitMode: rawSettings.screenFitMode === 'fit' ? 'fit' : 'fill', hideFromRecording: rawSettings.hideFromRecording !== false, exportAudioPreset: normalizeExportAudioPreset(rawSettings.exportAudioPreset), - cameraSyncOffsetMs: normalizeCameraSyncOffsetMs(rawSettings.cameraSyncOffsetMs) + cameraSyncOffsetMs: normalizeCameraSyncOffsetMs(rawSettings.cameraSyncOffsetMs), + outputMode: normalizeOutputMode(rawSettings.outputMode), + pipScale: normalizePipScale(rawSettings.pipScale) }, takes: rawTakes.map((take, index) => ({ id: typeof take?.id === 'string' && take.id ? 
take.id : `take-${index + 1}-${Date.now()}`, @@ -216,10 +248,21 @@ module.exports = { normalizeKeyframes, normalizeExportAudioPreset, normalizeCameraSyncOffsetMs, + normalizeReelCropX, + normalizeOutputMode, + normalizePipScale, createDefaultProject, normalizeProjectData, MIN_CAMERA_SYNC_OFFSET_MS, MAX_CAMERA_SYNC_OFFSET_MS, EXPORT_AUDIO_PRESET_OFF, - EXPORT_AUDIO_PRESET_COMPRESSED + EXPORT_AUDIO_PRESET_COMPRESSED, + OUTPUT_MODE_LANDSCAPE, + OUTPUT_MODE_REEL, + MIN_REEL_CROP_X, + MAX_REEL_CROP_X, + MIN_PIP_SCALE, + MAX_PIP_SCALE, + DEFAULT_PIP_SCALE, + MIN_REEL_BACKGROUND_ZOOM }; diff --git a/tests/unit/project-domain.test.js b/tests/unit/project-domain.test.js index c474cc1..f629ea9 100644 --- a/tests/unit/project-domain.test.js +++ b/tests/unit/project-domain.test.js @@ -6,7 +6,11 @@ const { toProjectRelativePath, normalizeSections, normalizeKeyframes, + normalizeBackgroundZoom, normalizeCameraSyncOffsetMs, + normalizeReelCropX, + normalizeOutputMode, + normalizePipScale, createDefaultProject, normalizeProjectData } = require('../../src/shared/domain/project'); @@ -147,4 +151,114 @@ describe('shared/domain/project', () => { expect(fallbackProject.settings.cameraSyncOffsetMs).toBe(0); expect(createDefaultProject('Demo').settings.cameraSyncOffsetMs).toBe(0); }); + + test('normalizeReelCropX clamps to [-1, 1] and defaults invalid input to 0', () => { + expect(normalizeReelCropX(0.5)).toBe(0.5); + expect(normalizeReelCropX(-0.75)).toBe(-0.75); + expect(normalizeReelCropX(-2.5)).toBe(-1); + expect(normalizeReelCropX(3.0)).toBe(1); + expect(normalizeReelCropX(undefined)).toBe(0); + expect(normalizeReelCropX(null)).toBe(0); + expect(normalizeReelCropX(NaN)).toBe(0); + expect(normalizeReelCropX('nope')).toBe(0); + }); + + test('normalizeOutputMode returns reel for reel and landscape for anything else', () => { + expect(normalizeOutputMode('reel')).toBe('reel'); + expect(normalizeOutputMode('landscape')).toBe('landscape'); + 
expect(normalizeOutputMode(undefined)).toBe('landscape'); + expect(normalizeOutputMode(null)).toBe('landscape'); + expect(normalizeOutputMode('')).toBe('landscape'); + expect(normalizeOutputMode('portrait')).toBe('landscape'); + }); + + test('normalizeBackgroundZoom clamps to [1, 3] by default (backward compat)', () => { + expect(normalizeBackgroundZoom(2)).toBe(2); + expect(normalizeBackgroundZoom(1)).toBe(1); + expect(normalizeBackgroundZoom(3)).toBe(3); + expect(normalizeBackgroundZoom(0.5)).toBe(1); + expect(normalizeBackgroundZoom(5)).toBe(3); + expect(normalizeBackgroundZoom(null)).toBe(1); + expect(normalizeBackgroundZoom(undefined)).toBe(1); + expect(normalizeBackgroundZoom(NaN)).toBe(1); + }); + + test('normalizeBackgroundZoom with reel mode clamps to [0.5, 3]', () => { + expect(normalizeBackgroundZoom(0.5, 'reel')).toBe(0.5); + expect(normalizeBackgroundZoom(0.7, 'reel')).toBe(0.7); + expect(normalizeBackgroundZoom(0.3, 'reel')).toBe(0.5); + expect(normalizeBackgroundZoom(2, 'reel')).toBe(2); + expect(normalizeBackgroundZoom(5, 'reel')).toBe(3); + expect(normalizeBackgroundZoom(null, 'reel')).toBe(0.5); + expect(normalizeBackgroundZoom(NaN, 'reel')).toBe(0.5); + }); + + test('normalizeBackgroundZoom with landscape mode keeps [1, 3]', () => { + expect(normalizeBackgroundZoom(0.5, 'landscape')).toBe(1); + expect(normalizeBackgroundZoom(1, 'landscape')).toBe(1); + }); + + test('normalizePipScale clamps to [0.15, 0.50] and defaults invalid input to 0.22', () => { + expect(normalizePipScale(0.35)).toBe(0.35); + expect(normalizePipScale(0.15)).toBe(0.15); + expect(normalizePipScale(0.50)).toBe(0.50); + expect(normalizePipScale(0.05)).toBe(0.15); + expect(normalizePipScale(0.8)).toBe(0.50); + expect(normalizePipScale(undefined)).toBe(0.22); + expect(normalizePipScale(null)).toBe(0.22); + expect(normalizePipScale(NaN)).toBe(0.22); + }); + + test('normalizeKeyframes includes reelCropX property', () => { + const keyframes = normalizeKeyframes([ + { time: 0, pipX: 
10, pipY: 20, reelCropX: 0.5 }, + { time: 1, pipX: 30, pipY: 40, reelCropX: -2 }, + { time: 2, pipX: 50, pipY: 60 } + ]); + expect(keyframes[0].reelCropX).toBe(0.5); + expect(keyframes[1].reelCropX).toBe(-1); + expect(keyframes[2].reelCropX).toBe(0); + }); + + test('normalizeKeyframes includes pipScale property', () => { + const keyframes = normalizeKeyframes([ + { time: 0, pipX: 10, pipY: 20, pipScale: 0.35 }, + { time: 1, pipX: 30, pipY: 40, pipScale: 0.05 }, + { time: 2, pipX: 50, pipY: 60, pipScale: 0.8 }, + { time: 3, pipX: 70, pipY: 80 } + ]); + expect(keyframes[0].pipScale).toBe(0.35); + expect(keyframes[1].pipScale).toBe(0.15); + expect(keyframes[2].pipScale).toBe(0.50); + expect(keyframes[3].pipScale).toBe(0.22); + }); + + test('createDefaultProject includes outputMode and pipScale in settings', () => { + const project = createDefaultProject('Test'); + expect(project.settings.outputMode).toBe('landscape'); + expect(project.settings.pipScale).toBe(0.22); + }); + + test('normalizeProjectData hydrates outputMode and pipScale in settings', () => { + const reelProject = normalizeProjectData( + { settings: { outputMode: 'reel', pipScale: 0.35 } }, + '/tmp/my-project' + ); + expect(reelProject.settings.outputMode).toBe('reel'); + expect(reelProject.settings.pipScale).toBe(0.35); + + const defaultProject = normalizeProjectData( + { settings: {} }, + '/tmp/my-project' + ); + expect(defaultProject.settings.outputMode).toBe('landscape'); + expect(defaultProject.settings.pipScale).toBe(0.22); + + const invalidProject = normalizeProjectData( + { settings: { outputMode: 'weird', pipScale: 'bad' } }, + '/tmp/my-project' + ); + expect(invalidProject.settings.outputMode).toBe('landscape'); + expect(invalidProject.settings.pipScale).toBe(0.22); + }); }); diff --git a/tests/unit/render-filter-service.test.js b/tests/unit/render-filter-service.test.js index 7cc6d84..6bd06a6 100644 --- a/tests/unit/render-filter-service.test.js +++ b/tests/unit/render-filter-service.test.js @@ 
-4,6 +4,8 @@ const { buildAlphaExpr, buildCamFullAlphaExpr, buildFilterComplex, + buildScreenFilter, + resolveOutputSize, panToFocusCoord } = require('../../src/main/services/render-filter-service'); @@ -123,4 +125,232 @@ describe('main/services/render-filter-service', () => { ); expect(filter).toContain('[0:v]setpts=PTS-STARTPTS[screen]'); }); + + test('resolveOutputSize returns 9:16 dimensions for reel mode', () => { + const reel1080 = resolveOutputSize(1920, 1080, 'reel'); + expect(reel1080.outW).toBe(608); + expect(reel1080.outH).toBe(1080); + + const reel1440 = resolveOutputSize(2560, 1440, 'reel'); + expect(reel1440.outW).toBe(810); + expect(reel1440.outH).toBe(1440); + }); + + test('resolveOutputSize returns landscape dimensions by default', () => { + const landscape = resolveOutputSize(1920, 1080); + expect(landscape.outW).toBe(1920); + expect(landscape.outH).toBe(1080); + + const landscapeExplicit = resolveOutputSize(1920, 1080, 'landscape'); + expect(landscapeExplicit.outW).toBe(1920); + expect(landscapeExplicit.outH).toBe(1080); + }); + + test('buildScreenFilter with reel mode includes crop filter for static reelCropX', () => { + const filter = buildScreenFilter( + [{ time: 0, backgroundZoom: 1, backgroundPanX: 0, backgroundPanY: 0, reelCropX: 0 }], + 'fill', + 1920, + 1080, + 1920, + 1080, + '[screen]', + true, + 30, + 'reel' + ); + expect(filter).toContain('crop=608:1080:'); + expect(filter).toContain('[screen]'); + }); + + test('buildScreenFilter with reel mode and animated reelCropX includes interpolation', () => { + const filter = buildScreenFilter( + [ + { time: 0, backgroundZoom: 1, backgroundPanX: 0, backgroundPanY: 0, reelCropX: -0.5 }, + { time: 2, backgroundZoom: 1, backgroundPanX: 0, backgroundPanY: 0, reelCropX: 0.5 } + ], + 'fill', + 1920, + 1080, + 1920, + 1080, + '[screen]', + true, + 30, + 'reel' + ); + expect(filter).toContain('crop=608:1080:'); + expect(filter).toContain('if(gte(t,2.000)'); + }); + + test('buildScreenFilter with reel 
mode and static zoom < 1 produces split/overlay filter', () => { + const filter = buildScreenFilter( + [{ time: 0, backgroundZoom: 0.7, backgroundPanX: 0, backgroundPanY: 0, reelCropX: 0 }], + 'fill', + 1920, + 1080, + 1920, + 1080, + '[screen]', + false, + 30, + 'reel' + ); + expect(filter).toContain('split[for_zoom][for_bg]'); + expect(filter).toContain('colorlevels=romax=0.2:gomax=0.2:bomax=0.2'); + expect(filter).toContain('[dark_bg]'); + expect(filter).toContain('crop=608:1080:'); + expect(filter).toContain('[screen]'); + // Should scale content uniformly to 70% of both dimensions + let scaledW = Math.round(1920 * 0.7); + if (scaledW % 2 !== 0) scaledW -= 1; + let scaledH = Math.round(1080 * 0.7); + if (scaledH % 2 !== 0) scaledH -= 1; + const offsetX = Math.round((1920 - scaledW) / 2); + const offsetY = Math.round((1080 - scaledH) / 2); + expect(filter).toContain(`scale=${scaledW}:${scaledH}`); + expect(filter).toContain(`overlay=${offsetX}:${offsetY}`); + }); + + test('buildScreenFilter with reel mode and animated zoom crossing 1.0 produces zoompan + scale pipeline', () => { + const filter = buildScreenFilter( + [ + { time: 0, backgroundZoom: 0.7, backgroundPanX: 0, backgroundPanY: 0, reelCropX: 0 }, + { time: 2, backgroundZoom: 1.5, backgroundPanX: 0.5, backgroundPanY: 0, reelCropX: 0 } + ], + 'fill', + 1920, + 1080, + 1920, + 1080, + '[screen]', + false, + 30, + 'reel' + ); + expect(filter).toContain('split[for_zoom][for_bg]'); + expect(filter).toContain('colorlevels=romax=0.2:gomax=0.2:bomax=0.2'); + expect(filter).toContain("zoompan=z='max(1.000,"); + expect(filter).toContain('[zoomed]'); + expect(filter).toContain(`scale=w='max(2,2*floor(1920*min(1.0,`); + expect(filter).toContain(`h='max(2,2*floor(1080*min(1.0,`); + expect(filter).toContain(':eval=frame'); + expect(filter).toContain('[scaled]'); + expect(filter).toContain("overlay=x='(main_w-overlay_w)/2':y='(main_h-overlay_h)/2':eval=frame"); + expect(filter).toContain('crop=608:1080:'); + 
expect(filter).toContain('[screen]'); + }); + + test('buildScreenFilter with reel mode and zoom >= 1 remains unchanged', () => { + const filter = buildScreenFilter( + [ + { time: 0, backgroundZoom: 1.5, backgroundPanX: 0, backgroundPanY: 0, reelCropX: 0 }, + { time: 2, backgroundZoom: 2.0, backgroundPanX: 0.5, backgroundPanY: 0, reelCropX: 0 } + ], + 'fill', + 1920, + 1080, + 1920, + 1080, + '[screen]', + false, + 30, + 'reel' + ); + // Should NOT use the split/overlay pipeline + expect(filter).not.toContain('split[for_zoom][for_bg]'); + expect(filter).not.toContain('colorlevels'); + // Should use standard zoompan + expect(filter).toContain("zoompan=z='"); + expect(filter).toContain('crop=608:1080:'); + }); + + test('resolveOutputSize behavior unchanged for zoom-out feature', () => { + // Landscape unchanged + expect(resolveOutputSize(1920, 1080).outW).toBe(1920); + expect(resolveOutputSize(1920, 1080).outH).toBe(1080); + // Reel unchanged + expect(resolveOutputSize(1920, 1080, 'reel').outW).toBe(608); + expect(resolveOutputSize(1920, 1080, 'reel').outH).toBe(1080); + }); + + test('buildFilterComplex with reel mode uses reel output dimensions', () => { + const filter = buildFilterComplex( + [ + { time: 0, pipX: 100, pipY: 100, pipVisible: true, cameraFullscreen: false, backgroundZoom: 1, backgroundPanX: 0, backgroundPanY: 0, reelCropX: 0, pipScale: 0.33 } + ], + 200, + 'fill', + 1920, + 1080, + 608, + 1080, + true, + 30, + 'reel' + ); + // pipScale 0.33 * outW 608 = 200.64 → 201 (static) + expect(filter).toContain('crop=608:1080:'); + expect(filter).toContain('scale=201:201'); + expect(filter).toContain('overlay'); + }); + + test('buildFilterComplex with static pipScale uses fixed PIP size', () => { + const filter = buildFilterComplex( + [ + { time: 0, pipX: 100, pipY: 100, pipVisible: true, cameraFullscreen: false, backgroundZoom: 1, pipScale: 0.22 }, + { time: 2, pipX: 120, pipY: 120, pipVisible: true, cameraFullscreen: false, backgroundZoom: 1, pipScale: 0.22 } 
+ ], + 422, + 'fill', + 1920, + 1080, + 1920, + 1080 + ); + // Static pipScale 0.22 * outW 1920 = 422 + expect(filter).toContain('scale=422:422'); + // Static case: only one scale step (no animated second scale) + expect(filter).not.toContain('scale=w='); + }); + + test('buildFilterComplex with varying pipScale uses two-stage scale (fixed + animated)', () => { + const filter = buildFilterComplex( + [ + { time: 0, pipX: 100, pipY: 100, pipVisible: true, cameraFullscreen: false, backgroundZoom: 1, pipScale: 0.22 }, + { time: 2, pipX: 120, pipY: 120, pipVisible: true, cameraFullscreen: false, backgroundZoom: 1, pipScale: 0.40 } + ], + 422, + 'fill', + 1920, + 1080, + 1920, + 1080 + ); + // First scale: fixed max pip size (0.40 * 1920 = 768) + expect(filter).toContain('scale=768:768'); + // Second scale: animated with eval=frame + expect(filter).toContain('eval=frame'); + expect(filter).toContain('0.220'); + expect(filter).toContain('0.400'); + // Overlay uses eval=frame for variable-size PIP + expect(filter).toContain('overlay=x='); + expect(filter).toContain(':eval=frame[out]'); + }); + + test('buildFilterComplex defaults pipScale to 0.22 when not in keyframes', () => { + const filter = buildFilterComplex( + [ + { time: 0, pipX: 100, pipY: 100, pipVisible: true, cameraFullscreen: false, backgroundZoom: 1 } + ], + 422, + 'fill', + 1920, + 1080, + 1920, + 1080 + ); + // Default pipScale 0.22 * outW 1920 = 422 + expect(filter).toContain('scale=422:422'); + }); }); diff --git a/tests/unit/render-service.test.js b/tests/unit/render-service.test.js index c9a375f..3baa3f3 100644 --- a/tests/unit/render-service.test.js +++ b/tests/unit/render-service.test.js @@ -27,6 +27,19 @@ describe('main/services/render-service', () => { expect(sections[1].backgroundPanY).toBe(1); }); + test('normalizeSectionInput normalizes reelCropX on sections', () => { + const sections = normalizeSectionInput([ + { takeId: 'a', sourceStart: 0, sourceEnd: 1, reelCropX: 0.5 }, + { takeId: 'b', 
sourceStart: 1, sourceEnd: 2, reelCropX: -3 }, + { takeId: 'c', sourceStart: 2, sourceEnd: 3 } + ]); + + expect(sections).toHaveLength(3); + expect(sections[0].reelCropX).toBe(0.5); + expect(sections[1].reelCropX).toBe(-1); + expect(sections[2].reelCropX).toBe(0); + }); + test('assertFilePath throws for missing files and accepts existing file', () => { const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'video-render-test-')); const file = path.join(tmpDir, 'input.webm'); @@ -436,6 +449,39 @@ describe('main/services/render-service', () => { expect(argString).toContain('[1:v]trim=start=2.000:end=3.000,setpts=PTS-STARTPTS,fps=fps=30[cv2]'); }); + test('renderComposite produces reel crop filter when outputMode is reel', async () => { + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'video-render-reel-')); + const outputDir = path.join(tmpDir, 'out'); + const screenPath = path.join(tmpDir, 'screen.webm'); + fs.writeFileSync(screenPath, 'screen', 'utf8'); + + const execCalls = []; + await renderComposite( + { + outputFolder: outputDir, + takes: [{ id: 'take-1', screenPath, cameraPath: null }], + sections: [{ takeId: 'take-1', sourceStart: 0, sourceEnd: 1.25, reelCropX: 0 }], + keyframes: [{ time: 0, pipX: 10, pipY: 10, pipVisible: false, cameraFullscreen: false, reelCropX: 0 }], + pipSize: 200, + sourceWidth: 1920, + sourceHeight: 1080, + screenFitMode: 'fill', + outputMode: 'reel' + }, + { + ffmpegPath: '/usr/bin/ffmpeg', + now: () => 555, + probeVideoFpsWithFfmpeg: async () => 30, + runFfmpeg: async ({ ffmpegPath, args }) => { + execCalls.push({ bin: ffmpegPath, args }); + } + } + ); + + const argString = execCalls[0].args.join(' '); + expect(argString).toContain('crop=608:1080:'); + }); + test('renderComposite forwards mapped progress updates from ffmpeg output time', async () => { const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'video-render-progress-')); const outputDir = path.join(tmpDir, 'out'); From bfb2721531f835d517ad61c6b760c525056e7143 Mon Sep 17 
00:00:00 2001 From: amitay keisar Date: Thu, 19 Mar 2026 18:22:15 +0200 Subject: [PATCH 02/15] chore: add openspec change artifacts and update gitignore - Add openspec specs (reel-mode, pip-overlay, reel-zoom-out) and archived change artifacts documenting the feature design process - Add .DS_Store and .agents/ to gitignore Co-Authored-By: Claude Opus 4.6 --- .gitignore | 2 + .../.openspec.yaml | 2 + .../design.md | 33 +++ .../proposal.md | 28 +++ .../specs/per-section-pip-scale/spec.md | 57 +++++ .../2026-03-19-per-section-pip-scale/tasks.md | 50 ++++ .../2026-03-19-reel-mode-9x16/.openspec.yaml | 2 + .../2026-03-19-reel-mode-9x16/design.md | 231 ++++++++++++++++++ .../2026-03-19-reel-mode-9x16/proposal.md | 31 +++ .../specs/output-aspect-ratio/spec.md | 77 ++++++ .../specs/pip-size-control/spec.md | 145 +++++++++++ .../specs/reel-crop/spec.md | 152 ++++++++++++ .../2026-03-19-reel-mode-9x16/tasks.md | 91 +++++++ .../.openspec.yaml | 2 + .../design.md | 65 +++++ .../proposal.md | 25 ++ .../specs/reel-zoom-out/spec.md | 75 ++++++ .../2026-03-19-reel-zoom-out-blur-bg/tasks.md | 45 ++++ openspec/specs/pip-overlay/spec.md | 179 ++++++++++++++ openspec/specs/reel-mode/spec.md | 224 +++++++++++++++++ openspec/specs/reel-zoom-out/spec.md | 75 ++++++ 21 files changed, 1591 insertions(+) create mode 100644 openspec/changes/archive/2026-03-19-per-section-pip-scale/.openspec.yaml create mode 100644 openspec/changes/archive/2026-03-19-per-section-pip-scale/design.md create mode 100644 openspec/changes/archive/2026-03-19-per-section-pip-scale/proposal.md create mode 100644 openspec/changes/archive/2026-03-19-per-section-pip-scale/specs/per-section-pip-scale/spec.md create mode 100644 openspec/changes/archive/2026-03-19-per-section-pip-scale/tasks.md create mode 100644 openspec/changes/archive/2026-03-19-reel-mode-9x16/.openspec.yaml create mode 100644 openspec/changes/archive/2026-03-19-reel-mode-9x16/design.md create mode 100644 
openspec/changes/archive/2026-03-19-reel-mode-9x16/proposal.md create mode 100644 openspec/changes/archive/2026-03-19-reel-mode-9x16/specs/output-aspect-ratio/spec.md create mode 100644 openspec/changes/archive/2026-03-19-reel-mode-9x16/specs/pip-size-control/spec.md create mode 100644 openspec/changes/archive/2026-03-19-reel-mode-9x16/specs/reel-crop/spec.md create mode 100644 openspec/changes/archive/2026-03-19-reel-mode-9x16/tasks.md create mode 100644 openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/.openspec.yaml create mode 100644 openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/design.md create mode 100644 openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/proposal.md create mode 100644 openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/specs/reel-zoom-out/spec.md create mode 100644 openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/tasks.md create mode 100644 openspec/specs/pip-overlay/spec.md create mode 100644 openspec/specs/reel-mode/spec.md create mode 100644 openspec/specs/reel-zoom-out/spec.md diff --git a/.gitignore b/.gitignore index 42033b3..99a5b73 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,8 @@ dist-smoke .env .cursor/hooks/state/ +.DS_Store +.agents/ .claude/worktrees/* pnpm-lock.yaml package-lock.json diff --git a/openspec/changes/archive/2026-03-19-per-section-pip-scale/.openspec.yaml b/openspec/changes/archive/2026-03-19-per-section-pip-scale/.openspec.yaml new file mode 100644 index 0000000..4e61834 --- /dev/null +++ b/openspec/changes/archive/2026-03-19-per-section-pip-scale/.openspec.yaml @@ -0,0 +1,2 @@ +schema: spec-driven +created: 2026-03-19 diff --git a/openspec/changes/archive/2026-03-19-per-section-pip-scale/design.md b/openspec/changes/archive/2026-03-19-per-section-pip-scale/design.md new file mode 100644 index 0000000..a99abbb --- /dev/null +++ b/openspec/changes/archive/2026-03-19-per-section-pip-scale/design.md @@ -0,0 +1,33 @@ +## Design Decisions + +### Decision 1: pipScale as a 
keyframe property + +**Choice**: Add `pipScale` to keyframe objects alongside `backgroundZoom`, `reelCropX`, etc. + +**Rationale**: Follows the exact same pattern as other per-section properties. Each section anchor keyframe stores a `pipScale` value. The `getStateAtTime()` function interpolates between values during the 0.3s transition window. This is consistent with how zoom, pan, and crop already work. + +**Data flow**: `keyframe.pipScale` → `getStateAtTime()` interpolation → `editorState.pipSize = computePipSize(pipScale, effectiveW)` → draw loop uses size → render receives per-keyframe values + +### Decision 2: Keep pipScale in project settings as the default + +**Choice**: Keep `settings.pipScale` as the default/initial value for new keyframes. Remove it from being the authoritative runtime value. + +**Rationale**: When creating a new project or adding new sections, the default pipScale comes from settings. Existing projects without per-keyframe pipScale values fall back to the project-level default. This preserves backward compatibility — old projects work unchanged. + +### Decision 3: PIP Size slider controls current section anchor + +**Choice**: The PIP Size slider reads/writes the current section's anchor keyframe `pipScale`, matching how the zoom slider works. + +**Rationale**: Consistent UX — every per-section property uses the same control pattern. The slider updates the anchor keyframe, pushes undo, and schedules a project save. + +### Decision 4: Animated PIP size in FFmpeg render + +**Choice**: Build animated PIP size expressions in `buildFilterComplex()` using the same `buildNumericExpr()` approach as zoom. + +**Rationale**: The PIP size needs to smoothly animate between sections in the rendered output. Using the existing expression builder keeps the FFmpeg pipeline consistent. The PIP scale filter expression, corner radius, alpha mask, and overlay position all need to use the animated size. 
+ +### Decision 5: Re-snap PIP position on pipScale change + +**Choice**: When pipScale changes for a section, automatically re-snap the PIP position to the nearest corner using the new size. + +**Rationale**: Changing PIP size without adjusting position would cause the PIP to overlap edges. The existing `snapToNearestCorner()` function handles this. This matches the current behavior when the global PIP size slider is adjusted. diff --git a/openspec/changes/archive/2026-03-19-per-section-pip-scale/proposal.md b/openspec/changes/archive/2026-03-19-per-section-pip-scale/proposal.md new file mode 100644 index 0000000..5d802ef --- /dev/null +++ b/openspec/changes/archive/2026-03-19-per-section-pip-scale/proposal.md @@ -0,0 +1,28 @@ +## Why + +PIP (camera overlay) size is currently a global project setting — every section uses the same `pipScale`. Users need the ability to vary PIP size per section, exactly like they already control zoom, pan, and crop position per section. This enables emphasizing the speaker in some sections and shrinking the overlay in others. 
+ +## What Changes + +- Move `pipScale` from a global project setting to a per-keyframe property on section anchor keyframes +- Add `pipScale` to keyframe interpolation so transitions between sections animate the PIP size smoothly (0.3s linear, same as other properties) +- The existing PIP Size slider in the editor now controls the current section's `pipScale` (same pattern as the zoom slider) +- "Apply to Future Sections" copies `pipScale` along with other section properties +- The FFmpeg render pipeline receives per-section `pipScale` and builds animated PIP size expressions +- PIP position auto-snaps to nearest corner when `pipScale` changes (existing behavior preserved) + +## Capabilities + +### New Capabilities +- `per-section-pip-scale`: Per-section PIP sizing with interpolated transitions and render pipeline support + +### Modified Capabilities + +## Impact + +- `src/shared/domain/project.js` — Add `pipScale` to keyframe normalization, remove from project settings (or keep as default) +- `src/renderer/app.js` — Move pipScale to keyframe anchors, update slider to control per-section value, interpolate in `getStateAtTime()`, re-snap PIP on size change +- `src/main/services/render-filter-service.js` — Animate PIP size in `buildFilterComplex()` using expressions +- `src/main/services/render-service.js` — Pass per-section pipScale through the pipeline +- `tests/unit/project-domain.test.js` — Update keyframe normalization tests +- `tests/unit/render-filter-service.test.js` — Add animated PIP size tests diff --git a/openspec/changes/archive/2026-03-19-per-section-pip-scale/specs/per-section-pip-scale/spec.md b/openspec/changes/archive/2026-03-19-per-section-pip-scale/specs/per-section-pip-scale/spec.md new file mode 100644 index 0000000..c4faea1 --- /dev/null +++ b/openspec/changes/archive/2026-03-19-per-section-pip-scale/specs/per-section-pip-scale/spec.md @@ -0,0 +1,57 @@ +## ADDED Requirements + +### Requirement: Per-section PIP scale + +The system SHALL store 
`pipScale` as a per-keyframe property on section anchor keyframes, with values clamped to [0.15, 0.50] and a default of 0.22. + +#### Scenario: PIP size varies between sections + +- **WHEN** section A has `pipScale` 0.22 and section B has `pipScale` 0.40 +- **THEN** the PIP overlay in section A SHALL be 22% of the effective canvas width, and in section B SHALL be 40% + +#### Scenario: Smooth PIP size transition + +- **WHEN** transitioning from a section with `pipScale` 0.22 to one with `pipScale` 0.40 +- **THEN** the PIP size SHALL animate linearly over the 0.3s transition window + +#### Scenario: PIP Size slider controls current section + +- **WHEN** the user adjusts the PIP Size slider while a section is selected +- **THEN** only that section's anchor keyframe `pipScale` SHALL be updated + +#### Scenario: Apply to Future Sections includes pipScale + +- **WHEN** the user applies style to future sections +- **THEN** `pipScale` SHALL be copied alongside zoom, pan, cropX, and other section properties + +### Requirement: PIP position re-snap on scale change + +When `pipScale` changes for a section, the PIP position SHALL be re-snapped to the nearest corner using the new size, maintaining proper margins from the edges. + +#### Scenario: Resize re-snaps position + +- **WHEN** a section's `pipScale` changes from 0.22 to 0.40 +- **THEN** the PIP's `pipX` and `pipY` SHALL be recalculated to snap to the nearest corner with the new size + +### Requirement: FFmpeg render with animated PIP size + +The FFmpeg render pipeline SHALL support per-keyframe `pipScale` values, producing animated PIP size transitions in the output video. 
+ +#### Scenario: Rendered output matches editor preview + +- **WHEN** rendering a video with sections having different `pipScale` values +- **THEN** the PIP size in the output video SHALL match the editor preview at each point in time + +#### Scenario: Static PIP size (all sections same) + +- **WHEN** all sections have the same `pipScale` +- **THEN** the render pipeline SHALL use a fixed PIP size (no expression overhead) + +### Requirement: Backward compatibility + +Existing projects without per-keyframe `pipScale` SHALL use the project-level `settings.pipScale` as the default value for all keyframes. Behavior is identical to before this change. + +#### Scenario: Legacy project loaded + +- **WHEN** a project saved before this change is loaded (keyframes have no `pipScale`) +- **THEN** all sections SHALL use `settings.pipScale` (or 0.22 if absent) as their `pipScale` diff --git a/openspec/changes/archive/2026-03-19-per-section-pip-scale/tasks.md b/openspec/changes/archive/2026-03-19-per-section-pip-scale/tasks.md new file mode 100644 index 0000000..9b03407 --- /dev/null +++ b/openspec/changes/archive/2026-03-19-per-section-pip-scale/tasks.md @@ -0,0 +1,50 @@ +## 1. Domain Model (src/shared/domain/project.js) + +- [x] 1.1 Add `pipScale` to `normalizeKeyframes()`: normalize using `normalizePipScale()`, default to `DEFAULT_PIP_SCALE` (0.22) +- [x] 1.2 Add unit tests for keyframe `pipScale` normalization (valid, clamped, default fallback) + +## 2. 
Editor Logic — Per-Section pipScale (src/renderer/app.js) + +- [x] 2.1 Add `pipScale` to the default keyframe fallback in `getSectionAnchorKeyframe()` +- [x] 2.2 Add `pipScale` to `syncSectionAnchorKeyframes()` so it's synced to each section's anchor +- [x] 2.3 Add `pipScale` to `applyStyleToFutureSections()` so "Apply to Future" copies it +- [x] 2.4 Add `pipScale` to `buildSplitAnchorKeyframe()` in `keyframe-ops.js` +- [x] 2.5 Add `pipScale` to `getStateAtTime()`: include in default, interpolate during transitions, include in return object +- [x] 2.6 Compute `pipSize` from interpolated `pipScale` in the draw loop (replacing the global `editorState.pipSize`) + +## 3. Editor UI — PIP Size Slider (src/renderer/app.js) + +- [x] 3.1 Change PIP Size slider input handler to read/write the current section's anchor `pipScale` (instead of `editorState.pipScale`) +- [x] 3.2 Update slider display on section change: read `pipScale` from current section anchor and update slider value/label +- [x] 3.3 Re-snap PIP position when pipScale changes on a section (snap to nearest corner with new size) + +## 4. Editor — Keyframe & Render Data (src/renderer/app.js) + +- [x] 4.1 Add `pipScale` to `getRenderKeyframes()` minimal keyframe output +- [x] 4.2 Add `pipScale` to `getRenderSections()` section output +- [x] 4.3 Add `pipScale` to `getProjectTimelineSnapshot()` keyframe serialization + +## 5. FFmpeg Render Pipeline (src/main/services/render-filter-service.js) + +- [x] 5.1 Modify `buildFilterComplex()` to accept per-keyframe `pipScale` and compute animated PIP size expressions +- [x] 5.2 Build animated scale, corner radius, position, and alpha expressions using `pipScale` from keyframes +- [x] 5.3 Handle static case (all keyframes same pipScale): use fixed PIP size (no expression overhead) +- [x] 5.4 Add unit tests for animated PIP size in buildFilterComplex + +## 6. 
Render Service (src/main/services/render-service.js) + +- [x] 6.1 Pass per-keyframe `pipScale` through to `buildFilterComplex()` (already in keyframes array) +- [x] 6.2 Remove or deprecate the global `pipSize` parameter (use keyframe values instead) + +## 7. Backward Compatibility + +- [x] 7.1 When loading keyframes without `pipScale`, default to `settings.pipScale` (or 0.22) +- [x] 7.2 Ensure `enterEditor()` initialization seeds keyframe `pipScale` from project settings for legacy data + +## 8. Verification + +- [x] 8.1 Run `npm run check` — all tests pass, lint clean, typecheck clean +- [ ] 8.2 Manual test: adjust PIP size on one section, verify other sections unaffected +- [ ] 8.3 Manual test: transition between sections with different PIP sizes — smooth animation +- [ ] 8.4 Manual test: render video with varying PIP sizes — output matches editor +- [ ] 8.5 Manual test: load old project — PIP size defaults correctly across all sections diff --git a/openspec/changes/archive/2026-03-19-reel-mode-9x16/.openspec.yaml b/openspec/changes/archive/2026-03-19-reel-mode-9x16/.openspec.yaml new file mode 100644 index 0000000..4e61834 --- /dev/null +++ b/openspec/changes/archive/2026-03-19-reel-mode-9x16/.openspec.yaml @@ -0,0 +1,2 @@ +schema: spec-driven +created: 2026-03-19 diff --git a/openspec/changes/archive/2026-03-19-reel-mode-9x16/design.md b/openspec/changes/archive/2026-03-19-reel-mode-9x16/design.md new file mode 100644 index 0000000..6bd8ca1 --- /dev/null +++ b/openspec/changes/archive/2026-03-19-reel-mode-9x16/design.md @@ -0,0 +1,231 @@ +## Context + +Loop is an Electron desktop app for recording screen-based videos with AI-powered editing. The current architecture produces exclusively 16:9 (landscape) output at source resolution (typically 1920x1080). The rendering pipeline uses ffmpeg with a filter complex that chains: section trimming/concatenation, zoom/pan via `zoompan` filter, and camera PIP overlay via `overlay` filter. 
+ +**Current render pipeline flow:** +``` +Source (16:9) → trim/concat → scale → zoompan → camera overlay → output (16:9) +``` + +**Key existing systems this change touches:** +- **Keyframe system** (`project.js`): Each keyframe has `{ time, pipX, pipY, pipVisible, cameraFullscreen, backgroundZoom, backgroundPanX, backgroundPanY, sectionId, autoSection }`. Keyframes are per-section anchors that define camera and zoom state. Transitions between keyframes use 0.3s linear interpolation. +- **FFmpeg expression builder** (`render-filter-service.js`): `buildNumericExpr()` generates nested `if(gte(...))` ffmpeg expressions for runtime interpolation of any numeric property across keyframes. This is the core of smooth animation during rendering. +- **Editor preview** (`app.js:getStateAtTime()`): JavaScript equivalent of the ffmpeg interpolation — computes the interpolated state at any given time for real-time canvas preview. +- **Canvas system**: Fixed 1920x1080 canvas. PIP positioned in absolute pixel coordinates within this space. Corner snapping on drag release. +- **Project persistence**: Full project state serialized to JSON. All keyframe properties and settings are round-tripped through normalizers (`normalizeKeyframes`, `normalizeProjectData`). 
+ +**Constraints:** +- AGENTS.md mandates test-first development and `npm run check` before completion +- Shared domain logic belongs in `src/shared/`, renderer features in `src/renderer/features/` +- Cross-platform behavior matters — no macOS-only assumptions + +## Goals / Non-Goals + +**Goals:** +- Users can export recordings as 9:16 vertical video for social media platforms +- The crop region is visually represented in the editor preview at all times +- Crop position is configurable per section with smooth animated transitions +- Camera PIP size is adjustable to fit the narrower 9:16 frame +- Camera fullscreen mode adapts to the 9:16 output dimensions +- Existing 16:9 workflow is completely unaffected (backward compatible) +- Existing zoom/pan features compose cleanly with the reel crop + +**Non-Goals:** +- Auto-tracking mouse cursor or content detection for automatic crop positioning +- Arbitrary aspect ratios (only 16:9 and 9:16) +- Per-keyframe PIP size animation (v1 uses a global project setting) +- Vertical recording source support (source is always assumed 16:9) +- Mobile/web export — output is always a local MP4 file + +## Decisions + +### Decision 1: Reel crop as a new keyframe property (`reelCropX`) + +**Choice:** Add a single `reelCropX` property (range -1 to +1, default 0) to the existing keyframe data model. + +**Rationale:** The existing keyframe system already handles per-section anchored properties with smooth interpolation. `backgroundZoom`, `backgroundPanX`, `backgroundPanY` follow this exact pattern. 
Adding `reelCropX` to the same system means: +- Zero new interpolation logic — `buildNumericExpr()` handles it automatically for ffmpeg rendering +- Zero new preview logic — `getStateAtTime()` interpolation block handles it with the same `t` factor +- Undo/redo works automatically (keyframe mutations are already tracked) +- "Apply to Future" button copies it alongside other section properties +- Section splitting inherits it from the parent section + +**Alternatives considered:** +- *Per-section property (not keyframe)*: Would require a separate interpolation system for smooth transitions. More work, less consistency. +- *Separate "crop keyframe" system*: Over-engineered. The existing keyframe system is designed for exactly this kind of property. + +**Effect on the app:** Every code path that creates, copies, normalizes, or serializes keyframes needs to include `reelCropX`. This includes: `normalizeKeyframes()`, `getSectionAnchorKeyframe()`, `syncSectionAnchorKeyframes()`, `buildSplitAnchorKeyframe()`, `applyStyleToFutureSections()`, `getRenderKeyframes()`, `getRenderSections()`, `getProjectTimelineSnapshot()`. + +--- + +### Decision 2: Crop applied AFTER zoom/pan in the ffmpeg pipeline + +**Choice:** The reel crop filter is appended after the zoompan filter in the ffmpeg filter chain. + +**Pipeline becomes:** +``` +Source → scale → zoompan → CROP (9:16 strip) → camera overlay → output +``` + +**Rationale:** Zoom/pan operates on the full 16:9 frame, narrowing what the viewer sees. The reel crop then selects which vertical strip of that zoomed view to show. This composition is intuitive: +- Zoom focuses on a region of interest +- Reel crop frames it for vertical output +- They're independent controls that compose cleanly + +**Implementation:** In `buildScreenFilter()`, when `outputMode === 'reel'`, append a `crop=REEL_W:REEL_H:X_EXPR:0` filter after the zoompan output (or after the base scale if no zoom animation exists). 
The `X_EXPR` is built using `buildNumericExpr(keyframes, 'reelCropX', ...)` converted to pixel coordinates via the formula: `((reelCropX + 1) / 2) * (sourceW - cropW)`. + +**Crop math for 1920x1080 source:** +- Reel output: `outW = round(1080 * 9/16) = 608`, `outH = 1080` +- Crop region: 608px wide, 1080px tall +- Horizontal range: 0px to 1312px (= 1920 - 608) +- `reelCropX = -1` → pixel offset 0 (left edge) +- `reelCropX = 0` → pixel offset 656 (center) +- `reelCropX = +1` → pixel offset 1312 (right edge) + +**Effect on the app:** `buildScreenFilter()` and `buildFilterComplex()` gain an `outputMode` parameter. The `resolveOutputSize()` function gains an `outputMode` parameter to return 608x1080 for reel mode. All callers of these functions must pass `outputMode` through. + +--- + +### Decision 3: PIP coordinates relative to the crop region (not full canvas) + +**Choice:** In reel mode, PIP `pipX`/`pipY` coordinates are relative to the 9:16 output frame (608x1080), not the full 16:9 canvas (1920x1080). + +**Rationale:** PIP must always appear within the output frame. If coordinates were relative to the full 16:9 canvas, the PIP could be positioned outside the visible crop region. 
By using crop-relative coordinates: +- Corner snapping works naturally (snaps to corners of the 9:16 frame) +- PIP drag is bounded to the visible output area +- The ffmpeg overlay filter receives coordinates in output space, which is correct +- No need for complex "clamp PIP to crop region" logic + +**Effect on the app:** +- `snapToNearestCorner()` must use different canvas dimensions based on output mode +- Default PIP position must be recalculated for reel mode +- When toggling 16:9 ↔ 9:16, existing PIP positions need re-clamping to the new coordinate space +- The editor preview must offset PIP drawing by the crop region's X position (since the preview canvas is still 1920x1080, but PIP coords are in 608-space) +- `buildFilterComplex()` scaling factor (`outW / canvasW`) automatically handles coordinate translation when `canvasW` reflects the reel canvas width + +**Visualization of coordinate spaces:** +``` +Full preview canvas (1920x1080): +┌──────────────────────────────────────────────┐ +│░░░░░░║ cropX=200 ║░░░░░░░│ +│░░░░░░║ ║░░░░░░░│ +│░░░░░░║ PIP at (400, 800) ║░░░░░░░│ +│░░░░░░║ in 608-space ║░░░░░░░│ +│░░░░░░║ → drawn at (600, 800) ║░░░░░░░│ +│░░░░░░║ in preview canvas ║░░░░░░░│ +└──────────────────────────────────────────────┘ + +Preview PIP position = cropPixelOffset + pipX +Render PIP position = pipX (directly in output space) +``` + +--- + +### Decision 4: PIP size as a global project setting (not per-keyframe) + +**Choice:** Add a `pipScale` project setting (range 0.15 to 0.5, default 0.22) that controls PIP size as a fraction of the effective canvas width. 
+ +**Rationale:** +- Current PIP size is already a fixed constant (`PIP_FRACTION = 0.22`), not per-keyframe +- Making it per-keyframe would require adding `pipScale` to keyframe model, interpolation in `getStateAtTime()`, expression building in `buildNumericExpr()`, and ffmpeg filter changes for dynamic PIP scaling — significant complexity +- A global setting solves the primary problem: PIP at 22% of 1920 = 422px works for 16:9, but 22% of 608 = 134px is too small for 9:16. Users need a way to adjust this. +- Can be promoted to per-keyframe in a future iteration if demand exists + +**Effect on the app:** +- New `pipScale` field in project settings (alongside `screenFitMode`, `cameraSyncOffsetMs`, etc.) +- `PIP_SIZE` becomes computed: `Math.round(effectiveCanvasW * pipScale)` instead of a constant +- The PIP size slider appears in the editor controls +- `renderComposite()` receives `pipSize` (already parameterized) — the renderer computes it from `pipScale` and canvas width +- `buildFilterComplex()` already takes `pipSize` as a parameter — no change needed there + +--- + +### Decision 5: Editor preview shows full 16:9 with crop overlay + +**Choice:** The editor canvas stays at 1920x1080 in all modes. In reel mode, the full 16:9 frame is rendered, then a semi-transparent dark overlay covers the area outside the 9:16 crop region, with dashed boundary lines. + +**Rationale:** +- Users need spatial context to position the crop effectively +- Seeing the full frame helps when deciding where to place the crop region +- The crop region boundaries serve as visual guides +- Dragging the crop is more intuitive when you can see what's outside it +- The existing canvas drawing pipeline doesn't need restructuring + +**Alternative considered:** Render only the 9:16 crop area in the preview. Rejected because it removes spatial context needed for crop positioning. Users can't see what's to the left or right of their crop. 
+ +**Effect on the app:** +- `editorDrawLoop()` gains a post-processing step: after drawing screen + camera, draw the crop overlay +- The overlay consists of: two semi-transparent black rectangles (left and right of crop), and a dashed white rectangle around the crop boundary +- This runs every frame of the editor draw loop — performance impact is negligible (two `fillRect` calls and one `strokeRect`) + +--- + +### Decision 6: Output mode toggle placement and controls layout + +**Choice:** Add the aspect ratio toggle (16:9 / 9:16) to the editor controls bar, along with a PIP Size slider. To avoid overflow, reorganize into context-sensitive groupings. + +**Layout:** +``` +[Undo] [Redo] [Play] [Split] [Camera] [Full] | [Zoom ━━━] [PIP Size ━━━] [16:9|9:16] | [Apply to Future] +``` + +The PIP Size slider appears only when a camera is present. The Crop X slider is NOT shown as a separate control — instead, the user positions the crop by **dragging the crop region directly on the preview canvas**. This keeps the controls clean and the interaction spatial. + +**Rationale:** A crop slider would be redundant with direct canvas manipulation. Dragging is more intuitive for spatial positioning. The zoom slider stays because zoom range (1x-3x) is less spatially intuitive. PIP size slider stays because size adjustment is a scalar value, not a spatial position. 
+ +--- + +### Decision 7: Handling 16:9 ↔ 9:16 toggle transition + +**Choice:** When switching output modes: + +**16:9 → 9:16:** +- All sections' `reelCropX` defaults to 0 (center) if not previously set +- PIP positions are re-mapped: `newPipX = pipX * (REEL_CANVAS_W / CANVAS_W)`, clamped to reel bounds +- PIP size is recalculated using `pipScale * REEL_CANVAS_W` +- The crop overlay appears immediately + +**9:16 → 16:9:** +- `reelCropX` values are preserved (not deleted) so toggling back doesn't lose work +- PIP positions are re-mapped back: `newPipX = pipX * (CANVAS_W / REEL_CANVAS_W)`, clamped +- PIP size is recalculated using `pipScale * CANVAS_W` +- The crop overlay disappears + +**Rationale:** Preserving `reelCropX` on toggle-back prevents accidental data loss. Users may toggle back and forth while experimenting. The PIP position re-mapping ensures the camera stays in approximately the same visual position relative to the output frame. + +--- + +### Decision 8: Camera fullscreen in reel mode + +**Choice:** In reel mode, "fullscreen" camera fills the 9:16 output frame (608x1080). The camera feed (typically 16:9 from a webcam) is scaled with `force_original_aspect_ratio=increase` then cropped to 608x1080, showing the center of the face. + +**Rationale:** This matches the existing fullscreen behavior pattern — camera fills the entire output frame. For a landscape camera source, the center-crop approach preserves the subject's face (which is typically centered). + +**Effect on the app:** In `buildFilterComplex()`, the camera fullscreen scaling already uses `outW:outH`. Since `resolveOutputSize()` returns reel dimensions when in reel mode, this adapts automatically. The preview in `editorDrawLoop()` must also scale the fullscreen camera to reel dimensions — the `camW/camH` calculation uses the effective canvas size. + +--- + +### Decision 9: Backward compatibility and project migration + +**Choice:** No migration needed. 
Existing projects without `outputMode` or `reelCropX` default to `'landscape'` and `0` respectively through the normalizer functions. + +**Rationale:** The normalizer pattern used throughout the project (`normalizeKeyframes`, `normalizeProjectData`) already handles missing fields gracefully. Adding new optional fields with sensible defaults is the established pattern (as was done for `backgroundZoom`, `backgroundPanX`, etc.). + +**Effect:** `normalizeOutputMode(undefined)` returns `'landscape'`. `normalizeReelCropX(undefined)` returns `0`. No project file format versioning needed. + +## Risks / Trade-offs + +### Risk 1: Zoom + crop interaction confusion +Users might not understand that zoom operates on the full frame while crop selects within the zoomed result. **Mitigation:** The preview shows the composition visually — zoom changes the background, crop overlay adjusts on top. The visual feedback should make the interaction clear. If users report confusion, a future enhancement could add a tooltip or brief animation. + +### Risk 2: PIP position drift on mode toggle +Re-mapping PIP coordinates between 1920→608 and back involves rounding. Multiple toggles could cause slight position drift. **Mitigation:** Snap to the nearest corner after toggle, which is the snap behavior already used on PIP drag release. This makes the position deterministic. + +### Risk 3: FFmpeg filter complexity +Adding the crop filter increases the filter complex string length. For multi-section renders with many keyframes, the `reelCropX` interpolation expression could become long. **Mitigation:** The existing `buildNumericExpr()` already handles arbitrary-length keyframe lists for zoom/pan without issues. The crop expression is the same pattern. + +### Risk 4: Preview performance +Drawing the crop overlay adds `fillRect` and `strokeRect` calls per frame. **Mitigation:** These are trivial 2D canvas operations. No measurable performance impact expected. 
+ +### Risk 5: PIP too small in 9:16 even with slider +At minimum `pipScale` (0.15), PIP in reel mode = `0.15 * 608 = 91px`. This may be too small for visibility. **Mitigation:** Set minimum `pipScale` differently per mode, or enforce a minimum absolute pixel size (e.g., 80px). diff --git a/openspec/changes/archive/2026-03-19-reel-mode-9x16/proposal.md b/openspec/changes/archive/2026-03-19-reel-mode-9x16/proposal.md new file mode 100644 index 0000000..76e8b56 --- /dev/null +++ b/openspec/changes/archive/2026-03-19-reel-mode-9x16/proposal.md @@ -0,0 +1,31 @@ +## Why + +Loop currently only exports 16:9 (landscape) video. Content creators increasingly need vertical 9:16 output for Instagram Reels, TikTok, and YouTube Shorts. Today, users must use external tools to crop and reframe their recordings for vertical platforms — breaking the single-tool workflow Loop promises. Adding native reel mode lets users produce vertical content directly from the same recording, with the same smooth animated transitions Loop already provides. 
+ +## What Changes + +- Add an **output aspect ratio toggle** (16:9 / 9:16) in the editor controls +- In 9:16 mode, display a **crop overlay** on the editor preview showing the visible vertical strip within the 16:9 source, with the area outside the crop grayed out +- The crop region is **draggable** horizontally — the user positions it per section via keyframe anchors +- Crop positions **animate smoothly** between sections using the existing 0.3s transition system +- Add a **PIP size slider** to control the camera overlay size (essential since the default 422px PIP is ~70% of the 608px reel width) +- The **camera fullscreen** mode adapts to fill the 9:16 output frame +- The **ffmpeg render pipeline** produces 9:16 output (608x1080 for 1920x1080 source) by cropping after zoom/pan +- Existing zoom/pan controls continue to work, composing with the reel crop + +## Capabilities + +### New Capabilities +- `reel-crop`: Crop overlay system for selecting a 9:16 vertical strip from 16:9 source, including draggable positioning, per-section keyframe anchors, smooth animated transitions, and ffmpeg crop filter generation +- `pip-size-control`: Adjustable PIP (picture-in-picture) camera overlay size via a project-level slider, replacing the fixed 422px constant +- `output-aspect-ratio`: Project-level output mode toggle between 16:9 (landscape) and 9:16 (reel), affecting render pipeline output dimensions and editor preview + +### Modified Capabilities + +- None — no existing capability changes behavior; the current 16:9 workflow is unaffected + +## Impact + +- **Data model** (`src/shared/domain/project.js`): New keyframe property `reelCropX`, new project settings `outputMode` and `pipScale` +- **Render pipeline** (`src/main/services/render-filter-service.js`, `render-service.js`): New crop filter in ffmpeg chain, modified output resolution logic, PIP size parameterization +- **Editor UI** (`src/index.html`, `src/renderer/app.js`): New controls, crop overlay drawing, drag handling, coordinate space changes for PIP in reel mode +- **Project persistence**: New fields
serialized/deserialized in project JSON +- **Tests**: New unit tests for domain normalizers, render filter builders, and section input normalization diff --git a/openspec/changes/archive/2026-03-19-reel-mode-9x16/specs/output-aspect-ratio/spec.md b/openspec/changes/archive/2026-03-19-reel-mode-9x16/specs/output-aspect-ratio/spec.md new file mode 100644 index 0000000..88af33c --- /dev/null +++ b/openspec/changes/archive/2026-03-19-reel-mode-9x16/specs/output-aspect-ratio/spec.md @@ -0,0 +1,77 @@ +## ADDED Requirements + +### Requirement: Project output mode setting +The system SHALL support an `outputMode` project setting with two values: `'landscape'` (16:9) and `'reel'` (9:16). The default value SHALL be `'landscape'`. This setting is stored in `project.settings.outputMode` and persisted with the project JSON. + +#### Scenario: Default output mode for new projects +- **WHEN** a new project is created via `createDefaultProject()` +- **THEN** the project's `settings.outputMode` SHALL be `'landscape'` + +#### Scenario: Normalizing invalid output mode values +- **WHEN** project data is loaded with an invalid or missing `outputMode` value (undefined, null, empty string, arbitrary string) +- **THEN** `normalizeOutputMode()` SHALL return `'landscape'` + +#### Scenario: Normalizing valid reel mode +- **WHEN** project data is loaded with `outputMode` set to `'reel'` +- **THEN** `normalizeOutputMode()` SHALL return `'reel'` + +#### Scenario: Persisting output mode +- **WHEN** the user changes the output mode and the project is saved +- **THEN** the `outputMode` value SHALL be included in the serialized project settings JSON +- **AND** loading the project SHALL restore the same `outputMode` value + +### Requirement: Output mode toggle UI +The editor controls SHALL include a toggle button group allowing the user to switch between 16:9 (landscape) and 9:16 (reel) output modes. The toggle SHALL be placed in the editor playback controls bar alongside existing controls. 
+ +#### Scenario: Toggling to reel mode +- **WHEN** the user clicks the 9:16 toggle button +- **THEN** the editor SHALL set `outputMode` to `'reel'` +- **AND** the crop overlay SHALL appear on the preview canvas +- **AND** the PIP size SHALL be recalculated for the narrower canvas +- **AND** a project save SHALL be scheduled + +#### Scenario: Toggling to landscape mode +- **WHEN** the user clicks the 16:9 toggle button while in reel mode +- **THEN** the editor SHALL set `outputMode` to `'landscape'` +- **AND** the crop overlay SHALL disappear +- **AND** existing `reelCropX` keyframe values SHALL be preserved (not deleted) +- **AND** a project save SHALL be scheduled + +#### Scenario: Toggle is undoable +- **WHEN** the user toggles the output mode +- **THEN** the change SHALL be pushed to the undo stack +- **AND** pressing undo SHALL restore the previous output mode + +### Requirement: Output resolution for reel mode +When `outputMode` is `'reel'`, the `resolveOutputSize()` function SHALL return dimensions in 9:16 aspect ratio, calculated as: `outW = round(sourceHeight * 9 / 16)` (ensured even), `outH = sourceHeight` (ensured even). 
+ +#### Scenario: Reel mode output dimensions for 1920x1080 source +- **WHEN** `resolveOutputSize(1920, 1080, 'reel')` is called +- **THEN** it SHALL return `{ outW: 608, outH: 1080 }` (note: 1080 * 9/16 = 607.5, rounded to 608, already even) + +#### Scenario: Reel mode output dimensions for 2560x1440 source +- **WHEN** `resolveOutputSize(2560, 1440, 'reel')` is called +- **THEN** it SHALL return `{ outW: 810, outH: 1440 }` (1440 * 9/16 = 810, already even) + +#### Scenario: Landscape mode output dimensions unchanged +- **WHEN** `resolveOutputSize(1920, 1080, 'landscape')` is called +- **THEN** it SHALL return `{ outW: 1920, outH: 1080 }` (existing behavior, no regression) + +#### Scenario: Default mode is landscape +- **WHEN** `resolveOutputSize(1920, 1080)` is called without an `outputMode` parameter +- **THEN** it SHALL return landscape dimensions (backward compatible) + +### Requirement: Render pipeline passes output mode +The `renderComposite()` function SHALL accept `outputMode` in its options and pass it through to `buildFilterComplex()` and `buildScreenFilter()`. The ffmpeg output SHALL match the dimensions returned by `resolveOutputSize()` for the given mode. 
+ +#### Scenario: Rendering in reel mode +- **WHEN** `renderComposite()` is called with `outputMode: 'reel'` and source dimensions 1920x1080 +- **THEN** the output MP4 SHALL have dimensions 608x1080 + +#### Scenario: Rendering in landscape mode +- **WHEN** `renderComposite()` is called with `outputMode: 'landscape'` (or no outputMode) +- **THEN** the output MP4 SHALL have dimensions matching the existing 16:9 behavior + +#### Scenario: Camera black fallback uses correct dimensions +- **WHEN** a section has no camera and `outputMode` is `'reel'` +- **THEN** the black color fallback filter SHALL use reel dimensions (608x1080), not 1920x1080 diff --git a/openspec/changes/archive/2026-03-19-reel-mode-9x16/specs/pip-size-control/spec.md b/openspec/changes/archive/2026-03-19-reel-mode-9x16/specs/pip-size-control/spec.md new file mode 100644 index 0000000..3c9168d --- /dev/null +++ b/openspec/changes/archive/2026-03-19-reel-mode-9x16/specs/pip-size-control/spec.md @@ -0,0 +1,145 @@ +## ADDED Requirements + +### Requirement: pipScale project setting +The system SHALL support a `pipScale` project setting controlling the PIP camera overlay size as a fraction of the effective canvas width. The value range SHALL be 0.15 to 0.50. The default value SHALL be 0.22 (matching the current hardcoded `PIP_FRACTION`). 
+ +#### Scenario: Default pipScale for new projects +- **WHEN** a new project is created via `createDefaultProject()` +- **THEN** the project's `settings.pipScale` SHALL be 0.22 + +#### Scenario: Normalizing valid pipScale +- **WHEN** project data is loaded with `pipScale` set to 0.35 +- **THEN** `normalizePipScale()` SHALL return 0.35 + +#### Scenario: Normalizing out-of-range low pipScale +- **WHEN** project data is loaded with `pipScale` set to 0.05 +- **THEN** `normalizePipScale()` SHALL return 0.15 (clamped to minimum) + +#### Scenario: Normalizing out-of-range high pipScale +- **WHEN** project data is loaded with `pipScale` set to 0.8 +- **THEN** `normalizePipScale()` SHALL return 0.50 (clamped to maximum) + +#### Scenario: Normalizing missing pipScale +- **WHEN** project data is loaded with `pipScale` set to undefined, null, or NaN +- **THEN** `normalizePipScale()` SHALL return 0.22 (default) + +#### Scenario: Persisting pipScale +- **WHEN** the user changes the PIP size and the project is saved +- **THEN** the `pipScale` value SHALL be included in the serialized project settings +- **AND** loading the project SHALL restore the same `pipScale` value + +### Requirement: PIP size computed from pipScale and canvas width +The PIP pixel size SHALL be computed as `Math.round(effectiveCanvasW * pipScale)`, where `effectiveCanvasW` is 1920 in landscape mode or 608 (REEL_CANVAS_W) in reel mode. This replaces the current fixed `PIP_SIZE = Math.round(1920 * 0.22)` = 422. 
+ +#### Scenario: PIP size in landscape mode at default scale +- **WHEN** `outputMode` is `'landscape'` and `pipScale` is 0.22 +- **THEN** PIP size SHALL be `round(1920 * 0.22)` = 422 pixels (same as current behavior) + +#### Scenario: PIP size in reel mode at default scale +- **WHEN** `outputMode` is `'reel'` and `pipScale` is 0.22 +- **THEN** PIP size SHALL be `round(608 * 0.22)` = 134 pixels + +#### Scenario: PIP size in reel mode at increased scale +- **WHEN** `outputMode` is `'reel'` and `pipScale` is 0.35 +- **THEN** PIP size SHALL be `round(608 * 0.35)` = 213 pixels + +#### Scenario: PIP size passed to render pipeline +- **WHEN** `renderComposite()` is called +- **THEN** the `pipSize` parameter SHALL reflect the computed size from `pipScale` and the effective canvas width for the current `outputMode` + +### Requirement: PIP size slider UI +The editor controls SHALL include a range slider for adjusting `pipScale`. The slider SHALL appear only when the project has camera footage. + +#### Scenario: Slider visible with camera +- **WHEN** the editor has camera footage (`hasCamera` is true) +- **THEN** the PIP Size slider SHALL be visible in the controls bar + +#### Scenario: Slider hidden without camera +- **WHEN** the editor has no camera footage +- **THEN** the PIP Size slider SHALL be hidden + +#### Scenario: Adjusting PIP size +- **WHEN** the user moves the PIP Size slider +- **THEN** the PIP size SHALL update immediately in the preview +- **AND** the `pipScale` setting SHALL be updated +- **AND** a project save SHALL be scheduled + +#### Scenario: PIP size change is undoable +- **WHEN** the user changes the PIP size via the slider +- **THEN** the change SHALL be pushed to the undo stack + +### Requirement: PIP position re-clamping on mode change +When the output mode changes, existing PIP positions in keyframes SHALL be re-mapped to the new coordinate space to keep the camera in approximately the same visual position relative to the output frame. 
+ +#### Scenario: PIP position re-mapping on 16:9 to 9:16 switch +- **WHEN** the user switches from landscape to reel mode +- **AND** a keyframe has `pipX: 1478, pipY: 638` (bottom-right corner in 1920-space) +- **THEN** the keyframe's `pipX` SHALL be re-mapped to approximately `round(1478 * (608 / 1920))` and clamped to valid bounds within the 608-wide canvas +- **AND** `pipY` SHALL be re-clamped to valid bounds within the 1080-tall canvas + +#### Scenario: PIP snaps to nearest corner after mode toggle +- **WHEN** the output mode is toggled +- **THEN** PIP positions SHALL be snapped to the nearest corner of the new canvas dimensions using `snapToNearestCorner()` with the effective canvas width/height + +#### Scenario: PIP position re-mapping on 9:16 to 16:9 switch +- **WHEN** the user switches from reel to landscape mode +- **THEN** PIP positions SHALL be re-mapped back to the 1920x1080 coordinate space +- **AND** positions SHALL be snapped to nearest corner + +### Requirement: Corner snapping uses effective canvas dimensions +The `snapToNearestCorner()` function SHALL use the effective canvas dimensions based on `outputMode` (1920x1080 for landscape, 608x1080 for reel) when determining snap positions. 
+ +#### Scenario: Corner snap in reel mode +- **WHEN** PIP is dragged and released in reel mode with effective canvas 608x1080 +- **THEN** snap positions SHALL be calculated relative to the 608x1080 frame +- **AND** the four corners SHALL be: (margin, margin), (608-pipSize-margin, margin), (margin, 1080-pipSize-margin), (608-pipSize-margin, 1080-pipSize-margin) + +#### Scenario: Corner snap in landscape mode +- **WHEN** PIP is dragged and released in landscape mode +- **THEN** snap positions SHALL use the existing 1920x1080 dimensions (no behavior change) + +### Requirement: Default PIP position computed from effective canvas +The default PIP position (used for the first keyframe or when no anchor exists) SHALL be computed as `(effectiveCanvasW - pipSize - margin, effectiveCanvasH - pipSize - margin)` — the bottom-right corner of the effective canvas. + +#### Scenario: Default PIP position in reel mode +- **WHEN** a default PIP position is needed in reel mode with `pipScale: 0.35` (pipSize = 213) +- **THEN** defaultPipX SHALL be `608 - 213 - 15` = 380 +- **AND** defaultPipY SHALL be `1080 - 213 - 15` = 852 + +#### Scenario: Default PIP position in landscape mode +- **WHEN** a default PIP position is needed in landscape mode with `pipScale: 0.22` (pipSize = 422) +- **THEN** defaultPipX SHALL be `1920 - 422 - 20` = 1478 (same as current behavior) +- **AND** defaultPipY SHALL be `1080 - 422 - 20` = 638 (same as current behavior) + +### Requirement: Camera fullscreen adapts to output mode +When `cameraFullscreen` is true, the camera SHALL fill the output frame dimensions. In reel mode, this means scaling to 608x1080. The camera source (typically 16:9) SHALL be scaled with `force_original_aspect_ratio=increase` then center-cropped to the output dimensions. 
+ +#### Scenario: Fullscreen camera in reel mode render +- **WHEN** rendering with `cameraFullscreen: true` and `outputMode: 'reel'` +- **THEN** the camera fullscreen filter SHALL scale to 608x1080 (not 1920x1080) +- **AND** the camera source SHALL be center-cropped to fit the 9:16 frame + +#### Scenario: Fullscreen camera in reel mode preview +- **WHEN** the editor preview draws a fullscreen camera in reel mode +- **THEN** the fullscreen camera transition SHALL scale to the reel canvas dimensions (608x1080) +- **AND** the camera SHALL be drawn within the crop region boundaries + +#### Scenario: Fullscreen camera in landscape mode unchanged +- **WHEN** rendering or previewing with `cameraFullscreen: true` and `outputMode: 'landscape'` +- **THEN** behavior SHALL be identical to current implementation (camera fills 1920x1080) + +### Requirement: PIP drawn relative to crop region in preview +In reel mode, the editor preview SHALL draw the PIP at position `(cropPixelOffset + pipX, pipY)` on the 1920x1080 canvas, where `pipX`/`pipY` are in the 608x1080 reel coordinate space and `cropPixelOffset` is the crop region's left edge in canvas pixels. 
+ +#### Scenario: PIP preview position in reel mode +- **WHEN** reel mode is active with `reelCropX: 0` (cropPixelOffset = 656) and `pipX: 380` +- **THEN** the PIP SHALL be drawn at canvas position `(656 + 380, pipY)` = `(1036, pipY)` + +#### Scenario: PIP preview position in landscape mode +- **WHEN** landscape mode is active with `pipX: 1478` +- **THEN** the PIP SHALL be drawn at canvas position `(1478, pipY)` (no offset, current behavior) + +#### Scenario: PIP drag bounded to crop region +- **WHEN** the user drags PIP in reel mode +- **THEN** the drag SHALL be constrained to positions within the 608x1080 effective canvas +- **AND** the PIP SHALL NOT be draggable outside the crop region boundaries diff --git a/openspec/changes/archive/2026-03-19-reel-mode-9x16/specs/reel-crop/spec.md b/openspec/changes/archive/2026-03-19-reel-mode-9x16/specs/reel-crop/spec.md new file mode 100644 index 0000000..138cca9 --- /dev/null +++ b/openspec/changes/archive/2026-03-19-reel-mode-9x16/specs/reel-crop/spec.md @@ -0,0 +1,152 @@ +## ADDED Requirements + +### Requirement: reelCropX keyframe property +The keyframe data model SHALL include a `reelCropX` property representing the horizontal position of the 9:16 crop region within the 16:9 source frame. The value range SHALL be -1.0 (left edge) to +1.0 (right edge), with 0.0 representing center. The default value SHALL be 0. 
+ +#### Scenario: Normalizing valid reelCropX values +- **WHEN** a keyframe is normalized with `reelCropX` set to a number within [-1, 1] +- **THEN** `normalizeReelCropX()` SHALL return the value unchanged + +#### Scenario: Normalizing out-of-range reelCropX +- **WHEN** a keyframe is normalized with `reelCropX` set to -2.5 +- **THEN** `normalizeReelCropX()` SHALL return -1 (clamped to minimum) + +#### Scenario: Normalizing out-of-range positive reelCropX +- **WHEN** a keyframe is normalized with `reelCropX` set to 3.0 +- **THEN** `normalizeReelCropX()` SHALL return 1 (clamped to maximum) + +#### Scenario: Normalizing missing reelCropX +- **WHEN** a keyframe is normalized with `reelCropX` set to undefined, null, NaN, or a non-numeric string +- **THEN** `normalizeReelCropX()` SHALL return 0 (default center) + +#### Scenario: reelCropX included in normalized keyframes +- **WHEN** `normalizeKeyframes()` processes an array of raw keyframes +- **THEN** each output keyframe SHALL include a normalized `reelCropX` property + +### Requirement: reelCropX in section input normalization +The `normalizeSectionInput()` function in render-service SHALL normalize `reelCropX` on each section alongside existing `backgroundZoom`, `backgroundPanX`, `backgroundPanY`. + +#### Scenario: Section with reelCropX +- **WHEN** a section with `reelCropX: 0.5` is normalized +- **THEN** the output section SHALL include `reelCropX: 0.5` + +#### Scenario: Section without reelCropX +- **WHEN** a section without `reelCropX` is normalized +- **THEN** the output section SHALL include `reelCropX: 0` (default) + +### Requirement: Crop overlay in editor preview +When `outputMode` is `'reel'`, the editor preview canvas SHALL display a crop overlay consisting of: +1. Semi-transparent dark rectangles covering the area outside the 9:16 crop region (left and right of crop) +2. 
A dashed white border around the crop region boundary + +The crop region width SHALL be `round(CANVAS_H * 9 / 16)` = 608 pixels within the 1920x1080 canvas. The crop region height SHALL be the full canvas height (1080). + +#### Scenario: Crop overlay visible in reel mode +- **WHEN** the editor is in reel mode (`outputMode === 'reel'`) +- **THEN** the editor preview SHALL show semi-transparent dark areas outside the 9:16 crop region +- **AND** a dashed white rectangle SHALL outline the crop boundary + +#### Scenario: Crop overlay hidden in landscape mode +- **WHEN** the editor is in landscape mode (`outputMode === 'landscape'`) +- **THEN** no crop overlay SHALL be drawn on the preview canvas + +#### Scenario: Crop overlay reflects current reelCropX +- **WHEN** the current section's `reelCropX` is -1 (left edge) +- **THEN** the crop region SHALL be positioned at the left edge of the canvas +- **AND** only the right side SHALL have a dark overlay + +#### Scenario: Crop overlay updates during playback transitions +- **WHEN** the timeline plays across a keyframe boundary where `reelCropX` changes +- **THEN** the crop overlay SHALL smoothly animate to the new position using the same 0.3s transition duration as other keyframe properties + +### Requirement: Draggable crop region +In reel mode, the user SHALL be able to drag the crop region horizontally on the editor preview canvas to reposition it. Dragging SHALL update the `reelCropX` property of the current section's anchor keyframe. 
+ +#### Scenario: Dragging crop region +- **WHEN** the user clicks inside the crop region and drags horizontally +- **THEN** the crop region SHALL follow the mouse movement horizontally +- **AND** the active section's anchor keyframe `reelCropX` SHALL be updated to reflect the new position +- **AND** the value SHALL be clamped to the [-1, 1] range + +#### Scenario: Drag push to undo stack +- **WHEN** the user begins dragging the crop region +- **THEN** the state before the drag SHALL be pushed to the undo stack + +#### Scenario: Drag does not work in landscape mode +- **WHEN** the editor is in landscape mode +- **THEN** horizontal drag on the preview canvas SHALL NOT trigger crop region movement + +### Requirement: Smooth animated crop transitions during rendering +When keyframes have different `reelCropX` values, the ffmpeg render pipeline SHALL produce smooth animated transitions between crop positions using the same 0.3s `TRANSITION_DURATION` as other keyframe properties. + +#### Scenario: Animated crop in ffmpeg filter +- **WHEN** two consecutive keyframes have `reelCropX` values of -0.5 and 0.5 +- **THEN** the ffmpeg filter chain SHALL include a `crop` filter with a dynamic X expression built by `buildNumericExpr()` that interpolates between the corresponding pixel offsets over the 0.3s transition window + +#### Scenario: Static crop position +- **WHEN** all keyframes have the same `reelCropX` value of 0 +- **THEN** the ffmpeg crop filter SHALL use a static X offset (no interpolation needed) + +#### Scenario: Crop filter placement in pipeline +- **WHEN** the render pipeline builds the screen filter for reel mode +- **THEN** the crop filter SHALL be placed AFTER the zoompan filter (or after the base scale if no zoom animation exists) +- **AND** the crop SHALL output at `REEL_W x REEL_H` resolution + +### Requirement: Smooth animated crop transitions in editor preview +The `getStateAtTime()` function SHALL interpolate `reelCropX` between keyframes using the same 
transition logic as other properties (linear blend over 0.3s when approaching the next keyframe). + +#### Scenario: Preview interpolation of reelCropX +- **WHEN** the playhead is within 0.3s before a keyframe that changes `reelCropX` +- **THEN** `getStateAtTime()` SHALL return an interpolated `reelCropX` value blending between the current and next keyframe values + +#### Scenario: No transition when values match +- **WHEN** adjacent keyframes have the same `reelCropX` value +- **THEN** no interpolation SHALL occur for `reelCropX` + +### Requirement: reelCropX in render keyframes and sections +The `getRenderKeyframes()` and `getRenderSections()` functions SHALL include `reelCropX` in their output, alongside existing `backgroundZoom`, `backgroundPanX`, `backgroundPanY`. + +#### Scenario: Render keyframes include reelCropX +- **WHEN** `getRenderKeyframes()` is called +- **THEN** each keyframe in the output SHALL include a `reelCropX` property clamped to [-1, 1] + +#### Scenario: Render sections include reelCropX +- **WHEN** `getRenderSections()` is called +- **THEN** each section in the output SHALL include a `reelCropX` property from its anchor keyframe + +### Requirement: reelCropX propagation in section operations +Section operations that copy or create keyframe properties SHALL include `reelCropX`: + +#### Scenario: Section split inherits reelCropX +- **WHEN** a section is split at the playhead +- **THEN** the new section's anchor keyframe SHALL inherit `reelCropX` from the parent section's anchor + +#### Scenario: Apply to future copies reelCropX +- **WHEN** the user clicks "Apply to Future" +- **THEN** all future sections' anchor keyframes SHALL receive the current section's `reelCropX` value + +#### Scenario: Default anchor keyframe includes reelCropX +- **WHEN** a new section anchor keyframe is created as a fallback (no existing anchor) +- **THEN** it SHALL include `reelCropX: 0` (center default) + +### Requirement: Crop pixel offset calculation +The 
conversion from `reelCropX` (-1 to +1) to pixel X offset SHALL follow the formula: `pixelOffset = ((reelCropX + 1) / 2) * (sourceWidth - cropWidth)`, clamped to `[0, sourceWidth - cropWidth]`. + +#### Scenario: Center crop calculation +- **WHEN** `reelCropX` is 0 and source is 1920px wide with 608px crop +- **THEN** pixel offset SHALL be `((0 + 1) / 2) * (1920 - 608)` = 656 + +#### Scenario: Left edge crop calculation +- **WHEN** `reelCropX` is -1 +- **THEN** pixel offset SHALL be `((-1 + 1) / 2) * (1920 - 608)` = 0 + +#### Scenario: Right edge crop calculation +- **WHEN** `reelCropX` is 1 +- **THEN** pixel offset SHALL be `((1 + 1) / 2) * (1920 - 608)` = 1312 + +### Requirement: reelCropX persisted in project save +The `reelCropX` property SHALL be included in keyframe serialization via `getProjectTimelineSnapshot()` and round-tripped through `normalizeKeyframes()` on load. + +#### Scenario: Save and reload preserves reelCropX +- **WHEN** a project with keyframes containing `reelCropX: 0.75` is saved and reloaded +- **THEN** the loaded keyframes SHALL contain `reelCropX: 0.75` diff --git a/openspec/changes/archive/2026-03-19-reel-mode-9x16/tasks.md b/openspec/changes/archive/2026-03-19-reel-mode-9x16/tasks.md new file mode 100644 index 0000000..b1d0f7a --- /dev/null +++ b/openspec/changes/archive/2026-03-19-reel-mode-9x16/tasks.md @@ -0,0 +1,91 @@ +## 1. 
Domain Model (src/shared/domain/project.js) + +- [x] 1.1 Add `normalizeReelCropX()` function (clamp to [-1, 1], default 0), export constants `MIN_REEL_CROP_X`, `MAX_REEL_CROP_X` +- [x] 1.2 Add `normalizeOutputMode()` function (return `'landscape'` or `'reel'`), export constants `OUTPUT_MODE_LANDSCAPE`, `OUTPUT_MODE_REEL` +- [x] 1.3 Add `normalizePipScale()` function (clamp to [0.15, 0.50], default 0.22), export constants `MIN_PIP_SCALE`, `MAX_PIP_SCALE`, `DEFAULT_PIP_SCALE` +- [x] 1.4 Extend `normalizeKeyframes()` to include `reelCropX` property on each keyframe +- [x] 1.5 Extend `createDefaultProject()` to include `outputMode: 'landscape'` and `pipScale: 0.22` in settings +- [x] 1.6 Extend `normalizeProjectData()` to hydrate `outputMode` and `pipScale` in settings + +## 2. Domain Model Unit Tests (tests/unit/project-domain.test.js) + +- [x] 2.1 Add tests for `normalizeReelCropX`: valid values, out-of-range clamping, invalid input defaults to 0 +- [x] 2.2 Add tests for `normalizeOutputMode`: `'reel'` returns `'reel'`, invalid/missing returns `'landscape'` +- [x] 2.3 Add tests for `normalizePipScale`: valid values, clamping, invalid defaults to 0.22 +- [x] 2.4 Add tests for `normalizeKeyframes` including `reelCropX` preservation +- [x] 2.5 Add tests for `createDefaultProject` including `outputMode` and `pipScale` in settings +- [x] 2.6 Add tests for `normalizeProjectData` hydrating `outputMode` and `pipScale` + +## 3. 
Render Filter Service (src/main/services/render-filter-service.js) + +- [x] 3.1 Modify `resolveOutputSize()` to accept `outputMode` parameter and return 9:16 dimensions for `'reel'` +- [x] 3.2 Modify `buildScreenFilter()` to accept `outputMode` parameter; when `'reel'`, append `crop=REEL_W:REEL_H:X_EXPR:0` filter after zoompan using `buildNumericExpr()` for animated `reelCropX` +- [x] 3.3 Modify `buildFilterComplex()` to accept `outputMode` parameter and pass through to `resolveOutputSize()` and `buildScreenFilter()`; PIP scaling uses reel output dimensions automatically + +## 4. Render Filter Service Unit Tests (tests/unit/render-filter-service.test.js) + +- [x] 4.1 Add tests for `resolveOutputSize()` with `'reel'` mode: 1920x1080 → 608x1080, 2560x1440 → 810x1440 +- [x] 4.2 Add tests for `resolveOutputSize()` backward compatibility: no `outputMode` param returns landscape dimensions +- [x] 4.3 Add tests for `buildScreenFilter()` with reel mode: output contains `crop=608:1080` in filter string +- [x] 4.4 Add tests for `buildScreenFilter()` with reel mode and animated `reelCropX`: filter contains interpolation expression +- [x] 4.5 Add tests for `buildFilterComplex()` with reel mode: correct output dimensions in filter, PIP scaling correct + +## 5. Render Service (src/main/services/render-service.js) + +- [x] 5.1 Import `normalizeReelCropX` and `normalizeOutputMode` from shared domain +- [x] 5.2 Extend `normalizeSectionInput()` to include `reelCropX` field +- [x] 5.3 Extend `renderComposite()` to read `outputMode` from opts and pass to `buildFilterComplex()` and `buildScreenFilter()` +- [x] 5.4 Fix camera black fallback to use reel dimensions when `outputMode === 'reel'` (line 259: `color=black:s=...`) + +## 6. 
Render Service Unit Tests (tests/unit/render-service.test.js) + +- [x] 6.1 Add tests for `normalizeSectionInput()` normalizing `reelCropX` on sections +- [x] 6.2 Add tests for `renderComposite()` passing `outputMode` through to filter builders (verify via mock/spy) + +## 7. Editor HTML (src/index.html) + +- [x] 7.1 Add output mode toggle buttons (16:9 / 9:16) in the editor controls bar after the Zoom control +- [x] 7.2 Add PIP Size slider (`input[type=range]` min=0.15 max=0.50 step=0.01) in the controls bar, visible only when camera is present + +## 8. Editor Logic — State & Controls (src/renderer/app.js) + +- [x] 8.1 Add reel-mode constants: `REEL_CANVAS_W = Math.round(CANVAS_H * 9 / 16)`, `REEL_CANVAS_H = CANVAS_H` +- [x] 8.2 Add DOM refs for new HTML elements (mode toggle buttons, PIP size slider) +- [x] 8.3 Implement `setOutputMode(mode)`: toggle state, recalculate PIP defaults, re-map PIP positions, snap to corner, push undo, schedule save, update UI +- [x] 8.4 Implement mode toggle button event listeners +- [x] 8.5 Implement `updateOutputModeUI()`: toggle active/inactive button styles, show/hide reel-specific controls +- [x] 8.6 Implement PIP size slider: event handler updates `pipScale` setting, recalculates `pipSize`, updates PIP defaults, pushes undo, schedules save +- [x] 8.7 Modify `snapToNearestCorner()` to accept effective canvas dimensions (or derive from `editorState.outputMode`) + +## 9. 
Editor Logic — Keyframe & Section Integration (src/renderer/app.js) + +- [x] 9.1 Extend `getStateAtTime()`: add `reelCropX` to default keyframe, interpolate during transitions, include in return object +- [x] 9.2 Extend `getRenderKeyframes()`: include `reelCropX` in minimal keyframe output +- [x] 9.3 Extend `getRenderSections()`: include `reelCropX` from section anchor keyframe +- [x] 9.4 Extend render call (`renderComposite` invocation): pass `outputMode: editorState.outputMode` +- [x] 9.5 Extend `getSectionAnchorKeyframe()` fallback: include `reelCropX: 0` +- [x] 9.6 Extend `syncSectionAnchorKeyframes()`: include `reelCropX` in synced properties +- [x] 9.7 Extend `applyStyleToFutureSections()`: copy `reelCropX` to future section anchors +- [x] 9.8 Extend `buildSplitAnchorKeyframe()` in `keyframe-ops.js`: include `reelCropX` from parent + +## 10. Editor Logic — Preview & Interaction (src/renderer/app.js) + +- [x] 10.1 Modify `editorDrawLoop()`: after drawing screen + camera, if reel mode, draw semi-transparent dark overlay outside crop region and dashed crop boundary +- [x] 10.2 Modify `editorDrawLoop()`: in reel mode, offset PIP drawing by crop region's pixel X position (PIP coords are in reel-space, preview is in full canvas space) +- [x] 10.3 Implement crop region drag handling: mousedown detects drag start within crop region, mousemove updates `reelCropX` of active section's anchor keyframe, mouseup finalizes +- [x] 10.4 Modify PIP drag handling: in reel mode, constrain drag to effective canvas dimensions (608x1080) and offset mouse coordinates by crop pixel offset +- [x] 10.5 Modify fullscreen camera preview: in reel mode, scale fullscreen camera transition to reel canvas dimensions + +## 11. 
Editor Logic — Project Persistence (src/renderer/app.js) + +- [x] 11.1 Extend `getProjectTimelineSnapshot()`: include `reelCropX` in keyframe serialization +- [x] 11.2 Extend `buildProjectSavePayload()`: include `outputMode` and `pipScale` in settings +- [x] 11.3 Extend editor initialization (`openEditor`/`loadProject`): restore `outputMode` and `pipScale` from loaded project, compute effective canvas dimensions and PIP size + +## 12. Verification + +- [x] 12.1 Run `npm run check` — all tests pass, lint clean, typecheck clean +- [ ] 12.2 Manual test: record a short clip, switch to 9:16, adjust crop per section, render, verify output is vertical 608x1080 MP4 with smooth crop transitions +- [ ] 12.3 Manual test: verify PIP size slider works in both modes, PIP stays within crop bounds in reel mode +- [ ] 12.4 Manual test: verify toggle 16:9 ↔ 9:16 preserves crop positions and re-maps PIP correctly +- [ ] 12.5 Manual test: verify existing 16:9 workflow is completely unaffected (backward compatibility) diff --git a/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/.openspec.yaml b/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/.openspec.yaml new file mode 100644 index 0000000..4e61834 --- /dev/null +++ b/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/.openspec.yaml @@ -0,0 +1,2 @@ +schema: spec-driven +created: 2026-03-19 diff --git a/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/design.md b/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/design.md new file mode 100644 index 0000000..714dc87 --- /dev/null +++ b/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/design.md @@ -0,0 +1,65 @@ +## Context + +The reel mode (9:16) feature is implemented. In reel mode, a 608px-wide vertical crop is taken from the 1920px-wide landscape canvas. The existing `backgroundZoom` (1x–3x) zooms into the source before cropping. 
Users want to zoom OUT — show more than 608px of content width by shrinking the content, with the resulting letterbox bars (top/bottom) filled attractively. + +Current zoom range: `MIN_BACKGROUND_ZOOM=1` to `MAX_BACKGROUND_ZOOM=3` in `src/shared/domain/project.js`. The zoom is applied via ffmpeg `zoompan` filter and the editor's `drawEditorScreenWithZoom()`. + +## Goals / Non-Goals + +**Goals:** +- Allow zoom values < 1.0 in reel mode to "zoom out" and show more content width +- Fill the vertical letterbox bars with a darkened scaled copy of the content (Option B — no blur, just darken to ~20-30% brightness) +- Consistent visual between editor preview and ffmpeg render output +- Minimal performance impact in the editor draw loop + +**Non-Goals:** +- Real-time Gaussian blur in the preview (too expensive) +- Zoom-out in landscape mode (no letterbox bars to fill in 16:9) +- Animated background fill transitions (the darkened background just follows the content) + +## Decisions + +### 1. Zoom range is mode-dependent + +In landscape mode, zoom stays 1.0–3.0. In reel mode, zoom extends to 0.5–3.0. The minimum of 0.5 means showing up to ~1216px of source width in the 608px frame — content occupies ~50% of the frame height, which is the practical limit before the content becomes too small. + +**In the domain model:** `normalizeBackgroundZoom` keeps its current 1.0–3.0 range. A new `normalizeReelBackgroundZoom` (or a mode-aware variant) clamps to 0.5–3.0. The editor's `clampSectionZoom` becomes mode-aware so the slider and keyframe system use the right bounds. + +**Alternative considered:** A separate "reel zoom" property. Rejected because it's conceptually the same axis — how much of the source is visible. Reusing `backgroundZoom` with an extended range is simpler and integrates automatically with existing keyframe interpolation. + +### 2. Zoom slider range updates dynamically + +When switching to reel mode, the HTML slider's `min` attribute changes from `1` to `0.5`. 
When switching back to landscape, it reverts to `1` and any zoom values < 1 are clamped up to 1. This happens in `updateOutputModeUI()` and `setOutputMode()`. + +### 3. Editor preview: darkened scaled copy (no blur) + +When zoom < 1 in reel mode, the draw loop: +1. Draws the screen content into the crop region at the zoomed-out scale (centered vertically) +2. For the background, takes the same content, scales it to fill the full crop area (608x1080), draws it at very low opacity (~0.2) over a black background + +Implementation in `editorDrawLoop`: +- Draw black fill for the crop region +- Draw the crop-region content scaled to fill → draw with `globalAlpha = 0.2` +- Draw the actual zoomed-out content centered over it at full opacity + +This is just 2 extra `drawImage` calls per frame — negligible cost. + +### 4. FFmpeg pipeline: split + overlay + +When any keyframe has zoom < 1 in reel mode, the filter chain adds: +1. After the crop, split the stream +2. One branch: scale to fill 608x1080 + darken via `colorlevels=romax=0.3:gomax=0.3:bomax=0.3` (output maximums lowered to ~30% brightness) +3. Other branch: scale to fit within 608x1080 (preserving aspect), pad with transparent to 608x1080 +4. Overlay the sharp content on the dark fill + +If zoom is animated (transitions between zoom-out and zoom-in), the scale expressions need to be dynamic. This adds complexity to `buildScreenFilter` but follows the existing `buildNumericExpr` pattern. + +### 5. Pan behavior during zoom-out + +When zoom < 1, the content is smaller than the frame width, so horizontal pan has no effect (the content is fully visible). The pan controls should be disabled or ignored when zoom < 1. Vertical centering is fixed (content is centered in the 608x1080 frame). + +## Risks / Trade-offs + +- **Zoom transitions crossing 1.0**: Animating from zoom 0.7 to zoom 1.5 crosses the boundary where the darkened background appears/disappears. The background opacity should fade smoothly rather than popping in/out. 
→ Mitigation: Interpolate background opacity based on zoom level near the 1.0 boundary. +- **Existing projects with zoom=1**: No impact — zoom ≥ 1 follows the existing pipeline unchanged. The new code path only activates when zoom < 1 in reel mode. +- **FFmpeg filter complexity**: The split/overlay adds filter nodes but only when zoom < 1 is actually used. No impact on landscape renders or reel renders with zoom ≥ 1. diff --git a/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/proposal.md b/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/proposal.md new file mode 100644 index 0000000..d3c1043 --- /dev/null +++ b/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/proposal.md @@ -0,0 +1,25 @@ +## Why + +In reel mode (9:16), users may want to show more of their screen content than fits in the 608px-wide crop. Currently the zoom slider only goes 1x–3x (zoom in). Extending it below 1x ("zoom out") lets users shrink the content to fit more width, with the vertical letterbox bars filled by a darkened copy of the same content — a common pattern in social media reels that looks polished without being distracting. + +## What Changes + +- Extend the `backgroundZoom` range to support values below 1.0 (e.g. 
0.5–1.0) when in reel mode, while keeping the 1.0–3.0 range in landscape mode +- When reel zoom < 1, the content is scaled to fit the crop width, leaving empty vertical space above and below +- The empty vertical space is filled with a darkened, scaled-up copy of the crop content (Option B: no blur, just heavy darkening to ~20–30% brightness) +- Both the editor preview canvas and the ffmpeg render pipeline produce the same visual result +- The zoom slider's min value becomes dynamic: 0.5 in reel mode, 1.0 in landscape mode + +## Capabilities + +### New Capabilities +- `reel-zoom-out`: Zoom-out (< 1x) support in reel mode with darkened background fill for letterbox bars + +### Modified Capabilities + +## Impact + +- `src/renderer/app.js` — editor preview draw loop must render the darkened background + scaled content when zoom < 1 in reel mode; zoom slider range becomes mode-dependent +- `src/main/services/render-filter-service.js` — ffmpeg filter chain needs a split/overlay pipeline when reel zoom < 1: one branch for the darkened fill, one for the sharp content +- `src/shared/domain/project.js` — `normalizeBackgroundZoom` may need to accept values below 1.0 (or a new normalizer for reel zoom) +- `src/index.html` — zoom slider `min` attribute updated dynamically based on output mode diff --git a/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/specs/reel-zoom-out/spec.md b/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/specs/reel-zoom-out/spec.md new file mode 100644 index 0000000..e4ad9eb --- /dev/null +++ b/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/specs/reel-zoom-out/spec.md @@ -0,0 +1,75 @@ +## ADDED Requirements + +### Requirement: Zoom-out range in reel mode +The system SHALL allow `backgroundZoom` values between 0.5 and 3.0 (inclusive) when `outputMode` is `'reel'`. In landscape mode, the zoom range SHALL remain 1.0–3.0. When switching from reel mode to landscape mode, any zoom value below 1.0 SHALL be clamped to 1.0. 
+ +#### Scenario: Zoom slider min in reel mode +- **WHEN** the output mode is set to `'reel'` +- **THEN** the zoom slider's minimum value SHALL be 0.5 + +#### Scenario: Zoom slider min in landscape mode +- **WHEN** the output mode is set to `'landscape'` +- **THEN** the zoom slider's minimum value SHALL be 1.0 + +#### Scenario: Zoom clamping on mode switch +- **WHEN** a section has `backgroundZoom` of 0.7 and the user switches from reel to landscape mode +- **THEN** the section's `backgroundZoom` SHALL be clamped to 1.0 + +#### Scenario: Domain normalizer accepts reel zoom values +- **WHEN** `normalizeBackgroundZoom` is called with value 0.5 and mode `'reel'` +- **THEN** it SHALL return 0.5 + +#### Scenario: Domain normalizer clamps below reel minimum +- **WHEN** `normalizeBackgroundZoom` is called with value 0.3 and mode `'reel'` +- **THEN** it SHALL return 0.5 + +### Requirement: Darkened background fill for zoom-out +When `backgroundZoom` is less than 1.0 in reel mode, the system SHALL fill the vertical letterbox bars with a darkened, scaled-up copy of the crop region content. The background content SHALL be darkened to approximately 20–30% of original brightness. 
+ +#### Scenario: Editor preview with zoom-out +- **WHEN** reel mode is active and `backgroundZoom` is 0.7 +- **THEN** the editor preview SHALL show the content centered vertically within the crop region, with darkened content filling the top and bottom bars + +#### Scenario: FFmpeg render with zoom-out +- **WHEN** rendering in reel mode with `backgroundZoom` of 0.7 +- **THEN** the output video SHALL show the content centered vertically within the 608x1080 frame, with darkened content filling the top and bottom bars + +#### Scenario: Zoom at exactly 1.0 +- **WHEN** reel mode is active and `backgroundZoom` is 1.0 +- **THEN** no darkened background SHALL be drawn — the content fills the crop region completely (no letterbox bars) + +### Requirement: Content scaling during zoom-out +When zoom < 1.0 in reel mode, the content SHALL be scaled uniformly by the zoom factor (filling the 608px crop width for 1080p source) while preserving aspect ratio. The content SHALL be vertically centered within the frame. + +#### Scenario: Content dimensions at zoom 0.5 +- **WHEN** reel mode is active with 1920x1080 source and `backgroundZoom` is 0.5 +- **THEN** the content SHALL occupy the full 608px width and approximately 540px height (1080 × 0.5), centered vertically in the 1080px frame + +#### Scenario: Content dimensions at zoom 0.75 +- **WHEN** reel mode is active with 1920x1080 source and `backgroundZoom` is 0.75 +- **THEN** the content SHALL occupy the full 608px width and approximately 810px height, centered vertically + +### Requirement: Pan disabled during zoom-out +When `backgroundZoom` is less than 1.0, background pan controls SHALL have no visual effect since the content is fully visible and smaller than the frame. 
+ +#### Scenario: Pan has no effect at zoom 0.7 +- **WHEN** reel mode is active, `backgroundZoom` is 0.7, and `backgroundPanX` is set to 1.0 +- **THEN** the content SHALL remain centered — pan values are ignored when zoom < 1.0 + +### Requirement: Smooth zoom transitions across 1.0 boundary +When zoom animates between a value below 1.0 and a value above 1.0 (crossing the boundary), the darkened background SHALL fade smoothly rather than appearing/disappearing abruptly. + +#### Scenario: Animated zoom from 0.7 to 1.5 +- **WHEN** a keyframe transition animates `backgroundZoom` from 0.7 to 1.5 +- **THEN** the darkened background SHALL gradually fade out as zoom approaches 1.0, and the content SHALL scale smoothly throughout the transition + +### Requirement: Backward compatibility +Existing projects with zoom values between 1.0 and 3.0 SHALL be completely unaffected. The zoom-out pipeline only activates when zoom < 1.0 in reel mode. + +#### Scenario: Landscape render unchanged +- **WHEN** rendering in landscape mode with `backgroundZoom` of 2.0 +- **THEN** the output SHALL be identical to the output before this change + +#### Scenario: Reel render with zoom >= 1 unchanged +- **WHEN** rendering in reel mode with `backgroundZoom` of 1.5 +- **THEN** the output SHALL be identical to the output before this change (zoom + crop, no darkened background) diff --git a/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/tasks.md b/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/tasks.md new file mode 100644 index 0000000..d48bb12 --- /dev/null +++ b/openspec/changes/archive/2026-03-19-reel-zoom-out-blur-bg/tasks.md @@ -0,0 +1,45 @@ +## 1. 
Domain Model (src/shared/domain/project.js) + +- [x] 1.1 Add `MIN_REEL_BACKGROUND_ZOOM = 0.5` constant and export it +- [x] 1.2 Extend `normalizeBackgroundZoom` to accept an optional `outputMode` parameter: when `'reel'`, clamp to [0.5, 3]; otherwise keep [1, 3] +- [x] 1.3 Add unit tests for `normalizeBackgroundZoom` with reel mode: 0.5 returns 0.5, 0.3 returns 0.5, 0.7 returns 0.7, null returns 0.5 + +## 2. Editor Logic — Zoom Range (src/renderer/app.js) + +- [x] 2.1 Make `clampSectionZoom()` mode-aware: use min 0.5 when `editorState.outputMode === 'reel'`, otherwise min 1.0 +- [x] 2.2 Update `updateOutputModeUI()` to set zoom slider `min` attribute to `0.5` in reel mode, `1` in landscape mode +- [x] 2.3 Update `setOutputMode()`: when switching from reel to landscape, clamp all keyframe `backgroundZoom` values below 1.0 up to 1.0 +- [x] 2.4 Update zoom slider display format to show values < 1 properly (e.g. `0.70x`) + +## 3. Editor Preview — Zoom-Out Drawing (src/renderer/app.js) + +- [x] 3.1 Modify `drawEditorScreenWithZoom()` to handle zoom < 1 in reel mode: draw black fill, then darkened scaled-to-fill content at ~20% opacity, then sharp zoomed-out content centered vertically +- [x] 3.2 In the reel crop overlay section of `editorDrawLoop()`, ensure the darkened background is drawn within the crop region bounds (not outside it) +- [x] 3.3 Ensure pan values are visually ignored when zoom < 1 (content always centered) + +## 4. 
FFmpeg Render Pipeline (src/main/services/render-filter-service.js) + +- [x] 4.1 Modify `buildScreenFilter()`: when reel mode and any keyframe has zoom < 1, build a split/overlay pipeline — one branch darkened fill, one branch sharp content, composited together +- [x] 4.2 Handle animated zoom that crosses the 1.0 boundary: the darkened background should fade based on zoom level expression +- [x] 4.3 Handle static zoom < 1 case (no animation): simpler filter with fixed scale + overlay +- [x] 4.4 Ensure zoom >= 1 in reel mode is completely unchanged (backward compatibility) + +## 5. Render Filter Tests (tests/unit/render-filter-service.test.js) + +- [x] 5.1 Add test: `buildScreenFilter` with reel mode and static zoom 0.7 produces split/overlay filter with darkened fill +- [x] 5.2 Add test: `buildScreenFilter` with reel mode and animated zoom crossing 1.0 produces correct expressions +- [x] 5.3 Add test: `buildScreenFilter` with reel mode and zoom >= 1 remains unchanged +- [x] 5.4 Add test: `resolveOutputSize` behavior unchanged + +## 6. Domain Model Tests (tests/unit/project-domain.test.js) + +- [x] 6.1 Add tests for `normalizeBackgroundZoom` with reel outputMode parameter +- [x] 6.2 Add tests verifying backward compatibility: calls without outputMode unchanged + +## 7. 
Verification + +- [x] 7.1 Run `npm run check` — all tests pass, lint clean, typecheck clean +- [ ] 7.2 Manual test: in reel mode, drag zoom slider below 1.0 — content shrinks with dark background fill +- [ ] 7.3 Manual test: render a reel video with zoom-out sections — output shows darkened background +- [ ] 7.4 Manual test: switch from reel with zoom 0.7 to landscape — zoom snaps to 1.0 +- [ ] 7.5 Manual test: landscape mode and reel mode with zoom >= 1 completely unaffected diff --git a/openspec/specs/pip-overlay/spec.md b/openspec/specs/pip-overlay/spec.md new file mode 100644 index 0000000..99b5bf9 --- /dev/null +++ b/openspec/specs/pip-overlay/spec.md @@ -0,0 +1,179 @@ +## Requirements + +### Requirement: pipScale normalization +The system SHALL support a `pipScale` value controlling the PIP camera overlay size as a fraction of the effective canvas width. The value range SHALL be 0.15 to 0.50. The default value SHALL be 0.22. + +#### Scenario: Default pipScale for new projects +- **WHEN** a new project is created via `createDefaultProject()` +- **THEN** the project's `settings.pipScale` SHALL be 0.22 + +#### Scenario: Normalizing valid pipScale +- **WHEN** project data is loaded with `pipScale` set to 0.35 +- **THEN** `normalizePipScale()` SHALL return 0.35 + +#### Scenario: Normalizing out-of-range low pipScale +- **WHEN** project data is loaded with `pipScale` set to 0.05 +- **THEN** `normalizePipScale()` SHALL return 0.15 (clamped to minimum) + +#### Scenario: Normalizing out-of-range high pipScale +- **WHEN** project data is loaded with `pipScale` set to 0.8 +- **THEN** `normalizePipScale()` SHALL return 0.50 (clamped to maximum) + +#### Scenario: Normalizing missing pipScale +- **WHEN** project data is loaded with `pipScale` set to undefined, null, or NaN +- **THEN** `normalizePipScale()` SHALL return 0.22 (default) + +#### Scenario: Persisting pipScale +- **WHEN** the user changes the PIP size and the project is saved +- **THEN** the `pipScale` value 
SHALL be included in the serialized project settings and keyframes +- **AND** loading the project SHALL restore the same `pipScale` values + +### Requirement: Per-section PIP scale +The system SHALL store `pipScale` as a per-keyframe property on section anchor keyframes, allowing each section to have an independent PIP size. + +#### Scenario: PIP size varies between sections +- **WHEN** section A has `pipScale` 0.22 and section B has `pipScale` 0.40 +- **THEN** the PIP overlay in section A SHALL be 22% of the effective canvas width, and in section B SHALL be 40% + +#### Scenario: Smooth PIP size transition +- **WHEN** transitioning from a section with `pipScale` 0.22 to one with `pipScale` 0.40 +- **THEN** the PIP size SHALL animate linearly over the 0.3s transition window + +#### Scenario: Apply to Future Sections includes pipScale +- **WHEN** the user applies style to future sections +- **THEN** `pipScale` SHALL be copied alongside zoom, pan, cropX, and other section properties + +#### Scenario: Section split inherits pipScale +- **WHEN** a section is split at the playhead +- **THEN** the new section's anchor keyframe SHALL inherit `pipScale` from the parent section's anchor + +#### Scenario: Backward compatibility +- **WHEN** a project saved before per-section pipScale is loaded (keyframes have no `pipScale`) +- **THEN** all sections SHALL use `settings.pipScale` (or 0.22 if absent) as their `pipScale` + +### Requirement: PIP size computed from pipScale and canvas width +The PIP pixel size SHALL be computed as `Math.round(effectiveCanvasW * pipScale)`, where `effectiveCanvasW` is 1920 in landscape mode or 608 (REEL_CANVAS_W) in reel mode. 
+ +#### Scenario: PIP size in landscape mode at default scale +- **WHEN** `outputMode` is `'landscape'` and `pipScale` is 0.22 +- **THEN** PIP size SHALL be `round(1920 * 0.22)` = 422 pixels + +#### Scenario: PIP size in reel mode at default scale +- **WHEN** `outputMode` is `'reel'` and `pipScale` is 0.22 +- **THEN** PIP size SHALL be `round(608 * 0.22)` = 134 pixels + +#### Scenario: PIP size in reel mode at increased scale +- **WHEN** `outputMode` is `'reel'` and `pipScale` is 0.35 +- **THEN** PIP size SHALL be `round(608 * 0.35)` = 213 pixels + +### Requirement: PIP size slider UI +The editor controls SHALL include a range slider for adjusting `pipScale`. The slider SHALL appear only when the project has camera footage. The slider controls the current section's anchor keyframe `pipScale`. + +#### Scenario: Slider visible with camera +- **WHEN** the editor has camera footage (`hasCamera` is true) +- **THEN** the PIP Size slider SHALL be visible in the controls bar + +#### Scenario: Slider hidden without camera +- **WHEN** the editor has no camera footage +- **THEN** the PIP Size slider SHALL be hidden + +#### Scenario: Adjusting PIP size +- **WHEN** the user moves the PIP Size slider while a section is selected +- **THEN** only that section's anchor keyframe `pipScale` SHALL be updated +- **AND** the PIP size SHALL update immediately in the preview +- **AND** a project save SHALL be scheduled + +#### Scenario: PIP size change is undoable +- **WHEN** the user changes the PIP size via the slider +- **THEN** the change SHALL be pushed to the undo stack + +#### Scenario: Slider updates on section change +- **WHEN** the user selects a different section +- **THEN** the PIP Size slider SHALL update to reflect that section's `pipScale` value + +### Requirement: PIP position re-snap on scale change +When `pipScale` changes for a section, the PIP position SHALL be re-snapped to the nearest corner using the new size, maintaining proper margins from the edges. 
+ +#### Scenario: Resize re-snaps position +- **WHEN** a section's `pipScale` changes from 0.22 to 0.40 +- **THEN** the PIP's `pipX` and `pipY` SHALL be recalculated to snap to the nearest corner with the new size + +### Requirement: PIP position re-clamping on mode change +When the output mode changes, existing PIP positions in keyframes SHALL be re-mapped to the new coordinate space to keep the camera in approximately the same visual position relative to the output frame. + +#### Scenario: PIP position re-mapping on 16:9 to 9:16 switch +- **WHEN** the user switches from landscape to reel mode +- **AND** a keyframe has `pipX: 1478, pipY: 638` (bottom-right corner in 1920-space) +- **THEN** the keyframe's `pipX` SHALL be re-mapped and clamped to valid bounds within the 608-wide canvas +- **AND** positions SHALL be snapped to nearest corner + +#### Scenario: PIP position re-mapping on 9:16 to 16:9 switch +- **WHEN** the user switches from reel to landscape mode +- **THEN** PIP positions SHALL be re-mapped back to the 1920x1080 coordinate space +- **AND** positions SHALL be snapped to nearest corner + +### Requirement: Corner snapping uses effective canvas dimensions +The `snapToNearestCorner()` function SHALL use the effective canvas dimensions based on `outputMode` (1920x1080 for landscape, 608x1080 for reel) when determining snap positions. 
+ +#### Scenario: Corner snap in reel mode +- **WHEN** PIP is dragged and released in reel mode with effective canvas 608x1080 +- **THEN** snap positions SHALL be calculated relative to the 608x1080 frame + +#### Scenario: Corner snap in landscape mode +- **WHEN** PIP is dragged and released in landscape mode +- **THEN** snap positions SHALL use the existing 1920x1080 dimensions (no behavior change) + +### Requirement: Default PIP position computed from effective canvas +The default PIP position SHALL be computed as `(effectiveCanvasW - pipSize - margin, effectiveCanvasH - pipSize - margin)` — the bottom-right corner of the effective canvas. + +#### Scenario: Default PIP position in reel mode +- **WHEN** a default PIP position is needed in reel mode with `pipScale: 0.35` (pipSize = 213) +- **THEN** defaultPipX SHALL be `608 - 213 - 15` = 380 +- **AND** defaultPipY SHALL be `1080 - 213 - 15` = 852 + +#### Scenario: Default PIP position in landscape mode +- **WHEN** a default PIP position is needed in landscape mode with `pipScale: 0.22` (pipSize = 422) +- **THEN** defaultPipX SHALL be `1920 - 422 - 20` = 1478 +- **AND** defaultPipY SHALL be `1080 - 422 - 20` = 638 + +### Requirement: Camera fullscreen adapts to output mode +When `cameraFullscreen` is true, the camera SHALL fill the output frame dimensions. In reel mode, this means scaling to 608x1080. The camera source (typically 16:9) SHALL be scaled with `force_original_aspect_ratio=increase` then center-cropped to the output dimensions. 
+ +#### Scenario: Fullscreen camera in reel mode render +- **WHEN** rendering with `cameraFullscreen: true` and `outputMode: 'reel'` +- **THEN** the camera fullscreen filter SHALL scale to 608x1080 (not 1920x1080) +- **AND** the camera source SHALL be center-cropped to fit the 9:16 frame + +#### Scenario: Fullscreen camera in landscape mode unchanged +- **WHEN** rendering or previewing with `cameraFullscreen: true` and `outputMode: 'landscape'` +- **THEN** behavior SHALL be identical to current implementation (camera fills 1920x1080) + +### Requirement: PIP drawn relative to crop region in preview +In reel mode, the editor preview SHALL draw the PIP at position `(cropPixelOffset + pipX, pipY)` on the 1920x1080 canvas, where `pipX`/`pipY` are in the 608x1080 reel coordinate space and `cropPixelOffset` is the crop region's left edge in canvas pixels. + +#### Scenario: PIP preview position in reel mode +- **WHEN** reel mode is active with `reelCropX: 0` (cropPixelOffset = 656) and `pipX: 380` +- **THEN** the PIP SHALL be drawn at canvas position `(656 + 380, pipY)` = `(1036, pipY)` + +#### Scenario: PIP preview position in landscape mode +- **WHEN** landscape mode is active with `pipX: 1478` +- **THEN** the PIP SHALL be drawn at canvas position `(1478, pipY)` (no offset, current behavior) + +#### Scenario: PIP drag bounded to crop region +- **WHEN** the user drags PIP in reel mode +- **THEN** the drag SHALL be constrained to positions within the 608x1080 effective canvas + +### Requirement: FFmpeg render with animated PIP size +The FFmpeg render pipeline SHALL support per-keyframe `pipScale` values, producing animated PIP size transitions in the output video using a two-stage scale approach. 
+ +#### Scenario: Rendered output matches editor preview +- **WHEN** rendering a video with sections having different `pipScale` values +- **THEN** the PIP size in the output video SHALL match the editor preview at each point in time + +#### Scenario: Static PIP size (all sections same) +- **WHEN** all sections have the same `pipScale` +- **THEN** the render pipeline SHALL use a fixed PIP size (no expression overhead) + +#### Scenario: Two-stage scale for animated PIP +- **WHEN** sections have different `pipScale` values +- **THEN** the pipeline SHALL first scale to the max pip size (fixed), apply format/geq for round corners, then apply an animated `scale(eval=frame)` to the target size diff --git a/openspec/specs/reel-mode/spec.md b/openspec/specs/reel-mode/spec.md new file mode 100644 index 0000000..7faaefa --- /dev/null +++ b/openspec/specs/reel-mode/spec.md @@ -0,0 +1,224 @@ +## Requirements + +### Requirement: Project output mode setting +The system SHALL support an `outputMode` project setting with two values: `'landscape'` (16:9) and `'reel'` (9:16). The default value SHALL be `'landscape'`. This setting is stored in `project.settings.outputMode` and persisted with the project JSON. 
+ +#### Scenario: Default output mode for new projects +- **WHEN** a new project is created via `createDefaultProject()` +- **THEN** the project's `settings.outputMode` SHALL be `'landscape'` + +#### Scenario: Normalizing invalid output mode values +- **WHEN** project data is loaded with an invalid or missing `outputMode` value (undefined, null, empty string, arbitrary string) +- **THEN** `normalizeOutputMode()` SHALL return `'landscape'` + +#### Scenario: Normalizing valid reel mode +- **WHEN** project data is loaded with `outputMode` set to `'reel'` +- **THEN** `normalizeOutputMode()` SHALL return `'reel'` + +#### Scenario: Persisting output mode +- **WHEN** the user changes the output mode and the project is saved +- **THEN** the `outputMode` value SHALL be included in the serialized project settings JSON +- **AND** loading the project SHALL restore the same `outputMode` value + +### Requirement: Output mode toggle UI +The editor controls SHALL include a toggle button group allowing the user to switch between 16:9 (landscape) and 9:16 (reel) output modes. The toggle SHALL be placed in the editor playback controls bar alongside existing controls. 
+ +#### Scenario: Toggling to reel mode +- **WHEN** the user clicks the 9:16 toggle button +- **THEN** the editor SHALL set `outputMode` to `'reel'` +- **AND** the crop overlay SHALL appear on the preview canvas +- **AND** the PIP size SHALL be recalculated for the narrower canvas +- **AND** a project save SHALL be scheduled + +#### Scenario: Toggling to landscape mode +- **WHEN** the user clicks the 16:9 toggle button while in reel mode +- **THEN** the editor SHALL set `outputMode` to `'landscape'` +- **AND** the crop overlay SHALL disappear +- **AND** existing `reelCropX` keyframe values SHALL be preserved (not deleted) +- **AND** a project save SHALL be scheduled + +#### Scenario: Toggle is undoable +- **WHEN** the user toggles the output mode +- **THEN** the change SHALL be pushed to the undo stack +- **AND** pressing undo SHALL restore the previous output mode + +### Requirement: Output resolution for reel mode +When `outputMode` is `'reel'`, the `resolveOutputSize()` function SHALL return dimensions in 9:16 aspect ratio, calculated as: `outW = round(sourceHeight * 9 / 16)` (ensured even), `outH = sourceHeight` (ensured even). 
+ +#### Scenario: Reel mode output dimensions for 1920x1080 source +- **WHEN** `resolveOutputSize(1920, 1080, 'reel')` is called +- **THEN** it SHALL return `{ outW: 608, outH: 1080 }` + +#### Scenario: Reel mode output dimensions for 2560x1440 source +- **WHEN** `resolveOutputSize(2560, 1440, 'reel')` is called +- **THEN** it SHALL return `{ outW: 810, outH: 1440 }` + +#### Scenario: Landscape mode output dimensions unchanged +- **WHEN** `resolveOutputSize(1920, 1080, 'landscape')` is called +- **THEN** it SHALL return `{ outW: 1920, outH: 1080 }` + +#### Scenario: Default mode is landscape +- **WHEN** `resolveOutputSize(1920, 1080)` is called without an `outputMode` parameter +- **THEN** it SHALL return landscape dimensions (backward compatible) + +### Requirement: Render pipeline passes output mode +The `renderComposite()` function SHALL accept `outputMode` in its options and pass it through to `buildFilterComplex()` and `buildScreenFilter()`. The ffmpeg output SHALL match the dimensions returned by `resolveOutputSize()` for the given mode. + +#### Scenario: Rendering in reel mode +- **WHEN** `renderComposite()` is called with `outputMode: 'reel'` and source dimensions 1920x1080 +- **THEN** the output MP4 SHALL have dimensions 608x1080 + +#### Scenario: Rendering in landscape mode +- **WHEN** `renderComposite()` is called with `outputMode: 'landscape'` (or no outputMode) +- **THEN** the output MP4 SHALL have dimensions matching the existing 16:9 behavior + +#### Scenario: Camera black fallback uses correct dimensions +- **WHEN** a section has no camera and `outputMode` is `'reel'` +- **THEN** the black color fallback filter SHALL use reel dimensions (608x1080), not 1920x1080 + +### Requirement: reelCropX keyframe property +The keyframe data model SHALL include a `reelCropX` property representing the horizontal position of the 9:16 crop region within the 16:9 source frame. 
The value range SHALL be -1.0 (left edge) to +1.0 (right edge), with 0.0 representing center. The default value SHALL be 0. + +#### Scenario: Normalizing valid reelCropX values +- **WHEN** a keyframe is normalized with `reelCropX` set to a number within [-1, 1] +- **THEN** `normalizeReelCropX()` SHALL return the value unchanged + +#### Scenario: Normalizing out-of-range reelCropX +- **WHEN** a keyframe is normalized with `reelCropX` set to -2.5 +- **THEN** `normalizeReelCropX()` SHALL return -1 (clamped to minimum) + +#### Scenario: Normalizing missing reelCropX +- **WHEN** a keyframe is normalized with `reelCropX` set to undefined, null, NaN, or a non-numeric string +- **THEN** `normalizeReelCropX()` SHALL return 0 (default center) + +#### Scenario: reelCropX included in normalized keyframes +- **WHEN** `normalizeKeyframes()` processes an array of raw keyframes +- **THEN** each output keyframe SHALL include a normalized `reelCropX` property + +### Requirement: reelCropX in section input normalization +The `normalizeSectionInput()` function in render-service SHALL normalize `reelCropX` on each section alongside existing `backgroundZoom`, `backgroundPanX`, `backgroundPanY`. + +#### Scenario: Section with reelCropX +- **WHEN** a section with `reelCropX: 0.5` is normalized +- **THEN** the output section SHALL include `reelCropX: 0.5` + +#### Scenario: Section without reelCropX +- **WHEN** a section without `reelCropX` is normalized +- **THEN** the output section SHALL include `reelCropX: 0` (default) + +### Requirement: Crop overlay in editor preview +When `outputMode` is `'reel'`, the editor preview canvas SHALL display a crop overlay consisting of: +1. Semi-transparent dark rectangles covering the area outside the 9:16 crop region (left and right of crop) +2. A dashed white border around the crop region boundary + +The crop region width SHALL be `round(CANVAS_H * 9 / 16)` = 608 pixels within the 1920x1080 canvas. 
The crop region height SHALL be the full canvas height (1080). + +#### Scenario: Crop overlay visible in reel mode +- **WHEN** the editor is in reel mode (`outputMode === 'reel'`) +- **THEN** the editor preview SHALL show semi-transparent dark areas outside the 9:16 crop region +- **AND** a dashed white rectangle SHALL outline the crop boundary + +#### Scenario: Crop overlay hidden in landscape mode +- **WHEN** the editor is in landscape mode (`outputMode === 'landscape'`) +- **THEN** no crop overlay SHALL be drawn on the preview canvas + +#### Scenario: Crop overlay reflects current reelCropX +- **WHEN** the current section's `reelCropX` is -1 (left edge) +- **THEN** the crop region SHALL be positioned at the left edge of the canvas +- **AND** only the right side SHALL have a dark overlay + +#### Scenario: Crop overlay updates during playback transitions +- **WHEN** the timeline plays across a keyframe boundary where `reelCropX` changes +- **THEN** the crop overlay SHALL smoothly animate to the new position using the same 0.3s transition duration as other keyframe properties + +### Requirement: Draggable crop region +In reel mode, the user SHALL be able to drag the crop region horizontally on the editor preview canvas to reposition it. Dragging SHALL update the `reelCropX` property of the current section's anchor keyframe. 
 + +#### Scenario: Dragging crop region +- **WHEN** the user clicks inside the crop region and drags horizontally +- **THEN** the crop region SHALL follow the mouse movement horizontally +- **AND** the active section's anchor keyframe `reelCropX` SHALL be updated to reflect the new position +- **AND** the value SHALL be clamped to the [-1, 1] range + +#### Scenario: Drag pushes to undo stack +- **WHEN** the user begins dragging the crop region +- **THEN** the state before the drag SHALL be pushed to the undo stack + +#### Scenario: Drag does not work in landscape mode +- **WHEN** the editor is in landscape mode +- **THEN** horizontal drag on the preview canvas SHALL NOT trigger crop region movement + +### Requirement: Smooth animated crop transitions during rendering +When keyframes have different `reelCropX` values, the ffmpeg render pipeline SHALL produce smooth animated transitions between crop positions using the same 0.3s `TRANSITION_DURATION` as other keyframe properties. + +#### Scenario: Animated crop in ffmpeg filter +- **WHEN** two consecutive keyframes have `reelCropX` values of -0.5 and 0.5 +- **THEN** the ffmpeg filter chain SHALL include a `crop` filter with a dynamic X expression built by `buildNumericExpr()` that interpolates between the corresponding pixel offsets over the 0.3s transition window + +#### Scenario: Static crop position +- **WHEN** all keyframes have the same `reelCropX` value of 0 +- **THEN** the ffmpeg crop filter SHALL use a static X offset (no interpolation needed) + +#### Scenario: Crop filter placement in pipeline +- **WHEN** the render pipeline builds the screen filter for reel mode +- **THEN** the crop filter SHALL be placed AFTER the zoompan filter (or after the base scale if no zoom animation exists) +- **AND** the crop SHALL output at `REEL_W x REEL_H` resolution + +### Requirement: Smooth animated crop transitions in editor preview +The `getStateAtTime()` function SHALL interpolate `reelCropX` between keyframes using the same
transition logic as other properties (linear blend over 0.3s when approaching the next keyframe). + +#### Scenario: Preview interpolation of reelCropX +- **WHEN** the playhead is within 0.3s before a keyframe that changes `reelCropX` +- **THEN** `getStateAtTime()` SHALL return an interpolated `reelCropX` value blending between the current and next keyframe values + +#### Scenario: No transition when values match +- **WHEN** adjacent keyframes have the same `reelCropX` value +- **THEN** no interpolation SHALL occur for `reelCropX` + +### Requirement: reelCropX in render keyframes and sections +The `getRenderKeyframes()` and `getRenderSections()` functions SHALL include `reelCropX` in their output, alongside existing `backgroundZoom`, `backgroundPanX`, `backgroundPanY`. + +#### Scenario: Render keyframes include reelCropX +- **WHEN** `getRenderKeyframes()` is called +- **THEN** each keyframe in the output SHALL include a `reelCropX` property clamped to [-1, 1] + +#### Scenario: Render sections include reelCropX +- **WHEN** `getRenderSections()` is called +- **THEN** each section in the output SHALL include a `reelCropX` property from its anchor keyframe + +### Requirement: reelCropX propagation in section operations +Section operations that copy or create keyframe properties SHALL include `reelCropX`: + +#### Scenario: Section split inherits reelCropX +- **WHEN** a section is split at the playhead +- **THEN** the new section's anchor keyframe SHALL inherit `reelCropX` from the parent section's anchor + +#### Scenario: Apply to future copies reelCropX +- **WHEN** the user clicks "Apply to Future" +- **THEN** all future sections' anchor keyframes SHALL receive the current section's `reelCropX` value + +#### Scenario: Default anchor keyframe includes reelCropX +- **WHEN** a new section anchor keyframe is created as a fallback (no existing anchor) +- **THEN** it SHALL include `reelCropX: 0` (center default) + +### Requirement: Crop pixel offset calculation +The 
conversion from `reelCropX` (-1 to +1) to pixel X offset SHALL follow the formula: `pixelOffset = ((reelCropX + 1) / 2) * (sourceWidth - cropWidth)`, clamped to `[0, sourceWidth - cropWidth]`. + +#### Scenario: Center crop calculation +- **WHEN** `reelCropX` is 0 and source is 1920px wide with 608px crop +- **THEN** pixel offset SHALL be `((0 + 1) / 2) * (1920 - 608)` = 656 + +#### Scenario: Left edge crop calculation +- **WHEN** `reelCropX` is -1 +- **THEN** pixel offset SHALL be `((-1 + 1) / 2) * (1920 - 608)` = 0 + +#### Scenario: Right edge crop calculation +- **WHEN** `reelCropX` is 1 +- **THEN** pixel offset SHALL be `((1 + 1) / 2) * (1920 - 608)` = 1312 + +### Requirement: reelCropX persisted in project save +The `reelCropX` property SHALL be included in keyframe serialization via `getProjectTimelineSnapshot()` and round-tripped through `normalizeKeyframes()` on load. + +#### Scenario: Save and reload preserves reelCropX +- **WHEN** a project with keyframes containing `reelCropX: 0.75` is saved and reloaded +- **THEN** the loaded keyframes SHALL contain `reelCropX: 0.75` diff --git a/openspec/specs/reel-zoom-out/spec.md b/openspec/specs/reel-zoom-out/spec.md new file mode 100644 index 0000000..e4ad9eb --- /dev/null +++ b/openspec/specs/reel-zoom-out/spec.md @@ -0,0 +1,75 @@ +## ADDED Requirements + +### Requirement: Zoom-out range in reel mode +The system SHALL allow `backgroundZoom` values between 0.5 and 3.0 (inclusive) when `outputMode` is `'reel'`. In landscape mode, the zoom range SHALL remain 1.0–3.0. When switching from reel mode to landscape mode, any zoom value below 1.0 SHALL be clamped to 1.0. 
+ +#### Scenario: Zoom slider min in reel mode +- **WHEN** the output mode is set to `'reel'` +- **THEN** the zoom slider's minimum value SHALL be 0.5 + +#### Scenario: Zoom slider min in landscape mode +- **WHEN** the output mode is set to `'landscape'` +- **THEN** the zoom slider's minimum value SHALL be 1.0 + +#### Scenario: Zoom clamping on mode switch +- **WHEN** a section has `backgroundZoom` of 0.7 and the user switches from reel to landscape mode +- **THEN** the section's `backgroundZoom` SHALL be clamped to 1.0 + +#### Scenario: Domain normalizer accepts reel zoom values +- **WHEN** `normalizeBackgroundZoom` is called with value 0.5 and mode `'reel'` +- **THEN** it SHALL return 0.5 + +#### Scenario: Domain normalizer clamps below reel minimum +- **WHEN** `normalizeBackgroundZoom` is called with value 0.3 and mode `'reel'` +- **THEN** it SHALL return 0.5 + +### Requirement: Darkened background fill for zoom-out +When `backgroundZoom` is less than 1.0 in reel mode, the system SHALL fill the vertical letterbox bars with a darkened, scaled-up copy of the crop region content. The background content SHALL be darkened to approximately 20–30% of original brightness. 
+ +#### Scenario: Editor preview with zoom-out +- **WHEN** reel mode is active and `backgroundZoom` is 0.7 +- **THEN** the editor preview SHALL show the content centered vertically within the crop region, with darkened content filling the top and bottom bars + +#### Scenario: FFmpeg render with zoom-out +- **WHEN** rendering in reel mode with `backgroundZoom` of 0.7 +- **THEN** the output video SHALL show the content centered vertically within the 608x1080 frame, with darkened content filling the top and bottom bars + +#### Scenario: Zoom at exactly 1.0 +- **WHEN** reel mode is active and `backgroundZoom` is 1.0 +- **THEN** no darkened background SHALL be drawn — the content fills the crop region completely (no letterbox bars) + +### Requirement: Content scaling during zoom-out +When zoom < 1.0 in reel mode, the content SHALL be scaled to fit the crop width (608px for 1080p source) while preserving aspect ratio. The content SHALL be vertically centered within the frame. + +#### Scenario: Content dimensions at zoom 0.5 +- **WHEN** reel mode is active with 1920x1080 source and `backgroundZoom` is 0.5 +- **THEN** the content SHALL occupy the full 608px width and approximately 342px height (608 * 1080/1920), centered vertically in the 1080px frame + +#### Scenario: Content dimensions at zoom 0.75 +- **WHEN** reel mode is active with 1920x1080 source and `backgroundZoom` is 0.75 +- **THEN** the content SHALL occupy the full 608px width and approximately 810px height, centered vertically + +### Requirement: Pan disabled during zoom-out +When `backgroundZoom` is less than 1.0, background pan controls SHALL have no visual effect since the content is fully visible and smaller than the frame. 
+ +#### Scenario: Pan has no effect at zoom 0.7 +- **WHEN** reel mode is active, `backgroundZoom` is 0.7, and `backgroundPanX` is set to 1.0 +- **THEN** the content SHALL remain centered — pan values are ignored when zoom < 1.0 + +### Requirement: Smooth zoom transitions across 1.0 boundary +When zoom animates between a value below 1.0 and a value above 1.0 (crossing the boundary), the darkened background SHALL fade smoothly rather than appearing/disappearing abruptly. + +#### Scenario: Animated zoom from 0.7 to 1.5 +- **WHEN** a keyframe transition animates `backgroundZoom` from 0.7 to 1.5 +- **THEN** the darkened background SHALL gradually fade out as zoom approaches 1.0, and the content SHALL scale smoothly throughout the transition + +### Requirement: Backward compatibility +Existing projects with zoom values between 1.0 and 3.0 SHALL be completely unaffected. The zoom-out pipeline only activates when zoom < 1.0 in reel mode. + +#### Scenario: Landscape render unchanged +- **WHEN** rendering in landscape mode with `backgroundZoom` of 2.0 +- **THEN** the output SHALL be identical to the output before this change + +#### Scenario: Reel render with zoom >= 1 unchanged +- **WHEN** rendering in reel mode with `backgroundZoom` of 1.5 +- **THEN** the output SHALL be identical to the output before this change (zoom + crop, no darkened background) From 0ac4d45798cc977b4a762fb037421e85afa106f5 Mon Sep 17 00:00:00 2001 From: amitay keisar Date: Thu, 19 Mar 2026 18:47:48 +0200 Subject: [PATCH 03/15] chore: add Claude Code custom commands for OpenSpec workflow Add opsx slash commands for managing OpenSpec changes: apply, archive, bulk-archive, continue, explore, ff, new, onboard, sync, verify Co-Authored-By: Claude Opus 4.6 --- .claude/commands/opsx/apply.md | 152 ++++++++ .claude/commands/opsx/archive.md | 157 ++++++++ .claude/commands/opsx/bulk-archive.md | 242 ++++++++++++ .claude/commands/opsx/continue.md | 114 ++++++ .claude/commands/opsx/explore.md | 174 +++++++++ 
.claude/commands/opsx/ff.md | 94 +++++ .claude/commands/opsx/new.md | 69 ++++ .claude/commands/opsx/onboard.md | 525 ++++++++++++++++++++++++++ .claude/commands/opsx/sync.md | 134 +++++++ .claude/commands/opsx/verify.md | 164 ++++++++ 10 files changed, 1825 insertions(+) create mode 100644 .claude/commands/opsx/apply.md create mode 100644 .claude/commands/opsx/archive.md create mode 100644 .claude/commands/opsx/bulk-archive.md create mode 100644 .claude/commands/opsx/continue.md create mode 100644 .claude/commands/opsx/explore.md create mode 100644 .claude/commands/opsx/ff.md create mode 100644 .claude/commands/opsx/new.md create mode 100644 .claude/commands/opsx/onboard.md create mode 100644 .claude/commands/opsx/sync.md create mode 100644 .claude/commands/opsx/verify.md diff --git a/.claude/commands/opsx/apply.md b/.claude/commands/opsx/apply.md new file mode 100644 index 0000000..bf23721 --- /dev/null +++ b/.claude/commands/opsx/apply.md @@ -0,0 +1,152 @@ +--- +name: "OPSX: Apply" +description: Implement tasks from an OpenSpec change (Experimental) +category: Workflow +tags: [workflow, artifacts, experimental] +--- + +Implement tasks from an OpenSpec change. + +**Input**: Optionally specify a change name (e.g., `/opsx:apply add-auth`). If omitted, check if it can be inferred from conversation context. If vague or ambiguous you MUST prompt for available changes. + +**Steps** + +1. **Select the change** + + If a name is provided, use it. Otherwise: + - Infer from conversation context if the user mentioned a change + - Auto-select if only one active change exists + - If ambiguous, run `openspec list --json` to get available changes and use the **AskUserQuestion tool** to let the user select + + Always announce: "Using change: " and how to override (e.g., `/opsx:apply `). + +2. 
**Check status to understand the schema** + ```bash + openspec status --change "" --json + ``` + Parse the JSON to understand: + - `schemaName`: The workflow being used (e.g., "spec-driven") + - Which artifact contains the tasks (typically "tasks" for spec-driven, check status for others) + +3. **Get apply instructions** + + ```bash + openspec instructions apply --change "" --json + ``` + + This returns: + - Context file paths (varies by schema) + - Progress (total, complete, remaining) + - Task list with status + - Dynamic instruction based on current state + + **Handle states:** + - If `state: "blocked"` (missing artifacts): show message, suggest using `/opsx:continue` + - If `state: "all_done"`: congratulate, suggest archive + - Otherwise: proceed to implementation + +4. **Read context files** + + Read the files listed in `contextFiles` from the apply instructions output. + The files depend on the schema being used: + - **spec-driven**: proposal, specs, design, tasks + - Other schemas: follow the contextFiles from CLI output + +5. **Show current progress** + + Display: + - Schema being used + - Progress: "N/M tasks complete" + - Remaining tasks overview + - Dynamic instruction from CLI + +6. **Implement tasks (loop until done or blocked)** + + For each pending task: + - Show which task is being worked on + - Make the code changes required + - Keep changes minimal and focused + - Mark task complete in the tasks file: `- [ ]` → `- [x]` + - Continue to next task + + **Pause if:** + - Task is unclear → ask for clarification + - Implementation reveals a design issue → suggest updating artifacts + - Error or blocker encountered → report and wait for guidance + - User interrupts + +7. 
**On completion or pause, show status** + + Display: + - Tasks completed this session + - Overall progress: "N/M tasks complete" + - If all done: suggest archive + - If paused: explain why and wait for guidance + +**Output During Implementation** + +``` +## Implementing: (schema: ) + +Working on task 3/7: +[...implementation happening...] +✓ Task complete + +Working on task 4/7: +[...implementation happening...] +✓ Task complete +``` + +**Output On Completion** + +``` +## Implementation Complete + +**Change:** +**Schema:** +**Progress:** 7/7 tasks complete ✓ + +### Completed This Session +- [x] Task 1 +- [x] Task 2 +... + +All tasks complete! You can archive this change with `/opsx:archive`. +``` + +**Output On Pause (Issue Encountered)** + +``` +## Implementation Paused + +**Change:** +**Schema:** +**Progress:** 4/7 tasks complete + +### Issue Encountered + + +**Options:** +1.