|
| 1 | +import { useEffect, useMemo, useState, type CSSProperties } from "react" |
| 2 | +import { PROJECT_SETTINGS } from "../../../project/project" |
| 3 | +import { useGlobalCurrentFrame } from "../frame" |
| 4 | +import { type AudioSegment, useAudioSegments } from "../audio-plan" |
| 5 | +import { loadWaveformData, type WaveformData } from "../audio-waveform" |
| 6 | +import { useTimelineClips } from "../timeline" |
| 7 | + |
/**
 * Props for the Character talking-head component, which swaps between two
 * mouth images based on the current audio amplitude.
 */
type CharacterProps = {
  /** Image source shown while the character is silent. */
  mouthClosed: string
  /** Image source shown while the character is speaking. */
  mouthOpen: string
  /** Amplitude at or above which the mouth is drawn open; falls back to DEFAULT_THRESHOLD when omitted or non-finite. */
  threshold?: number
  /** When set, only audio segments whose timeline clip has this label drive the mouth. */
  clipLabel?: string
  /** Inline styles merged over the component's default `display: block`. */
  style?: CSSProperties
  /** CSS class forwarded to the underlying <img>. */
  className?: string
  /** Alt text for the <img>; defaults to "character". */
  alt?: string
}
| 17 | + |
// Default speaking-detection amplitude threshold (used when the `threshold`
// prop is omitted or non-finite).
const DEFAULT_THRESHOLD = 0.1

// Restrict `value` to the inclusive range [min, max].
const clamp = (value: number, min: number, max: number): number => {
  if (value < min) return min
  if (value > max) return max
  return value
}
| 22 | + |
/**
 * Loads waveform data for every unique, non-empty path and caches the results
 * in a Map keyed by path. A path maps to `null` when its load produced no
 * data or failed; paths still loading are simply absent from the map.
 */
const useWaveformBank = (paths: string[]) => {
  const [bank, setBank] = useState<Map<string, WaveformData | null>>(new Map())

  // Dedupe + sort so `key` is stable regardless of segment ordering, keeping
  // the effect from re-running when only the order of `paths` changes.
  const { key, list } = useMemo(() => {
    const unique = Array.from(new Set(paths.filter(Boolean))).sort()
    return { key: unique.join("\n"), list: unique }
  }, [paths])

  useEffect(() => {
    let alive = true
    const store = (path: string, data: WaveformData | null) => {
      if (!alive) return
      setBank((prev) => {
        // Skip the state update (and re-render) when nothing changed.
        if (prev.get(path) === data) return prev
        const next = new Map(prev)
        next.set(path, data)
        return next
      })
    }
    for (const path of list) {
      void loadWaveformData(path).then(
        (data) => store(path, data),
        // Record a failed load as `null` instead of leaking an unhandled
        // promise rejection and leaving the path permanently "loading".
        () => store(path, null),
      )
    }
    return () => {
      // Drop late resolutions after unmount or after `paths` changed.
      alive = false
    }
  }, [key, list])

  return bank
}
| 51 | + |
/**
 * Returns the waveform amplitude of `segment` at `currentFrame`, scaled by
 * the segment's volume and linear fade-in/fade-out envelopes. Returns 0 when
 * the frame lies outside the segment or no usable waveform is available.
 */
const resolveSegmentAmplitude = (
  segment: AudioSegment,
  waveform: WaveformData | null,
  currentFrame: number,
  fps: number,
) => {
  if (!waveform) return 0
  const { peaks, durationSec } = waveform
  if (peaks.length === 0 || durationSec <= 0) return 0

  const segmentFrames = Math.max(0, segment.durationFrames)
  if (segmentFrames <= 0) return 0

  // Offset of the playhead into the segment; silent outside [0, duration).
  const offset = currentFrame - segment.projectStartFrame
  if (offset < 0 || offset >= segmentFrames) return 0

  // Map the project frame onto the source audio timeline, then locate the
  // matching peak bucket.
  const sourceFrame = Math.max(0, segment.sourceStartFrame + offset)
  const positionSec = Math.max(0, sourceFrame / fps)
  const fraction = Math.min(1, Math.max(0, positionSec / durationSec))
  const peakIndex = Math.min(
    peaks.length - 1,
    Math.max(0, Math.floor(fraction * peaks.length)),
  )

  const gain = Number.isFinite(segment.volume) ? Math.max(0, segment.volume ?? 1) : 1
  let level = (peaks[peakIndex] ?? 0) * gain

  // Linear fade-in over the first `fadeInFrames` frames.
  const fadeIn = Math.max(0, segment.fadeInFrames ?? 0)
  if (fadeIn > 0) {
    level *= Math.min(1, Math.max(0, offset / fadeIn))
  }

  // Linear fade-out over the last `fadeOutFrames` frames.
  const fadeOut = Math.max(0, segment.fadeOutFrames ?? 0)
  if (fadeOut > 0 && offset >= Math.max(0, segmentFrames - fadeOut)) {
    level *= Math.min(1, Math.max(0, (segmentFrames - 1 - offset) / fadeOut))
  }

  return level
}
| 92 | + |
/**
 * Talking-head character: renders `mouthOpen` while the (optionally
 * clip-filtered) audio amplitude at the current frame reaches `threshold`,
 * and `mouthClosed` otherwise.
 */
export const Character = ({
  mouthClosed,
  mouthOpen,
  threshold = DEFAULT_THRESHOLD,
  clipLabel,
  style,
  className,
  alt,
}: CharacterProps) => {
  const frame = useGlobalCurrentFrame()
  const clips = useTimelineClips()
  const segments = useAudioSegments()
  const fps = PROJECT_SETTINGS.fps

  // Without a clipLabel every segment counts; otherwise keep only segments
  // attached to clips carrying that label.
  const activeSegments = useMemo(() => {
    if (!clipLabel) return segments
    const labeledIds = new Set(
      clips.flatMap((clip) => (clip.label === clipLabel ? [clip.id] : [])),
    )
    if (labeledIds.size === 0) return []
    return segments.filter((segment) =>
      segment.clipId ? labeledIds.has(segment.clipId) : false,
    )
  }, [segments, clipLabel, clips])

  const sourcePaths = useMemo(
    () => activeSegments.map((segment) => segment.source.path),
    [activeSegments],
  )
  const waveformBank = useWaveformBank(sourcePaths)

  // Loudest active segment at this frame drives the mouth.
  const amplitude = useMemo(
    () =>
      activeSegments.reduce((peak, segment) => {
        const waveform = waveformBank.get(segment.source.path) ?? null
        return Math.max(
          peak,
          resolveSegmentAmplitude(segment, waveform, frame, fps),
        )
      }, 0),
    [frame, fps, activeSegments, waveformBank],
  )

  // Guard against NaN/Infinity/negative thresholds from callers.
  const effectiveThreshold = Number.isFinite(threshold)
    ? Math.max(0, threshold)
    : DEFAULT_THRESHOLD
  const speaking = amplitude >= effectiveThreshold

  return (
    <img
      src={speaking ? mouthOpen : mouthClosed}
      alt={alt ?? "character"}
      className={className}
      style={{ display: "block", ...style }}
    />
  )
}
0 commit comments